ansible-playbook [core 2.16.14]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-qja
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.1 (main, Feb 21 2024, 14:18:26) [GCC 8.5.0 20210514 (Red Hat 8.5.0-21)] (/usr/bin/python3.12)
  jinja version = 3.1.6
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_basic.yml ******************************************************
2 plays in /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:5
Wednesday 02 April 2025 12:12:36 -0400 (0:00:00.009) 0:00:00.009 *******
ok: [managed-node2] => { "ansible_facts": { "__podman_test_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n" }, "mysql_container_root_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n" } }, "ansible_included_var_files": [ "/tmp/podman-u04/tests/vars/vault-variables.yml" ], "changed": false }

PLAY [Ensure that the role runs with default parameters] ***********************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:9
Wednesday 02 April 2025 12:12:36 -0400 (0:00:00.065) 0:00:00.075 *******
ok: [managed-node2]

TASK [Create tmpdir for testing] ***********************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:143
Wednesday 02 April 2025 12:12:38 -0400 (0:00:01.318) 0:00:01.393 *******
changed: [managed-node2] => { "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/tmp/lsr_sr1vi4ai_podman", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 }
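For reference, the two setup tasks above correspond to playbook tasks roughly like the following (a sketch, not the verbatim tests_basic.yml; the tempfile prefix/suffix are inferred from the generated path /tmp/lsr_sr1vi4ai_podman, and the register name is illustrative):

    - name: Include vault variables
      ansible.builtin.include_vars:
        file: vars/vault-variables.yml   # vault-encrypted values show up above as __ansible_vault blobs

    - name: Create tmpdir for testing
      ansible.builtin.tempfile:
        state: directory                 # tempfile directories default to mode 0700, matching the result above
        prefix: lsr_
        suffix: _podman
      register: __podman_tmpdir          # illustrative register name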
TASK [Change tmpdir permissions] ***********************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:150
Wednesday 02 April 2025 12:12:38 -0400 (0:00:00.489) 0:00:01.814 *******
changed: [managed-node2] => { "changed": true, "gid": 0, "group": "root", "mode": "0777", "owner": "root", "path": "/tmp/lsr_sr1vi4ai_podman", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 }

TASK [Enable podman copr] ******************************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:161
Wednesday 02 April 2025 12:12:39 -0400 (0:00:00.489) 0:00:02.304 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" }

TASK [Install podman from updates-testing] *************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:166
Wednesday 02 April 2025 12:12:39 -0400 (0:00:00.050) 0:00:02.355 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" }

TASK [Podman version] **********************************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:171
Wednesday 02 April 2025 12:12:39 -0400 (0:00:00.044) 0:00:02.399 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" }

TASK [Create user] *************************************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:176
Wednesday 02 April 2025 12:12:39 -0400 (0:00:00.042) 0:00:02.442 *******
changed: [managed-node2] => { "changed": true, "comment": "", "create_home": true, "group": 3001, "home": "/home/podman_basic_user", "name": "podman_basic_user", "shell": "/bin/bash", "state": "present", "system": false, "uid": 3001 }

TASK [Create tempfile for kube_src] ********************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:181
Wednesday 02 April 2025 12:12:39 -0400 (0:00:00.632) 0:00:03.074 *******
changed: [managed-node2 -> localhost] => { "changed": true, "gid": 0, "group": "root", "mode": "0600", "owner": "root", "path": "/tmp/lsr_podman_djkjebp5.yml", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 0, "state": "file", "uid": 0 }

TASK [Write kube_file_src] *****************************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:189
Wednesday 02 April 2025 12:12:40 -0400 (0:00:00.208) 0:00:03.282 *******
changed: [managed-node2 -> localhost] => { "changed": true, "checksum": "d1a0affd331e4d848e9d9c76188d08738b98b67f", "dest": "/tmp/lsr_podman_djkjebp5.yml", "gid": 0, "group": "root", "md5sum": "007174a997607fecd309e07294822217", "mode": "0600", "owner": "root", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 710, "src": "/root/.ansible/tmp/ansible-tmp-1743610360.162552-8178-66675442607778/source", "state": "file", "uid": 0 }
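The two kube_src tasks above, which are delegated to localhost, follow a common pattern of staging a generated file on the controller. A minimal sketch, assuming hypothetical register and content variable names:

    - name: Create tempfile for kube_src
      ansible.builtin.tempfile:
        prefix: lsr_podman_
        suffix: .yml
      delegate_to: localhost
      register: __kube_file_src        # illustrative register name

    - name: Write kube_file_src
      ansible.builtin.copy:
        content: "{{ __kube_yaml }}"   # illustrative variable holding the kube YAML (710 bytes above)
        dest: "{{ __kube_file_src.path }}"
        mode: "0600"
      delegate_to: localhost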
TASK [Create host directories for data] ****************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:196
Wednesday 02 April 2025 12:12:40 -0400 (0:00:00.634) 0:00:03.917 *******
changed: [managed-node2] => (item=['httpd1', 'podman_basic_user', 3001]) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": [ "httpd1", "podman_basic_user", 3001 ], "mode": "0755", "owner": "podman_basic_user", "path": "/tmp/lsr_sr1vi4ai_podman/httpd1", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 3001 }
changed: [managed-node2] => (item=['httpd2', 'root', 0]) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": [ "httpd2", "root", 0 ], "mode": "0755", "owner": "root", "path": "/tmp/lsr_sr1vi4ai_podman/httpd2", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 }
changed: [managed-node2] => (item=['httpd3', 'root', 0]) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": [ "httpd3", "root", 0 ], "mode": "0755", "owner": "root", "path": "/tmp/lsr_sr1vi4ai_podman/httpd3", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 }

TASK [Create data files] *******************************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:204
Wednesday 02 April 2025 12:12:41 -0400 (0:00:00.985) 0:00:04.903 *******
changed: [managed-node2] => (item=['httpd1', 'podman_basic_user', 3001]) => { "ansible_loop_var": "item", "changed": true, "checksum": "40bd001563085fc35165329ea1ff5c5ecbdbbeef", "dest": "/tmp/lsr_sr1vi4ai_podman/httpd1/index.txt", "gid": 0, "group": "root", "item": [ "httpd1", "podman_basic_user", 3001 ], "md5sum": "202cb962ac59075b964b07152d234b70", "mode": "0644", "owner": "podman_basic_user", "secontext": "unconfined_u:object_r:admin_home_t:s0", "size": 3, "src": "/root/.ansible/tmp/ansible-tmp-1743610361.8035398-8233-134635055705623/source", "state": "file", "uid": 3001 }
changed: [managed-node2] => (item=['httpd2', 'root', 0]) => { "ansible_loop_var": "item", "changed": true, "checksum": "40bd001563085fc35165329ea1ff5c5ecbdbbeef", "dest": "/tmp/lsr_sr1vi4ai_podman/httpd2/index.txt", "gid": 0, "group": "root", "item": [ "httpd2", "root", 0 ], "md5sum": "202cb962ac59075b964b07152d234b70", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:admin_home_t:s0", "size": 3, "src": "/root/.ansible/tmp/ansible-tmp-1743610362.4155128-8233-19129380191169/source", "state": "file", "uid": 0 }
changed: [managed-node2] => (item=['httpd3', 'root', 0]) => { "ansible_loop_var": "item", "changed": true, "checksum": "40bd001563085fc35165329ea1ff5c5ecbdbbeef", "dest": "/tmp/lsr_sr1vi4ai_podman/httpd3/index.txt", "gid": 0, "group": "root", "item": [ "httpd3", "root", 0 ], "md5sum": "202cb962ac59075b964b07152d234b70", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:admin_home_t:s0", "size": 3, "src": "/root/.ansible/tmp/ansible-tmp-1743610363.036985-8233-254696807528367/source", "state": "file", "uid": 0 }

TASK [Run role - do not pull images] *******************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:214
Wednesday 02 April 2025 12:12:43 -0400 (0:00:01.859) 0:00:06.763 *******
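The loop items above are [name, owner, uid] triples. A minimal sketch of how such a loop might be written (the checksum 40bd00...beef in the results is the SHA-1 of the string "123", so that is the assumed file content; triple layout inferred from the logged items):

    - name: Create host directories for data
      ansible.builtin.file:
        path: "/tmp/lsr_sr1vi4ai_podman/{{ item.0 }}"
        state: directory
        owner: "{{ item.1 }}"   # item.2 is the expected uid, visible in the results
        mode: "0755"
      loop:
        - [httpd1, podman_basic_user, 3001]
        - [httpd2, root, 0]
        - [httpd3, root, 0]

    - name: Create data files
      ansible.builtin.copy:
        content: "123"          # 3 bytes, matching size and checksum above
        dest: "/tmp/lsr_sr1vi4ai_podman/{{ item.0 }}/index.txt"
        owner: "{{ item.1 }}"
        mode: "0644"
      loop:
        - [httpd1, podman_basic_user, 3001]
        - [httpd2, root, 0]
        - [httpd3, root, 0]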
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Wednesday 02 April 2025 12:12:43 -0400 (0:00:00.052) 0:00:06.815 *******
included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Wednesday 02 April 2025 12:12:43 -0400 (0:00:00.025) 0:00:06.841 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Wednesday 02 April 2025 12:12:43 -0400 (0:00:00.036) 0:00:06.878 *******
ok: [managed-node2] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Wednesday 02 April 2025 12:12:44 -0400 (0:00:00.330) 0:00:07.209 *******
ok: [managed-node2] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false }

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Wednesday 02 April 2025 12:12:44 -0400 (0:00:00.023) 0:00:07.232 *******
ok: [managed-node2] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Wednesday 02 April 2025 12:12:44 -0400 (0:00:00.334) 0:00:07.567 *******
ok: [managed-node2] => { "ansible_facts": { "__podman_is_transactional": false }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Wednesday 02 April 2025 12:12:44 -0400 (0:00:00.024) 0:00:07.591 *******
ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" }
skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" }
ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" }
ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" }
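The per-platform vars loading above (RedHat.yml loaded, CentOS.yml skipped on "__vars_file is file", CentOS_8.yml loaded twice because two name patterns resolve to the same file) suggests a loop along these lines; a sketch, not the role's exact set_vars.yml:

    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ __vars_file }}"
      loop:
        - RedHat.yml
        - "{{ ansible_distribution }}.yml"
        - "{{ ansible_distribution }}_{{ ansible_distribution_major_version }}.yml"
        - "{{ ansible_distribution }}_{{ ansible_distribution_version }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file   # matches the false_condition logged for CentOS.yml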
"/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 12:12:44 -0400 (0:00:00.053) 0:00:07.645 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:12:46 -0400 (0:00:01.643) 0:00:09.289 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:12:46 -0400 (0:00:00.048) 0:00:09.337 ******* changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: shadow-utils-subid-2:4.6-22.el8.x86_64", "Installed: podman-gvproxy-3:4.9.4-0.1.module_el8+971+3d3df00d.x86_64", "Installed: libnet-1.1.6-15.el8.x86_64", "Installed: runc-1:1.1.12-1.module_el8+885+7da147f3.x86_64", "Installed: container-selinux-2:2.229.0-2.module_el8+847+7863d4e6.noarch", "Installed: dnsmasq-2.79-33.el8.x86_64", "Installed: yajl-2.1.0-12.el8.x86_64", "Installed: podman-plugins-3:4.9.4-0.1.module_el8+971+3d3df00d.x86_64", "Installed: fuse-common-3.3.0-19.el8.x86_64", "Installed: podman-3:4.9.4-0.1.module_el8+971+3d3df00d.x86_64", "Installed: containernetworking-plugins-1:1.4.0-2.module_el8+974+0c52b299.x86_64", "Installed: criu-3.18-4.module_el8+804+f131391c.x86_64", "Installed: libslirp-4.4.0-1.module_el8+804+f131391c.x86_64", "Installed: fuse3-3.3.0-19.el8.x86_64", "Installed: containers-common-2:1-81.module_el8+968+fbb249c7.x86_64", "Installed: fuse-overlayfs-1.13-1.module_el8+804+f131391c.x86_64", "Installed: podman-catatonit-3:4.9.4-0.1.module_el8+971+3d3df00d.x86_64", "Installed: criu-libs-3.18-4.module_el8+804+f131391c.x86_64", "Installed: protobuf-c-1.3.0-8.el8.x86_64", "Installed: slirp4netns-1.2.3-1.module_el8+951+32019cde.x86_64", "Installed: fuse3-libs-3.3.0-19.el8.x86_64", "Installed: crun-1.14.3-2.module_el8+968+fbb249c7.x86_64", "Installed: conmon-3:2.1.10-1.module_el8+804+f131391c.x86_64" ] } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:13:48 -0400 (0:01:02.441) 0:01:11.778 ******* skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:13:48 -0400 (0:00:00.049) 0:01:11.828 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not 
TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Wednesday 02 April 2025 12:13:48 -0400 (0:00:00.048) 0:01:11.877 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Wednesday 02 April 2025 12:13:48 -0400 (0:00:00.048) 0:01:11.925 *******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.031222", "end": "2025-04-02 12:13:49.199713", "rc": 0, "start": "2025-04-02 12:13:49.168491" }
STDOUT:
podman version 4.9.4-dev

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Wednesday 02 April 2025 12:13:49 -0400 (0:00:00.472) 0:01:12.397 *******
ok: [managed-node2] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Wednesday 02 April 2025 12:13:49 -0400 (0:00:00.034) 0:01:12.432 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Wednesday 02 April 2025 12:13:49 -0400 (0:00:00.040) 0:01:12.473 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "(podman_quadlet_specs | length > 0) or (podman_secrets | length > 0)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Wednesday 02 April 2025 12:13:49 -0400 (0:00:00.049) 0:01:12.522 *******
META: end_host conditional evaluated to False, continuing execution for managed-node2
skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" }
MSG:
end_host conditional evaluated to false, continuing execution for managed-node2

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Wednesday 02 April 2025 12:13:49 -0400 (0:00:00.049) 0:01:12.572 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" }
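The version-gating sequence above (command, set_fact, then version checks) follows a standard pattern; a sketch, with an illustrative register name, based on the cmd and false_condition values in the log:

    - name: Get podman version
      ansible.builtin.command: podman --version
      register: __podman_version_output   # illustrative register name
      changed_when: false

    - name: Set podman version
      ansible.builtin.set_fact:
        podman_version: "{{ (__podman_version_output.stdout.split())[-1] }}"   # "podman version 4.9.4-dev" -> "4.9.4-dev"

    - name: Podman package version must be 4.2 or later
      ansible.builtin.fail:
        msg: "podman version {{ podman_version }} is too old - 4.2 or later is required"
      when: podman_version is version("4.2", "<")   # the exact condition logged above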
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Wednesday 02 April 2025 12:13:49 -0400 (0:00:00.038) 0:01:12.611 *******
META: end_host conditional evaluated to False, continuing execution for managed-node2
skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" }
MSG:
end_host conditional evaluated to false, continuing execution for managed-node2

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Wednesday 02 April 2025 12:13:49 -0400 (0:00:00.070) 0:01:12.682 *******
included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Wednesday 02 April 2025 12:13:49 -0400 (0:00:00.060) 0:01:12.742 *******
ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "root", "/root", "/bin/bash" ] } }, "changed": false }

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.446) 0:01:13.189 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.033) 0:01:13.222 *******
ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false }

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.042) 0:01:13.265 *******
ok: [managed-node2] => { "changed": false, "stat": { "atime": 1700557386.0, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.348) 0:01:13.614 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }
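The user lookup and subid checks above resolve the target user and, for non-root users, would verify subuid/subgid ranges. A sketch of the pattern (module parameters are standard; task wording follows the logged names and conditions):

    - name: Get user information
      ansible.builtin.getent:
        database: passwd
        key: "{{ __podman_user }}"
        fail_key: false          # let the follow-up task decide whether to fail

    - name: Check with getsubids for user subuids
      ansible.builtin.command: getsubids {{ __podman_user }}
      changed_when: false
      when: __podman_user not in ["root", "0"]   # skipped above because the user is root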
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.030) 0:01:13.645 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.032) 0:01:13.678 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.030) 0:01:13.709 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.031) 0:01:13.741 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.030) 0:01:13.772 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.032) 0:01:13.804 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.030) 0:01:13.835 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.031) 0:01:13.867 *******
ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false }
"/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.061) 0:01:13.928 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.059) 0:01:13.987 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.031) 0:01:14.018 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:13:50 -0400 (0:00:00.029) 0:01:14.048 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.092) 0:01:14.141 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.030) 0:01:14.172 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.031) 0:01:14.203 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.060) 0:01:14.264 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK 
TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15
Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.030) 0:01:14.294 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135
Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.031) 0:01:14.326 *******
included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8
Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.085) 0:01:14.411 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16
Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.047) 0:01:14.459 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21
Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.040) 0:01:14.499 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27
Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.036) 0:01:14.536 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" }

TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141
Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.041) 0:01:14.577 *******

TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.108) 0:01:14.685 *******
included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2

TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.055) 0:01:14.740 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }
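"Manage firewall for specified ports" hands off to the firewall role. Based on the port item logged later ({'port': '15001-15003/tcp', 'state': 'enabled'}), the invocation is roughly this (a sketch; the surrounding task file is not shown):

    - name: Manage firewall for specified ports
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.firewall
      vars:
        firewall:
          - port: 15001-15003/tcp
            state: enabled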
"__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Wednesday 02 April 2025 12:13:51 -0400 (0:00:00.035) 0:01:14.776 ******* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Wednesday 02 April 2025 12:13:52 -0400 (0:00:00.381) 0:01:15.157 ******* ok: [managed-node2] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Wednesday 02 April 2025 12:13:52 -0400 (0:00:00.038) 0:01:15.195 ******* ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Wednesday 02 April 2025 12:13:52 -0400 (0:00:00.350) 0:01:15.546 ******* ok: [managed-node2] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Wednesday 02 April 2025 12:13:52 -0400 (0:00:00.037) 0:01:15.583 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Wednesday 02 April 2025 12:13:55 -0400 (0:00:02.861) 0:01:18.445 ******* skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Wednesday 02 April 2025 12:13:55 -0400 (0:00:00.030) 0:01:18.476 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Wednesday 02 April 2025 12:13:55 -0400 (0:00:00.031) 0:01:18.507 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Wednesday 02 April 2025 12:13:55 -0400 (0:00:00.030) 0:01:18.537 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services 
| bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Wednesday 02 April 2025 12:13:55 -0400 (0:00:00.038) 0:01:18.576 ******* skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Wednesday 02 April 2025 12:13:55 -0400 (0:00:00.036) 0:01:18.613 ******* ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "dbus.socket dbus.service polkit.service sysinit.target basic.target system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target ebtables.service ipset.service ip6tables.service iptables.service nftables.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld 
(ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice dbus.socket", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", 
"RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Wednesday 02 April 2025 12:13:56 -0400 (0:00:00.781) 0:01:19.395 ******* changed: [managed-node2] => { "changed": true, "enabled": true, "name": "firewalld", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice polkit.service dbus.socket sysinit.target dbus.service basic.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service nftables.service shutdown.target ip6tables.service ipset.service iptables.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic 
firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice dbus.socket", "Restart": "no", 
"RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Wednesday 02 April 2025 12:13:57 -0400 (0:00:00.970) 0:01:20.365 ******* ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/libexec/platform-python", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Wednesday 02 April 2025 12:13:57 -0400 (0:00:00.044) 0:01:20.410 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Wednesday 02 April 2025 12:13:57 -0400 (0:00:00.028) 0:01:20.438 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Wednesday 02 April 2025 12:13:57 -0400 (0:00:00.033) 0:01:20.471 ******* changed: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "15001-15003/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Wednesday 02 April 2025 12:13:58 -0400 (0:00:01.403) 0:01:21.875 ******* skipping: 
TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120
Wednesday 02 April 2025 12:13:58 -0400 (0:00:01.403) 0:01:21.875 *******
skipping: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "port": "15001-15003/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" }
skipping: [managed-node2] => { "changed": false }
MSG:
All items skipped

TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130
Wednesday 02 April 2025 12:13:58 -0400 (0:00:00.053) 0:01:21.928 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139
Wednesday 02 April 2025 12:13:58 -0400 (0:00:00.046) 0:01:21.975 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144
Wednesday 02 April 2025 12:13:58 -0400 (0:00:00.035) 0:01:22.011 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153
Wednesday 02 April 2025 12:13:58 -0400 (0:00:00.037) 0:01:22.049 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163
Wednesday 02 April 2025 12:13:58 -0400 (0:00:00.032) 0:01:22.082 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169
Wednesday 02 April 2025 12:13:58 -0400 (0:00:00.030) 0:01:22.113 *******
skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" }

TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148
Wednesday 02 April 2025 12:13:59 -0400 (0:00:00.082) 0:01:22.195 *******
redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux
redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux
redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean
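"Manage selinux for specified ports" hands the same port range to the selinux role. A sketch of the likely invocation, using that role's documented selinux_ports variable (the setype is an assumption; the log does not show which type the test maps the ports to):

    - name: Manage selinux for specified ports
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.selinux
      vars:
        selinux_ports:
          - ports: "15001-15003"
            proto: tcp
            setype: http_port_t   # assumed SELinux port type
            state: present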
12:13:59 -0400 (0:00:00.163) 0:01:22.358 *******
included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node2

TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2
Wednesday 02 April 2025 12:13:59 -0400 (0:00:00.064) 0:01:22.423 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] *************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7
Wednesday 02 April 2025 12:13:59 -0400 (0:00:00.038) 0:01:22.461 *******
included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node2

TASK [fedora.linux_system_roles.selinux : Check if system is ostree] ***********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5
Wednesday 02 April 2025 12:13:59 -0400 (0:00:00.058) 0:01:22.519 *******
ok: [managed-node2] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10
Wednesday 02 April 2025 12:13:59 -0400 (0:00:00.357) 0:01:22.876 *******
ok: [managed-node2] => { "ansible_facts": { "__selinux_is_ostree": false }, "changed": false }

TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17
Wednesday 02 April 2025 12:13:59 -0400 (0:00:00.039) 0:01:22.916 *******
ok: [managed-node2] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22
Wednesday 02 April 2025 12:14:00 -0400 (0:00:00.348) 0:01:23.264 *******
ok: [managed-node2] => { "ansible_facts": { "__selinux_is_transactional": false }, "changed": false }

TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] *******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26
Wednesday 02 April 2025 12:14:00 -0400 (0:00:00.039) 0:01:23.304 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_python_version is version('3', '<')", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35
Wednesday 02 April 2025 12:14:00 -0400 (0:00:00.033) 0:01:23.338 *******
ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] }
MSG: Nothing to do

TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46
Wednesday 02 April 2025 12:14:03 -0400 (0:00:02.856) 0:01:26.194 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_os_family == \"Suse\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] *******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58
Wednesday 02 April 2025 12:14:03 -0400 (0:00:00.056) 0:01:26.250 *******
changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: policycoreutils-python-utils-2.9-26.el8.noarch" ] }

TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:72
Wednesday 02 April 2025 12:14:06 -0400 (0:00:03.582) 0:01:29.832 *******
skipping: [managed-node2] => { "false_condition": "__selinux_is_transactional | d(false)" }

TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:77
Wednesday 02 April 2025 12:14:06 -0400 (0:00:00.030) 0:01:29.863 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:82
Wednesday 02 April 2025 12:14:06 -0400 (0:00:00.069) 0:01:29.932 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Refresh facts] ***********************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89
Wednesday 02 April 2025 12:14:06 -0400 (0:00:00.031) 0:01:29.963 *******
ok: [managed-node2]

TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:5
Wednesday 02 April 2025 12:14:07 -0400 (0:00:00.748) 0:01:30.712 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_selinux.status == \"enabled\" and (selinux_state or selinux_policy)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:13
Wednesday 02 April 2025 12:14:07 -0400 (0:00:00.029) 0:01:30.741 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_selinux.status == \"disabled\" and selinux_state", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] *********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:21
Wednesday 02 April 2025 12:14:07 -0400 (0:00:00.031) 0:01:30.772 *******
ok: [managed-node2] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false }

TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] **********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:25
Wednesday 02 April 2025 12:14:07 -0400 (0:00:00.048) 0:01:30.821 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] *********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:30
Wednesday 02 April 2025 12:14:07 -0400 (0:00:00.030) 0:01:30.851 *******
skipping: [managed-node2] => { "false_condition": "ansible_selinux.status == \"disabled\"" }

TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:35
Wednesday 02 April 2025 12:14:07 -0400 (0:00:00.029) 0:01:30.881 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:43
Wednesday 02 April 2025 12:14:07 -0400 (0:00:00.030) 0:01:30.911 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:48
Wednesday 02 April 2025 12:14:07 -0400 (0:00:00.029) 0:01:30.941 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:53
Wednesday 02 April 2025 12:14:07 -0400 (0:00:00.030) 0:01:30.971 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:58
Wednesday 02 April 2025 12:14:07 -0400 (0:00:00.029) 0:01:31.001 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] ****************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:63
Wednesday 02 April 2025 12:14:07 -0400 (0:00:00.030) 0:01:31.031 *******
skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" }
[managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87 Wednesday 02 April 2025 12:14:07 -0400 (0:00:00.029) 0:01:31.089 ******* changed: [managed-node2] => (item={'ports': '15001-15003', 'setype': 'http_port_t'}) => { "__selinux_item": { "ports": "15001-15003", "setype": "http_port_t" }, "ansible_loop_var": "__selinux_item", "changed": true, "ports": [ "15001-15003" ], "proto": "tcp", "setype": "http_port_t", "state": "present" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:99 Wednesday 02 April 2025 12:14:10 -0400 (0:00:02.119) 0:01:33.208 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 Wednesday 02 April 2025 12:14:10 -0400 (0:00:00.030) 0:01:33.239 ******* ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:13dad22da122be9f7d5df4dbedae6a515323542fdc1a7e466d7a1a3d36d29731", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:8bd25829d921be0b5adf92ddaca7ab94cedca1d57796749cfa63571b6550e3da", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:2699d826efd46176017695c768804c505a54b277b05f1feb9c43a613bab4e6aa", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:99920dd4e0855870f7e6f9666928d13fe18ddccca9d38b92ea70a6ce3c8c7539", "enabled": 1 } }, "aiccu": { "100": { "checksum": "sha256:a7aedc8354b4335412871adfd2ab5b0c6da1ea63c8dd797718e4214a5d511bb5", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:8adb5c3a5ed74695e975eecbf290640b179eb6345a7740745ecfe3164efe209f", "enabled": 1 } }, "ajaxterm": { "100": { "checksum": "sha256:d3a03c2837d5dde7145e27902ff8578e00734ab34e8ea1a45aee58b83e9ad6d1", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:202f94345fba8f4bc942dc9b75bbb6eea3b4cb02411cf6ed79858d72aa883c89", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:f9a99d97370017307349a154ce479969395bbbfe434e4829573269f770efdd0d", "enabled": 1 } }, "amtu": { "100": { "checksum": "sha256:bc9934a2ae61fa117614f201479966d788484f3a7382de4ebad99790a465e2b7", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:b8aabc624243533d483c3dd5574a490a43e7ec0f2f7940798c12b4089bbd0642", "enabled": 1 } }, "antivirus": { "100": { "checksum": "sha256:1de6460ccaea5a5749eba17489b9765035c8202eb9492485ff39157564001a2c", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:1a0c38364558bebdae3efaa1fcf8be232184dcddcaab345bba1c40bf239dd0ed", "enabled": 1 } }, "apcupsd": { "100": { "checksum": "sha256:175308edb201092c22791f419d32da3f661e7ccfb9c5d5855ad753405c10023b", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:a1410f65d6bf017caedaffaa59016877686099fb7df3c4d801136de79a61795e", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:a8e9d90aa1188068ca66be55c4d8abf9982666171bbdd8d4da1f2a254c34a080", "enabled": 1 } }, "arpwatch": { "100": { "checksum": "sha256:2cb8afd237d6bc5693e5d54be1a455b6ed632fbbe76cea406163f9c48d00e79f", "enabled": 1 } }, "asterisk": { "100": { 
"checksum": "sha256:0b66b387174001e926cf1454c3516bb32d96610a0f598065fe6d7a917ca897fe", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:dcd9e7f5e71fb9f7aace30b5755efcbf85fe88f884d4253cc9abcad1c44e5f4d", "enabled": 1 } }, "authconfig": { "100": { "checksum": "sha256:bdb8072e463c84cb01e6933093428be2b6ee5299d82e26730b12dd2b66d89355", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:a89b04c7a40bb373de2bbb0a2210cca454e7d4a805321fbe65462ae5551db656", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:41ec4e0c5c46118cb4dfa8c8b1834f330dce4ffdea3d534a8d5007a63b3e5262", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:7628cb8340258102798a6e36902d0210e2051ffb9fb4f7a1e4c62a612edfe6fa", "enabled": 1 } }, "awstats": { "100": { "checksum": "sha256:9b92e64a3331076ad443862aa2ba98a2c4d9b00638bf19bb9726f572dee5eff4", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:32cedcc57f6a973ac5adc16d8df343fc1ca4b3716f7cdcdae0d2490a6e5765ac", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:d99ed290beecf2b10a557a21b06b63cabc28dab4050f2e7197d2cb9e30519fd3", "enabled": 1 } }, "bcfg2": { "100": { "checksum": "sha256:ea510637d47b7fabc3f617f8a6f3ca3172bf9215c2d6b64ad19cd5d8819c8b6b", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:39520749f8aba46f975a87187975d8dcd014ad67d22515951f51fa3fd1b0478f", "enabled": 1 } }, "bitlbee": { "100": { "checksum": "sha256:bf04e481614825a35c26a547b19098ff1c8acd0d915c5b4f938b9fa595459d00", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:ca870c95742bf987a2e739286cbcb998b58c091a422251fdd8de57228b28fd96", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:7e4b2b3df3962273436b561c806c816fe4b1e5d6781efa33a7109b05f796edd7", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:da457ef2ce595c3bf9f70697029ea90e96472ae562f685a7f919a7778a778d09", "enabled": 1 } }, "boinc": { "100": { "checksum": "sha256:d74bd3b6b3850c30b5bbf77822ab82b43f36600e4f76cd68674ef361328afb05", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:4ccf41e247c5a7066042a0ebaae492805a1d640f777e8e771701f340a76bce30", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:46e55021d6c4cede091a992ab33521bb1aba4ca1d44879d778973b279204933c", "enabled": 1 } }, "brctl": { "100": { "checksum": "sha256:f9645adde2441e43369a255c6a194f01c6f5800347ad710ce3e147df884b98aa", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:603734d4772f482f282eb217c03647f705d66de27fc927c64e02787369b0f78a", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:326d2a188603c908cdae3c9dcdae6bda37b98ec4cc23f3b31878e2bbd0cd33b2", "enabled": 1 } }, "bumblebee": { "100": { "checksum": "sha256:e8ca8d5318a68243441fdb993fbab6d566f7462fd5557b55733f8ddbfcc4b276", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:86fe9c1aa8b2d7a6bdd9bd8d0c7a41a7ae0e4e14e32eaea6cb920367c2f495d7", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:1069377693a5d730d57e4ddd6f73ce20b67b595aae90a16459e852d238163b48", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:880b626c3d04c5669d64ee617ee36a18566e91adeaac67b9527b0a795543575e", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:b9256764ca5e34142e8cffea57fafc2fa66f78dc8c05761f97fa9becd1d77311", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:ad293ee5e252966d14fa6bf09240f143460df4b928672a33a398a5793777c4e4", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:dda8d62c3bf2503ff9762bd031c35a76cac8059d08592fe23e4d3fe11b0ac8cc", "enabled": 1 } }, "certmaster": { 
"100": { "checksum": "sha256:b431dd84f2c6b971bc573674fa6c4ee2fedf910b0123ba5d9acb5011c208fd72", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:965ec65dfc98cbabce2350bd52fa7ce92c2f4ab4704348f1555f2a3d9edfd1b8", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:77f0299f67e43927eacb553d1002beeebc3098b4bee64d8dc3dadb8fd23fbb5c", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:c78b908838f1d64ee9ebb0a51b7fa438527716936471a573e1b4b7c393bd6b8d", "enabled": 1 } }, "cgdcbxd": { "100": { "checksum": "sha256:5d3633e0b77db69721e4f64167d7e5f7779c3e5fa76e095d25f8467f2a0bdfec", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:9368c6c54bd5ec6f20e4c3b47c86e60af07346c4e86e525b6bd7288b54b7e774", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d31ce9d2fe78cafcd5e3c8decf22ae1e9ea6f74026ca65b6320afe9a33cd609a", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:7d9624729861397cf7720c2324c65489a3d30485e6a884ab1ff9a8ca22efa678", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:fc169721c78f5b0857ed8312e59ba4c134b685c4c322dae242b92e815e35e6fb", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:02c20398b9eff51ed431b7ad739a6015d2451b4bf6e3e5da380606d85a77852c", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4f90655d2243cfc32ea7436a953cccb8a34af895f83361235a3a5cda40dbc75f", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:ba78a422a10b65591c48cb038f8a55614944163f3140275852d293fb0c548bfa", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:481f5fbc7810a5a81851edbe5a6b124141257f5fbbb83d8830ae0a34924ed3d9", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:8f8fb986f15b8b7c5c250d250fdbbb2f78874e13394105c9c486488a16e94c91", "enabled": 1 } }, "cobbler": { "100": { "checksum": "sha256:e0e264b9cc83962dbbb27c152a72f01c6a355467c4e845b52e65c8b88d8d75d6", "enabled": 1 } }, "cockpit": { "100": { "checksum": "sha256:cb7fccd94903a6e256a586d758085f6f59c0f8b1c5b4cb99536915526d2224ec", "enabled": 1 } }, "collectd": { "100": { "checksum": "sha256:7f08e2e248d33162dc9b237c37ed3a3dba0511bbcc71d87482e95093fb8c6456", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:86e58c9f12c519a2c3b090b64a276722374054ea900c775b2f8ab4ef2867dcf0", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:1d57ffaad6b96e3ca8ac82c23b52d58d81e1f69f5d54a648a16da8ffa8070e53", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:dbc3f2f0c12f9aeed14056fd7e7c46a4ecab3569198f891643172cd032f3fc00", "enabled": 1 } }, "conman": { "100": { "checksum": "sha256:1270caf15af248a487cd5ce728daae2699ffd9139823c805ec49213ab1c835cb", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:56fd7d7a550dbc4188b93afd0fde8c706623b3a5d26db265ee016967ba4ddfee", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:5bd7a7acc191766583d933b04321e64657138959bf40a4d2986b013b942c4ba8", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:301be7dafa07cdc68b4e5ade7e1a07017fab3efd85986bdfab7faa9466a95836", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:12b2e3e7314bda4e76d3883901e6470927e85343f742fb44b174ce968f1ad8b5", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:40ae5f173004741838002644e5bff73cf16f2f3a1928c45fa17674f9a0df5148", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:1485a6d64d00619898d2789d27391f2a57a7fb1f0e8c73daf59baca8641564a3", "enabled": 1 } }, "cpufreqselector": { "100": { "checksum": "sha256:687564eb09acf3e7f1475fe2a133941c36999bd037aa8a794feea2d9f2c26385", "enabled": 1 } 
}, "cpuplug": { "100": { "checksum": "sha256:c16e376ff6c51da1911e68a8a7d42f5730eda45febfd0875c78cac4b9cf6e78c", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:6be0252b3c6bcbfb4c51dfd3ae1ae262f5de153234917ac4d342b18ae0292060", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:06dd65a4361bf8076c14b322dd30003295c0b9d75bf1ae610961b13a1f9431da", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:3d5e5bbf131d98d95f7f1431893eb137bd833dbfd8469f9c386d72bb4e8f9b9a", "enabled": 1 } }, "cvs": { "100": { "checksum": "sha256:bbc8d76cc8609849d5b078c5b2ac7364470a06d77d67b97d5f58429d7b679e33", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:b1a41211ae3cf69b819df517eccd0fda2088c27685dad68de64531b9794ec518", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:60defb1f6feeb1d607734c4912e52e03bf5b0c27cb6f31a37fa7e05f3497b323", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:1034e2442c975dd2ccf84791b1a826d02032f13762d57c3485e51e2b9a7dc03f", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:40306590ef444152ae18b65040d85442c14853a9cc4c31b0224c4d19517d66ea", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:24559eff82b251f9814ae88c36a7cbacda1ed419a80145aef545306e88cb0da8", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:50ea4eb05a06315449092c939e2307436ac6461e47ca69f0d42cc4e321e86280", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:06e414b0a83b49968f62018cecde48dcfe68b2e9d699915367b3e04461188a0d", "enabled": 1 } }, "ddclient": { "100": { "checksum": "sha256:73ca2525a14e3161524f6e8fc0d016430a536002f1cb3833db1334670b458436", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:1bd00b13b9bda18274a771d66f7cba8fe62e5e95ea8f51415da6b1fa1336df1b", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:03b01b781881cc60438bc357bd60596970b8ac019b415969bca8a08358fcbfd1", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:2ad95a78468f7f4ea9a8c044c79c0a4ca9924b41432390ea2863a85c806c9a00", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:c30c819f142d3c719d0ec5741af5a65161770ff140097fe63f7559d55b897500", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:50efdc68200d27ce1a5db99a780aa7b0e84988669961d436d348c7bb8310d181", "enabled": 1 } }, "dirsrv-admin": { "100": { "checksum": "sha256:8d9234157484f6ae8ba22039b44fa19f4de8137be9321e5da393d72d85d89487", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:8b834312a2cb99ab89862f839a1315e78794dd92758785f84c9559285dfbe679", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:2c7fb8c6c52f385b819713f0444a96cfd4e65b7dcb3ca79b932cc12ad9ce903d", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:44f66c5d4f635600ee9d0ba3fdea3896218f1420b5ead89e0f22d71a447f9e97", "enabled": 1 } }, "dnssec": { "100": { "checksum": "sha256:49427a9e92b87db77706e2b81ece254c99d3cd6ba020211e2afae65fab7ad066", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:cc8c3a2ee0233a7f1fdf38837b72ce5fd15efef782a36ab4b9aa2ec339b46fa6", "enabled": 1 } }, "drbd": { "100": { "checksum": "sha256:b66be23c1ded4e548e5369b744c7c2a4dfd7065582517525221177ca67657525", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5dd7221ba40e9b912367289fed8ca116c14da4fb8bd7f28f421c4008855bb9fc", "enabled": 1 } }, "entropyd": { "100": { "checksum": "sha256:0f68aeeb1da72efb8c9b1bb7db0a4180b6938672b16f33d1abcd65f5481d85a9", "enabled": 1 } }, "exim": { "100": { "checksum": "sha256:f4c4473ee49394e0e4629023772464a046c476f92b4a727acdf9f6c92711b952", "enabled": 1 } }, "fail2ban": { "100": 
{ "checksum": "sha256:2383cb88b81bc5d87be9f3201a42da526532c4ea8e6d3b3f5023005c0ddf6f17", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:913e66ac5f5ce364e5ea556acfbf77845c25a4beb5ee64599613aa00127c1492", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:63f00993bae4285eff5e993d208ea786785c4331e6947b3a48a97d31145b2e98", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:16c506d472b007f7d36850810ca0fcfd9482d30ce9c0ba790174b78294fd1d74", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:bbf58446f30b93de19e5a19087ee012f8e347fef5e7e8012e64b31a0ec21ab09", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:b61ff17eee03141c9c7bd79d63331ecea733cba4b5b43b87d5141a40cdccdd69", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:c5540b8385c84075dd657e390d77ae886aa9d74b65444b9aa1d858f375819a8c", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:c1ffb7734a0359a7390830d9c6477ab61c45fc026368bfd5e2246523a6439464", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:9af2291d75a2d643f53ff7a98bcabf22effb617329178efea45372d714825de1", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:28bf77389f3e41743b30727a891609172a891466e92c28a919f43e628cc23a4d", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:140caf542903419ee2471fd99ab06aa45899c400402c2580b395b182f24bd225", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:7e8456fdf7807b30e1c257e568ba10305696cf5abdebc70988c288079884d46b", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:1dd6a45b73c7ce77a87af1e87354ada5aa5b2841aaaa045a6b4ae3c4d09f0f8b", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:950d8be99d5349a3d893ba601c518e6b2af0d56c5b55514a45dbd8a3c61c9ecc", "enabled": 1 } }, "gdomap": { "100": { "checksum": "sha256:5040cb99d007fe9368bd37a9a6bf82f891c220ef652443896a0f2f6ca6f818e1", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:f0155b43152b6b4b850d1c4fb7daf16fd77992313b8be314ddb4901314bf913d", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:a60d07665b0ebd25fd54a9d82dad5eb7acbc11a2842dba56d7b9524d26ce14ce", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:5eaccf209092db49c9a48d84e1387c1de76cb153c774c0bd615c001afab28664", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:b522382b64f36cf387cd892b45e916c861bd0a09697bc983eb55b53b0efd3081", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:2c51d19fca6ee40e137245ecb425edc77666d75c42ba583bf74cf13f10ace055", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:420b9cefa6bdb542f6da10de7b36704a91509cf64cd2497e5693a858cfca5e41", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:f821aa6ca5837a2d2de8180e74c267f68da951960c989fb13ebde5833c93738e", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:bf30c4945be0065672fb47f70ad251b1079ada339f61f2679293cb0226d0d57a", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:5373b2332959d6c41c32160018274ab61e3f1abd0f0a5cc2302c45b141a39a9b", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:7528c47be91a81ac19f2f54458309baeb0a232d83a1ccb2bd89fbc8cefb1ddc8", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:91f43e4d5ae283f0aa13c49efea93293dbdecd2b2f4f75db89371eda65b7523e", "enabled": 1 } }, "hddtemp": { "100": { "checksum": "sha256:f170e1da6acae4fd7108d22c8cf262916e034f0d3edbdebf3265a922a5355373", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:8b15f72328885c08bfda38082a62feeaa2c6692223a4d2bd1a572820d454a742", "enabled": 1 } }, "hostname": { "100": { "checksum": 
"sha256:e9fc1c4032c0346f751e1ef8ad1b3fe3425401b70a6c4354d4485472288e0bc5", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:f70b198e5a5157722b69dc89109c4074a475e1085356cc610cc9b700567c154d", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:370e9eea2b927a2715018b667e9a56ad09af301a90811cd9b041da79f5384b38", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:b54ce6f4960a02d35e19d60bf8a07f7866514893e3193a5f4822c8580a46caa4", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:663b35f3874583ae074924bc068a8dc4c7c144adb60007da6103d1e3505ee37a", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:dedaddef1d7447d25a1e7ff01e60e4545606e556c6770bd3fa94d9331de7a5d7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:ae408578a7160f2feae10269365558c43d9570b392642a92cc20f8ad47c58cce", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:7ff95566a4f2bdb8ca3ec67acdade39e35fdabc57c2f00b989bab3f699f997f8", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:9ad99284192a443aa582e73b46667388b7a219dafae8dfce71a58a82bbae2f6c", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:0e41289d8dce065dcd41fd6cc1e1282efd4a58e7f9e3a2f1abc32f520fbbcc1e", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:32501ab66def044fbc340cb5c656d5743c738bbd6fca5626c36c687419cd8d32", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:d15656cd91a4e4e178a13f7cf910cfc552cc30db881a11ec88833f947edb4561", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:d34fe186922c0e5726ca361343ec3846833ec3e4ab9b019b3d7bac1337383a16", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:d36c66c2c79d338c61c90d4136433e1e3a73435e920eb36d70682dfd5e147e59", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:5a674017cc648e3262757464e5413503154cc1f593da545ce2c4f946991012bc", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:d72428ccbff5521367e00699c142bba64b2bbd44fed35deb29f9530cc0448378", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:15650b2f39ccdfbcb1e4e867a35fce3c2768097e611e0c8ad9cb79ae6c66dd58", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:ccb27142f793095c79f531aae924baaeee5914c84228a09c09b9eca839f3524e", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:90b42f610fa328cdfb98bd0450bd052566f203e51e4a913dd6faded6da7fbe2c", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:5ad49d140265305dc72781a6826d1de4614a33f83bd512acdc2263038ad41206", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:d910afd1bfe836543ded50974dc24ae7bd5fd2609d6a9b2403316dffcd39832d", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:d9a67ce1976ed2e79826d25f33dcb0b0bbde6c090600b605bbaaae45856d12f6", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:9ddb71271d0dbe5cede6179c0ca263e297dc6b65197bde2f7b14ce71f8dde369", "enabled": 1 } }, "kdbus": { "100": { "checksum": "sha256:5969c78be4a03cc91e426bc19b13c5188b5bf8ac11f5e2c21c098c3d68a7e3e3", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:fdde3852d1decda649133c6345680f9353b86a6da2a98a83a8be101c9c25f103", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:66c67280c70a9b897b0f952067438e0eee05f9f48913508b38d745ef88747f32", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:c1177567c7bf67bb2d0de17760cecf56e0bb34f50d6fe060dec64ae97a76ecdb", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:826fbe83705494e009b242b88857c425eacba49aadae506ffa2012c80e60f7ae", "enabled": 1 } }, "keyboardd": { "100": { "checksum": 
"sha256:f199811d9ddc8db83864a09c543567fcb2f117b3241967b092bff7c9fdbfbfb6", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:b0a7227a870ea987035e0cd524ad956a68287d0a67dd7135de41c6d5977ff4c2", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:488fb5fd17cf1f630f3e48a853da05f86c06fc58219dc2ae59251865734bf800", "enabled": 1 } }, "kmscon": { "100": { "checksum": "sha256:d64019b11b6a37f6cdc5579d56eb1e19b6a7231501e1cfe2a838d26a2eac6033", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:00070d71dfe2632491305387ffb264127dca4387425015e4cb013d6bce5f95c3", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:891f082452240ad2e726bad777ea787d0f0f8695cc2a75f7439a2badda030d24", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:2df6f3dbad4a513ee1c113e496e8d2f5a19f56015f4a21e7478f2f5b53f36359", "enabled": 1 } }, "l2tp": { "100": { "checksum": "sha256:8e4cb0b0e0d1293d669de0b0e50f68d6d6fbe8e8d830a236a1c0e676f2326fb2", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:d0177bb5873d0e6f9595020a8f39ba06b19e4636ea610175a3afef4aec2719cb", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:6d5f128f2d4fd9137a7c70d0d024703547796a71f70017b3550a31d3450e0435", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:e7eebd050230b358b43435d37ce308c3ba15e2516f4045abf7d26f03ebfbc11c", "enabled": 1 } }, "linuxptp": { "100": { "checksum": "sha256:4132cd51913a3044e453ed0b972db2ef511fdc7b2a1b592d1070177651066ab9", "enabled": 1 } }, "lircd": { "100": { "checksum": "sha256:cc81b79d2834e58bef7928f525c1a1eee5547e81d195444b3bc2741e396ae46b", "enabled": 1 } }, "livecd": { "100": { "checksum": "sha256:805c7bc4ded621b44ecf333d558328e115bba652fcbc91f436cefc948497688e", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:358c4b262655cffbf20f7484aedb22f094509f44d52a1fa3efe3edeafd99317e", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:26f9e78406ecdc968ed670b32db1d10805e66875631558f092f08a6e1f2170dc", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:e07d92775ed25e7a3627bf977452844c67acf473b33075475f433f8be76dd755", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:1f946da2054cc1693209749df12ff01ab6456247d6225733aebb3a7d70a46e20", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:70546c4b3d01f15bc7a69747dbb12fc6bcef5d899f6301f62c0c612c7069082a", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:656067c78ff1246a1a758a213d44307f91cb79336fe74a47015af425e58266fc", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:76cc40f1943fe21959793499bffaf35d0fe53ffc3f6c5a8b31eb96e738a286c2", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:cf4450b03e28762040c29f2a28af238cd4905d1c6bd4c73d656b266c7b9a8a6c", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:9358dc35659b9570d3e8119a088b2693d7de505ea25996dc139517a857888857", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:1247dc4bccfbc9ee42292db4415b21ae00bdef3dc2faeb267f045413da4a1b1b", "enabled": 1 } }, "lttng-tools": { "100": { "checksum": "sha256:79e4a2224ede13cd5f2c0e6e7c61e83efabaf1d05b86f6f7a710599bfc48edaf", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:f56137657dd61a1a8a8844d5d1db01fc03330d17e05457d03f64756b344c32ef", "enabled": 1 } }, "mailman": { "100": { "checksum": "sha256:e47811cf3bd8204eaa02c4aab92f3d426f0a3ef97161e1579845d1e03df1fc1d", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:8d447072ab5005ead27f1cb4d96dcbedf09a11182f660c6f59c6d56fd81235d8", "enabled": 1 } }, "man2html": { "100": { "checksum": 
"sha256:224584babd9e83c242d54fd8c5cd03379b0556005268aac22b15734b913f12e6", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:ae44b8ec7a90ebbc45fdafe89663197b36e47120ad90eb22b475939055ea6924", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:c5d98ec368b145c74b4bf0ea8da3980b17af0c2d00654c5a6973241625f97b12", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:43f1c6f7cfdeaa26891824167cf637a8670785c2674b45d85ce4a7ac77190a36", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:f0f9c7367e9bd196aa463916bd5aab02f6966dad9564a0f2fd070bb2e8410aeb", "enabled": 1 } }, "milter": { "100": { "checksum": "sha256:db190bacd2b84a29971cd1940cd15d606abbfded5c9b956894717afd91fc7a0d", "enabled": 1 } }, "minidlna": { "100": { "checksum": "sha256:0d6ac660d641c1cf707a814ed08e19b9e21547a3eaa7134cab84dbc5fee6b5b2", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:dd2ab85bcba6d204f9dbc7304e8a4940e5d1733d4b9cf4fcb0f4072982c585c3", "enabled": 1 } }, "mip6d": { "100": { "checksum": "sha256:406edf2c78ba0e692d5a78f3c5ca8d641d00131b143332adeaad9f325959683a", "enabled": 1 } }, "mirrormanager": { "100": { "checksum": "sha256:7084de59beaaaf4f630357ec53beff8d0a0ee532ac180fe58e23bfe98f1fdaee", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:7e7e87e302bf847a4c59d69e5af60729e61bada0cc5d6ec17a25a6514476cb48", "enabled": 1 } }, "mock": { "100": { "checksum": "sha256:ae352eccf2f2c9ee8f0d9635517d9ae3c9bba83c617deca8f989e2aae8dd35fa", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:84a60147d2b0121ff6ede6199583cdb5619480d015b2a675c6a0569f91c12d66", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:67c3914aeb25e38fc6bd0793fddc41122dba1547d54e91a78065545fea3b9c87", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:6030afcea9f8d46f25dd7785737edd25eb0f1e50b76eafe4d9103196b722d47e", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:6ba3a594d01a11bc32e7cb554f7386314b5089eb4416fb776edb552a7d53c41d", "enabled": 1 } }, "mongodb": { "100": { "checksum": "sha256:1b2d30558bec7fc08d1d388ae2bb0becd2233c99c9fb173fd00809786ce5eed9", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:346e172be35df168eb0e4fbc8e176b0fda87de9bc5787f7a5ab7667cfe1e3c3b", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:f66c53d993dcd47ea1ff3d797f8fd69fb8161a4ff8a59f54f66a2de9462a55a7", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:7696dbb77c54531cf2574c7ede9f085cf64611dcf7a612530dce2de19f7a8b9f", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:0f67c18c9101b53f57ef857a74d6044701e1d2c347f829a03c0579c545fdbef3", "enabled": 1 } }, "mplayer": { "100": { "checksum": "sha256:f82c0a72506f1011e47ba98e51d5edf906f58fc190d797f5d1a0b8e5cc7d0762", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:afcd9267261b334900420461279b8555fdb4bd783af880fa4606d8afc65e0712", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b0f9753424c504a288f55d495105f6d475d69287b718190ae5192cf7d6ddfde6", "enabled": 1 } }, "munin": { "100": { "checksum": "sha256:29f87ec15fa19e975c83288d55e56bab64855a24c4d8826fe4138eda9a46cc97", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:b028af8f4e726feb8c26037f7c6d6f97383977bd5ee6141ab4e8e1d096d6481f", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:e025b2dbf50901632da0ee2aa658105a322275eb120d782cbbf25f2895231154", "enabled": 1 } }, "naemon": { "100": { "checksum": "sha256:a19b3b0540dc52d9506ca7e5d804c2fe9115b3ea28bfd9273030e841e12eb277", "enabled": 1 } }, "nagios": { "100": { "checksum": 
"sha256:39ca80027ac8585f368bcd57f555ba87bf409f7b7d6c4292c09fd06cc1691c80", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:ef73850f29b4ff4ff904d506d545bf366fd1e7c2ba82a7a7c9a4513e3eee45d9", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:2c9356101a9ddbec94afdd12ca669ba93a1d422c302f9e17b78b18670617d2a1", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:9a32ce04c1dd8e120588c15b3057f838bedce8f14c91576b667295d47800e0ad", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:5e0a20ae09b00fac69ee30a0d55ff73fa692d8350c9c0b0343af61e4f0dd654f", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:9c67b21155929e43e4efd3fc81a85fddc9f1030b47ee4a275789014c1311b972", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:85cac2885d75522eb07189efcc3feeb7775fc6daf5cf3f1a28a1fd2109fe148c", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:b5b133d60b98068eb9480c54285050ae9b49d2fb309eac8994cc91c865ee02d4", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:59919a89d30a5d4b60d6971fa636fb62605d59d214ec614adc279f6cbe2c2b27", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:578bc975477539c659f3608b1445a0c7a9bc7c3f2dcf65b3e55f3a3af89ea564", "enabled": 1 } }, "nsd": { "100": { "checksum": "sha256:d5b03cdc6c8bbc222b8e3d30680b1a7d2d1a49837e7d509aafcf6b2a3a32195b", "enabled": 1 } }, "nslcd": { "100": { "checksum": "sha256:18b003071f4c36307616f7d5de8cff6d4e376af31cb96ce1a5ad6ae3011dfd09", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:f942c7fbe636b9d60327ef9dade1120340c16a2992a6b50db5fbaecd44ffd63d", "enabled": 1 } }, "ntp": { "100": { "checksum": "sha256:686664a71e74b0edd643ab9d556b1aab092fa707935da5ea928a66f54a3c84e0", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:dabc5ce6244d0b0939e9a07bd6bc232e8b666529a0b7b29527e586db8224862c", "enabled": 1 } }, "nut": { "100": { "checksum": "sha256:653e708dec531e483992b25944a689ec9369478d039a5ec62c98294ab73ce8c4", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:4ae55fe839abaaf0ea52b79a5c8f6a906575b83cca29532c2dd52337fb3d5790", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:7b2c87e864b6008f734e1effa48cee1399f41843b9d80d3fd95fbd19e058598f", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:9de0b544b2373ea0f1b7217f9179898479dbff0da36ea9857783de57d06585cf", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:761bf911674d23053eceabbbda8da16c73af5f300929a33a64513dc6e3b2d0af", "enabled": 1 } }, "openct": { "100": { "checksum": "sha256:5674f8e8c975570649e3065460786cb4521a86370bffef5a9de18c69813fe68e", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:bdef6dbb24ae22548634759ac823a8c3e21fde6368cfdfd742480f7027e63ddd", "enabled": 1 } }, "openfortivpn": { "100": { "checksum": "sha256:1a1bff55993510cb6481383b299e1f1a6349ec76e4947bfc8c5b1347e4d30bf4", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:ad3f3f3ba4442930560b291c022e674e6a50e4a37fe027926299b2f6cdec14bd", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:329e4b9d1df5012ace94cbe9cba7dfa7ee7d9f242090072c71aaacbeea78986a", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:31cbbb069354f984e4af75b387778fae1ff4dc6c3e60533357d005ffa960b51c", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:c0e1bf0a8eb50e0b41fa69bf5b65e2a7c324e4bc7255933a5d2bac3b9ae6f4de", "enabled": 1 } }, "openvpn": { "100": { "checksum": "sha256:a4d12ae8ad77d65d0fcabb20aa4a83886e782d732123f686f88a7d7472384104", "enabled": 1 } }, "openvswitch": { "100": { "checksum": 
"sha256:a54f8a8ea5abb8a33734ecef9d9ad1c0dd090a6e0c5187e80de52f522d2d5e39", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:d6b7bb8f7749265bdaf938abecb2f8f78c6e9e8dc06c1c26b48da227af5a8654", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:67e31eec391bac337ebacb78c096589af4b7e8be6aa05c34cf187ba922a2abde", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:6635ff0231bfc3d88c771553d495941ee0f98871edfe6c86205b087186b3a72f", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:5b4531e9231d399ebec8e6b6870a812c6a64b2daffde35fa57a009b24a01809f", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:912a1c442559d6ab48453d87e2b997bdee3017a54a0b60aeaf7d4603fde0f34b", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:456b3520c26e5f2a913437318715712ae00f64932a27ab1bb8b8b42e0524fa05", "enabled": 1 } }, "pcp": { "100": { "checksum": "sha256:5302332fba7e6724ab7a3c32bd523b10322c20011c6e42ae4e769a49f3efabdd", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:2ee37df066a9ff80439b08c092809f3661e2f9a8ad02134e839627fd23a20c1f", "enabled": 1 } }, "pdns": { "100": { "checksum": "sha256:a1a10cd52eb9dd15bc1ccfed440f6b3d235edc7405a3932f81805d8d94000245", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:4280c40629dd111fd1c89ff867ac72d1e7ddde49dc3d286637e6a86b868e2303", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:6461acd0385c0b1a32bf646fc9e09da0c7ca513954ed8fe2a03f4ee7f6a64fcf", "enabled": 1 } }, "pingd": { "100": { "checksum": "sha256:f7536a518a046b793ea3f74a67d677b878baac44b28268c5ccecbf10715d89ab", "enabled": 1 } }, "piranha": { "100": { "checksum": "sha256:11436fb7942d28e3eca22bc078ee5475f632d8447008a6414f337d4bbc3515dc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:c70e17d1a4d347b38fdfbb2a5dab292e3e0c538ea52fb6cfdef2714e130da0b1", "enabled": 1 } }, "pkcs11proxyd": { "100": { "checksum": "sha256:c9582c89cac1546fa1e5bf9802c5a322e52e2529256f9e5922d5813e40be3646", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:ec40fbe6355370fe69a8ff343744654b06d4134c1518c64269be1f3a49083968", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:7aa52d533e28a3ebf76d879c24bb4e0a58574033d5af6d4d22b716d1156c3f90", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:b32a5cc38b8edcc76b94862cee0c822a5b4d095329f53ab6f7cb014c76346e8c", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:686d9f7652cb2b3d7ce6af2aa620c14a6cbbbdb8d26b3630cfbf6bc34d9e3e6c", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:6098bd8a4f449c01dc7e0f4509663994259fe8848f2f21d1319bf7105bbacc4e", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:f561aef22cda98a94a74bedda09645e50066a77a23d3bdcbb1143b0c62ffe7b2", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:9de99e881e9e2e7e0b78629eec721840da4aa18f78ff5a06e46b7a596c28a09a", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:3101c4c1d54f3e175dc3fcff001c6937a9ffec7781f4095ea38fea88df7e8067", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:a734cc086d7d73ef2ffe7543f82dc50b57619e78e60664cb67a9513790f3335a", "enabled": 1 } }, "postgrey": { "100": { "checksum": "sha256:ef4d03336b66c1184f352f9b3fe8004d870bbf003673d4393bde24ea14b056b8", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:83e6712ba7343dc1346e94c51b75b05839f78bd24f9324d984b7aa9631bd0377", "enabled": 1 } }, "prelink": { "100": { 
"checksum": "sha256:df050b0d180947788ab45862c4627ae640c92cf0f6a994a685e4cb5fe46bef76", "enabled": 1 } }, "prelude": { "100": { "checksum": "sha256:88c5fa3da64c127ed6e688f9eba5e50a8f6f98ea3243d29b8b0bc0375ef95420", "enabled": 1 } }, "privoxy": { "100": { "checksum": "sha256:e4a84567c63c892d4cdda3a9a4b15ad5188c093da679a354f00c43b6376a844d", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:98170eed35b67b9097514bcb044a18cc3f757af5f91b5d870ea707d6048cde75", "enabled": 1 } }, "prosody": { "100": { "checksum": "sha256:07e999e033252b28ae41697ddc23b42dbcf4bdc143c9eb1c55475aabc9fc9caf", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:7fc3410de486bf89c4d35989937f424b435c9c4f5398f47f9c840b146197c6ac", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:129978bcb62fdcaed728fb288b321c204575246eb535354e02bfd83089cb0ded", "enabled": 1 } }, "publicfile": { "100": { "checksum": "sha256:9cc75080e25fb5602ab266f1c0d0f16843bdfc561e7af6dec32d669e31bebe98", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:a41fc5d1275d548510a2be0180741f952f0f696f443eaabf03c1abf3f80f499e", "enabled": 1 } }, "puppet": { "100": { "checksum": "sha256:81559a7d5e16e228382840986ae0e414d4a78163a9b51b5d9c05a58e07574e8d", "enabled": 1 } }, "pwauth": { "100": { "checksum": "sha256:8590f80ce91ddd4862ce2beab9ec64deb66d99c5583ff5ee3cbff2e503caaa37", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:917a35c0ec48acfb5166c937e97269acac39541acebad9c1c410bfdbcb483da1", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:cfdb156d23ae6c99b3dbac171ab1626202bf1ae7671fae9f6d6f7241116638dd", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:eb4881c554de7882b4e5590a8efb35a758fc1b3d61bc1502632d6f4e571cb331", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:27d1fb8e99c6d1c75fc8efa8aeaf4303d0dcd8d03cb2992d968a3186d648f4b9", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:f0b2b81a6670b7640d49d49c364635f39272330f08bcdaa23c681bf2ac64e10f", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:791a60cff31fca43e01aa4bfe3a57c5938015db44fd1f64064778dbbcdb6e2e2", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:1cea7f5b37f7a0e722ecbccaa09d95db2b175ec125d62e3898a99081c51c6f96", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:a94b0b917312a73eda50ea641dee49eb00f49df286133fcdb13267fd49ce5d1f", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:159d40315f3f5086a31e6f0a6a90d342783d6f0c97c5feeb9c92808c7345adcf", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:a61f7efd50387ebfd35b675b22a8cba86c6216c0bbd901aab5e8674b5c442777", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:276a24e14ef12f5fadaeab2883d501cb096e01a9ce1be2178a5c50ebfa6b3fcb", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:61561d5f14d9a6597d6e312f5429947baab045d01a729f7cc34406e859fa0015", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:f40066828d25674c525148f890d9cc84ddbb203f5a4aaad616ef2cd3a497fdc3", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:742f881c1a4838ecfc1a55a7f3b78a72267644e3a64e3ec45a191599b5bd8532", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:5dd2b902123ef00065db6ec8d173f37baa26dbe43566bd5f06594ef1243fd5fd", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:67f232676ac23535867e2494f04989dbd6b9b6d4bbc67df67dc2edb4d31a8be8", "enabled": 1 } }, "rhev": { "100": { "checksum": "sha256:ee2f26beaa5c6a5d25e03ef9ab30302d6b29b283283683421fab52e29e47fe3d", "enabled": 1 } }, "rhgb": { "100": { "checksum": 
"sha256:39c550e1c8b149dc6f308b0f9ef238315208453ee064bb1558eff9137531840f", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:16bff56244925c7696fa2da5a4c986132488c352149cc88181bf6b4143fc80ba", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:e999510837aabb3ce118ad61225a846f687588e9a321ffe675b56511191bc323", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:9000bd99784bc22ffda4493b4985e8c5a2e65e87aeaa1cb96ba82d367a27a8be", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:c72c61297cf864a1abda8226de08039c8ae0212808d3f7fd8725b53b955d59f6", "enabled": 1 } }, "rkhunter": { "100": { "checksum": "sha256:d48bd9c5789f4adc396773664402ddeab432caa99597267ccdf24220948e5b3c", "enabled": 1 } }, "rkt": { "100": { "checksum": "sha256:a9414e82cadd2876471465737bd8322eb833e296869ebcefcd9e722ff717d350", "enabled": 1 } }, "rlogin": { "100": { "checksum": "sha256:a4b2e25abc4099a0a54821518b7c824a2ddb7544fb0b5ddde9a0a9be159ac1b2", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:5c867af2674586cc1c41aa3203e3704a0d1400d344a8e257bc61e9eebb86ad03", "enabled": 1 } }, "rolekit": { "100": { "checksum": "sha256:73382d4b8a12fa161dbb5ba36c94e7f0b1f82b1abdf0a4f07ca6c981e08f271b", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:1a2503ebaa997c6b6efd5d2343ea731f73b2f0312f2e8d5578dad2c8a84a94fa", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:e423284f5ed36e7b6c52f581b444a981d5d1c8af6c8dabe8c6cb6c71d3f49fb2", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:53831134210db04fe6e6b0f05e20b8b7307ae8c11e774faec9e1b3aa2b02b5dc", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:acbd671bd661f9f2f25d4798f1646a51075f297c8b086ea9bd3133a00e356432", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:c6110313310591ee2a08b504b04ebd1b98f370b6633172f06ee7c0c7db0a963d", "enabled": 1 } }, "rshd": { "100": { "checksum": "sha256:1340ab5daac926cc1354452869ab5aa78d27ceb110543624d2ffaf93773c394b", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:9dabc52612d567e728786c007f5017c7032c02be3a9201521a530fc91ca789f8", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:33dffe2764dc45bbc59b406a67187c39864412bac07ee089bda30ef09cb70faa", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:9d55dfe843e44e8a93c02ea28b14856edfdb1f820bb647992daa6af11e2dbd37", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:ea77b9f26c8fc61b7fc281099b2f16e75c5b196660fff55a95f96e97935a7a1b", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:4468bfdd23924a96b4cf8c6fa1a3fa606fdd8ac69b7cb17c16a6e39a95908921", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:c97b92abaf053976c89a670d82bf06bc5c7d561ccf03e3ff1ac84be6e01cfc5c", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:18d1a69de368fa621e8ef3234b8ddb40261ced880bb732328a310db5a62a7a0a", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:711df017c1f168e33245144d67289225439bbed701fb1146cb83e9cd63ce1f7a", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:093d9d9793142bb9a8c4375f5f368ca1a4d9beb0cd05329518f91bb9ea51bd06", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:536ce94509d38b40200debf17fbddc16ec9004463fdb3fc42890dde9b3eb56f1", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:57ecac942ea46af55728362527d70a3e135c3b4711688ddf62596b9a768d9fb0", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:2ab2f52e6bac063f176e007b39cd8a4e43012ea075d82af20fbb3403891b6493", "enabled": 1 } }, "screen": { "100": { "checksum": 
"sha256:7df09c8fa09e105ecf51fee797975603a2df8d15c3a0bf00fdb1d565fe4a6b91", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:9cf04d33aa9dec0b559c892fb20df89fbe1883544d4ac2d6bf6fc319f0a16663", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:e7f9a696e0958d6bdbd6696e67a9b4af62430456d0f278e290db0ea1ee9750b7", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c72355dc70789deb94777acd0b47c2c3ae628e8d90bffb0e0e320941e5ddf3b7", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:98f68238d6ca96277390c160adeed4e3e382d5ded5a88a3909cfebe986b849be", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:10ca96a581ef4b0fa1789160fd71fb340d8b1d13906b42fab6e9119033d4f942", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:3a172b4972f9271250b4d228541c78b0243fd0544ac983db0f590e09674f700d", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:f78edfcb470cd9929f45b6db29ae4924a286ab30a03f80b7bdf3699bccb98314", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:ba2043d9665e2fd3a9e2d103671bfe647060b93d9c02eed2dca3066a0ecfb81d", "enabled": 1 } }, "sge": { "100": { "checksum": "sha256:cf843c98ff4113ded675f79df694549b4f848aecb1295f0a510101e301fbd348", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:c7c49d28e52aba4d168e684b9160a225fbecab373bfbb6963bbe89c93ecb867b", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:be1825562f583305597e5ceb1298ebb60e42c4f270b4a7e3751cf9d9be1b1fac", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:14748519962688e62b7bc7e7c03ad91c1f815c5d33c63f2d60e03340f55609a8", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:9f26cf1e9fa128e98c758a6325525f8547950a2440b6582202228c3c5c2c80d9", "enabled": 1 } }, "smokeping": { "100": { "checksum": "sha256:ae8cbd09d519a42bc01063c4c16f58e96cb3673acb557dcd2d09af444d742db1", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:8aa5f2749eeaef5ae871dc903dad87611e369c92e9b3fc28b4944f75db785a18", "enabled": 1 } }, "smsd": { "100": { "checksum": "sha256:d36a762c836a0e4305773e352fe0f46657784b5d9bf749f02df9c6d15f68d101", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:62bba8f6a236bae902815188cedbb5f3090acf0829247e6808787f8c913d9981", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:68b5e9d408704e44ebf29ba76ae18afdcf6d8aef12794e8e9026997376ce12f8", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:eef39dec8d416650af3f9eeeb518b06dd9a9e09144aa579b6bd6422ba0037d70", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:c19dc2ed34c3d274f8e01647dc2d869ca06d4a9a3009f57c1845fac4d33ed358", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:a46a9508591afb1407fd14441c9c26cd495a3789e3c6792a2eba38a6642e4b97", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:8255ad891466762e31763d6f4791a32aa1eea1147a812020724eab8eb07c1916", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:ce5ba130d5d0ae5fafe8f823b824856590f990ad7c08aa0a5930f5060c252021", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:4170a7354e69ed60e0268389f74042e02a2511a4451ca20b97a63213b8881e1e", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:a4b4b395d2185abfd68edce0f813103ccbedd5d9748f9a41d83cc63dd1465109", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:5b0cc219f31e88f2fa78bc31d9c6fe6c7af29b4832509635672ca9edc79409c6", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:29cd0921e9effe356c856c3319488adf66c794cbb7d1610e5fca2b730b852939", "enabled": 1 } }, "staff": { 
"100": { "checksum": "sha256:943b25df416f2181aab46b3492aad9336f60a1b5b46187494f43ab516aae9c6a", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:788f2eb60a3d902060a6c5a08b086e2a1e96d213f86b206736da7e37eb21e51d", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:72c10f773d67b4209c39b4bea22e95c66d105f6f13e30f89bcd568eab6c889e3", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:736a46f682ff77d7c2cf54d5c264eb7b149793c12701b96e9be12bb3e6722796", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:0cc5796bfe362c3b28c73f62377c029a5f2321078b6d5f90bad42764415cd038", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:d96538a9cbb09fc38ba701cda88b2a0d199ab7826826d0043e4f07b05418bf84", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:a80606afbcc994e6fdc418cd83182f901d3e5b4b7b36fe262c71a25f43f10af1", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:19dfb362a8f445099eac9281522f0b13794cb9a0893a7acf0b54c15d193ef70e", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:f0e7b74086d47000f8335de5bade5a5a19a5e83bf581f885db92548546b7ea94", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:4614737ea0603530691e6158eb1bd07efa1992cb7ef52c201df3a637d3184cdf", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:f6a5a3b49885a9f780c5a9078cc968673809eaf89ecbe170fbb8a1ed4f521ea2", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:1fadc57b1e46515cbc038e96ae47ab74dd365a910f4d81ec9fb3044c4691260b", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:a5f0e5c340eaf127a166cc50be8170bfce80ccee0c14f32e4cc264089350da1a", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:fd538dbdeba0b4a1c244ba76b8dfef47f61da5a56f24f39fc24c137a9b3b303a", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bc0f37cdcdd0c9014e89e8be6758f7d9c97c67a4e42652459d6107314f059632", "enabled": 1 } }, "tcpd": { "100": { "checksum": "sha256:c78dcf2b9abf8d5ccf9f32b2debf6181a935a7078fe4a527991ab11d2999c4a9", "enabled": 1 } }, "tcsd": { "100": { "checksum": "sha256:e92fb82a2e509e3595d46dd464dac1029ce3a731f117fa67712d119d2878f195", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:fea41add022251126312da78373cb7fd05df1e9fd27547f1b4fc604a774827a1", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:06d4733c0fc7358d738d4dbf53968c9d9017a72b01456be46633364f00a4207d", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:8ba2497a28f4c2a31177811fc0a091a3bb9814f9e02cfc8d84c004718f661e5f", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:6ec8d4d38e58efa04572ac713c9148e7182e7d49713ed89955fabdd512b8eea4", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:c464da2b8e789d74ea2b2914217a194a3c07081b9f383acd2fee9ab77bc525b5", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:2ce98252c7ff59539bb38204ee65898ba6cc701c3dc87417c11e2e7124f448a3", "enabled": 1 } }, "timedatex": { "100": { "checksum": "sha256:df36b9f44f28df1b14b4d6bff01de42c414b947a8e6f1e6efdaa7023250709aa", "enabled": 1 } }, "tlp": { "100": { "checksum": "sha256:7b1d2643c7470dc5b80dee41d18482bb6fd6de55371aba888708a28fe0bb0172", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:2a54cea48dfbeb1c9dad0e167f70aa17970c4f2c76c560330c467051fe3b574b", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:de3ed9b8d62d29e80e29a051419a648c154c12f6bb188814ca79120ff1dc263b", "enabled": 1 } }, "tor": { "100": { "checksum": "sha256:16c95ae098af2b964a7a94b5bb6cd1c84d5c7f1254d6411209e4d5cfe87677bc", "enabled": 1 } }, "tuned": { "100": { "checksum": 
"sha256:b90ac3a04d3f04c7284f75802ffd69d6c1c3d5c0e6d08c3d0f2d9270b99dd487", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:8f8a1f1b2fea7a9fb8c3853e02c830f5204f691e9223cbdfbc320ec6914725dc", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:24410f1221660b8443af29cb55e42180e268fce722ceed2c99aa202e7dd3cc21", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:dba41aee81015b99378cff2273a56effd1202c0c937c05c63a913243b0641cdc", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:29e7469ef2704943f23c5040531fee8657cfed8440ef44b6268d21e6a9afe309", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:54482715f4fb5bca5c68ff67b9d145d12ad3df1438db97bcadcc32a2fb0f6191", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:13e69d4cbec7926c0ac6fb796749b4286462add3051f1e94554f23e637b81277", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:cb370bbe8bc0d7bca49a4fd1fad652017f4f8587c7c9d3277155fba32987550e", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:bbb2700ca73d867432851e12276a932b1553b034b1cc635f5c6681d6b62dcd3a", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:57a37a5c07af0f7ad80f4f01173e6cd6b604659e2d1b5605c2719dff8bbaf2fb", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:683c0598bdd00543cb696f7ed8cce6b55c658e566141538fc01b3f852af5f697", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:852eb8259277c64b80c91bd1dcbbe85f629e7218ab2f51d39324dcd78a4a278e", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:066e429e71ebcf11014f4ff6d7647c9d6d88ff191c64eeb9793021d16f4cde97", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:74b817fb60fd3ed5f074ef8ff399342ddc49fb2c250b08015dc975edd48f4dfd", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:fa589ab303d10fadd28a3e8d27cc9bc2e55a9b28f28c3f4c7e05968cb00a7cdd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:c5e4e24e89775d797a8988e2d5f72ec7a7dd8387289ede61af7a3ce2173cf167", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:6a3659d3706bc3af4b60e5de7efa9532dcc0c0a6f0c7735ed1300ec2120f9d01", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:f85ad7d20dd77416ab246ee0837b016a648176ec9956f40ff2ac6b3c2924edc5", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:18dab548c81b02f1b0f3efd6e25dd529bb0565e974156d55e42e274d3ccdf704", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:ee8af0b085b727e060ac3c82f1e38c89545505c9b26e849eda22e571064c46e7", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:0f7c8c575b060e863fe17e7ee8c67cc5cc3ea31da734a5428dc62c15f3b15bf4", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:df433826471b1c65a3686b57b4b07872a695d900731feb88cd6dfb76ddcbc5d9", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:4a9362fc5876897cae7062564d54d7f8ae12413c65c4c7fc6709f6407cc27160", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:fb9dda20b16232ac253b148063c9b267356b6f2831650f4c00fa01a6d0a8024a", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:d0ce73ebc7d2f494b669257a9a68106245371b455566654c7062694bcbad35df", "enabled": 1 } }, "vnstatd": { "100": { "checksum": "sha256:1df1aaf42d9c96922226b4828c38b6d315f7a9d3cda60fe54d99be5d618e140d", "enabled": 1 } }, "vpn": { "100": { "checksum": "sha256:9ea8931bf1c97618b2e99afb8c60a13d51a84db878bffa4082f6973e23b13eb1", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:43663b66ef8275c639a8076d92fc7da6821e0523c120e2c854839f9dc9d1db66", "enabled": 1 } }, "watchdog": { "100": { 
"checksum": "sha256:65b78e9b48a6cfe62f6c67c443d3bc667a58d206c09df00870949b6ae7ff8c30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:65560477bd0ae271799a76f75c5a3d46ef0c29f6922aa38e727c95b7e1095a99", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:4d4d609b3be3c2dc659694cfd2076e0c0c0d6446d16a3fb054a9e5f951b29410", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:867139a0cc2cb236ee54575ce6a8568cdbefd6785e8b7f64e09a3041da46b095", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:419d697ac987518dee6095070e2894c4112b50256e59d2b4f6acac585fb087f8", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:ce85b40df4d548aa55eb54bc546943366b654a3af7f602817f1fc499c0c8039e", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:f5d46e297e4e8e0a3f76c1fc8ae96db3ebf5b99ab538a54c171e489ac94ae1f0", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:aeb8895098531d1607e389703c783a3c1e8a8c1ad962397debe65214ff86e29e", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:85f1f1ed778597ec568ab7b9069779c088219d1da283a09382439c6803e7863e", "enabled": 1 } }, "zabbix": { "100": { "checksum": "sha256:476521323be1b84d7ba2539aa208d857678746a76e7e079577d3f46d251637ac", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:7536116b2852a578cbc5d32f7752b6dd3bb1202817db05306e1a16553c1d43b6", "enabled": 1 } }, "zebra": { "100": { "checksum": "sha256:3d18bbdc44c396c7715cce348f9248712132a1c53341d3b5760016d245f86e75", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:44cf07d7e6b15709d131b8b406032d0e6395a84e1e20bc67f9320a1e97c4dfcc", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:1177170edbd47b6fe17fa022a247d9b75b1fb3a5a49721bcff3c7da4f480c702", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:115 Wednesday 02 April 2025 12:14:13 -0400 (0:00:03.378) 0:01:36.618 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:128 Wednesday 02 April 2025 12:14:13 -0400 (0:00:00.043) 0:01:36.661 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:136 Wednesday 02 April 2025 12:14:13 -0400 (0:00:00.079) 0:01:36.740 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:14:13 -0400 (0:00:00.062) 0:01:36.802 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:14:13 
-0400 (0:00:00.040) 0:01:36.843 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:14:13 -0400 (0:00:00.036) 0:01:36.879 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:14:13 -0400 (0:00:00.042) 0:01:36.922 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:14:13 -0400 (0:00:00.035) 0:01:36.957 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Wednesday 02 April 2025 12:14:13 -0400 (0:00:00.107) 0:01:37.065 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "state": "created" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: nopull\nspec:\n containers:\n - name: nopull\n image: quay.io/libpod/testimage:20210610\n" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Wednesday 02 April 2025 12:14:13 -0400 (0:00:00.052) 0:01:37.117 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "nopull" }, "spec": { "containers": [ { "image": "quay.io/libpod/testimage:20210610", "name": "nopull" } ] } }, "__podman_kube_file": "", "__podman_pull_image": false, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 Wednesday 02 April 2025 12:14:14 -0400 (0:00:00.054) 0:01:37.172 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_name": "nopull", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:38 Wednesday 02 April 2025 12:14:14 -0400 (0:00:00.088) 0:01:37.260 ******* included: 
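/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2

For reference, the __podman_kube_str fact in "Set per-container variables part 0" above is the Pod manifest with its newlines escaped; decoded (indentation restored), it is the same content later written to /etc/containers/ansible-kubernetes.d/nopull.yml:

    apiVersion: v1
    kind: Pod
    metadata:
      labels:
        app: test
        io.containers.autoupdate: registry
      name: nopull
    spec:
      containers:
        - name: nopull
          image: quay.io/libpod/testimage:20210610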
TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:14:14 -0400 (0:00:00.074) 0:01:37.335 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:14:14 -0400 (0:00:00.044) 0:01:37.380 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:14:14 -0400 (0:00:00.045) 0:01:37.425 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:14:14 -0400 (0:00:00.056) 0:01:37.482 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610430.4085276, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:14:14 -0400 (0:00:00.362) 0:01:37.845 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:14:14 -0400 (0:00:00.041) 0:01:37.886 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path:
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:14:14 -0400 (0:00:00.048) 0:01:37.935 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:14:14 -0400 (0:00:00.040) 0:01:37.975 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:14:14 -0400 (0:00:00.040) 0:01:38.015 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:14:14 -0400 (0:00:00.040) 0:01:38.056 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.086) 0:01:38.142 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.041) 0:01:38.183 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:43 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.041) 0:01:38.225 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_kube", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:52 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.040) 0:01:38.265 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": false, "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:60 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.059) 0:01:38.325 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/etc/containers/ansible-kubernetes.d" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.042) 0:01:38.367 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/nopull.yml" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:68 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.048) 0:01:38.416 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:75 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.047) 0:01:38.464 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:83 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.037) 0:01:38.501 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:87 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.037) 0:01:38.538 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.086) 0:01:38.624 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.115) 0:01:38.740 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.038) 0:01:38.779 ******* skipping: 
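[managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

The linger tasks here are all skipped because __podman_rootless is false; lingering only matters for rootless users, whose systemd user services would otherwise be stopped at logout. For a rootless spec the role ends up invoking loginctl; a minimal sketch of that step (task shape assumed, not copied from the role source):

    - name: Enable linger for the rootless podman user
      command: loginctl enable-linger {{ __podman_user }}
      args:
        creates: /var/lib/systemd/linger/{{ __podman_user }}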
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.038) 0:01:38.818 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the host mount volumes] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.039) 0:01:38.857 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'volumes' in __podman_kube['spec']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.040) 0:01:38.898 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_volumes | d([]) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.051) 0:01:38.949 ******* skipping: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Check the kubernetes yaml file] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:53 Wednesday 02 April 2025 12:14:15 -0400 (0:00:00.055) 0:01:39.005 ******* ok: [managed-node2] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Ensure the kubernetes directory is present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:61 Wednesday 02 April 2025 12:14:16 -0400 (0:00:00.357) 0:01:39.363 ******* changed: [managed-node2] => { "changed": true, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/ansible-kubernetes.d", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure kubernetes yaml files are present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:70 Wednesday 02 April 2025 12:14:16 -0400 (0:00:00.373) 0:01:39.736 ******* changed: [managed-node2] => { "changed": true, "checksum": "d5dc917e3cae36de03aa971a17ac473f86fdf934", "dest": "/etc/containers/ansible-kubernetes.d/nopull.yml", "gid": 0, "group": "root", "md5sum": "1eceaf0da0bbf69a778deb11f0449417", "mode": "0644", "owner": "root", "secontext":
"system_u:object_r:etc_t:s0", "size": 217, "src": "/root/.ansible/tmp/ansible-tmp-1743610456.6449945-9359-205024044517212/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Update containers/pods] *************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:80 Wednesday 02 April 2025 12:14:17 -0400 (0:00:00.704) 0:01:40.440 ******* [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node2] => { "actions": [ "/usr/bin/podman play kube --start=false /etc/containers/ansible-kubernetes.d/nopull.yml" ], "changed": true } STDOUT: Pod: 2ece7bcadf2eddade61e07eca32f44abb4ac4beb0eae18d4bfd24840a8730932 Container: 01b0a82f30321cdedaff0ac6854c5d34ccfff5538f0d3a641938779544a5f4ab STDERR: Trying to pull quay.io/libpod/testimage:20210610... Getting image source signatures Copying blob sha256:9afcdfe780b4ea44cc52d22e3f93ccf212388a90370773571ce034a62e14174e Copying config sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f Writing manifest to image destination TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:88 Wednesday 02 April 2025 12:14:20 -0400 (0:00:02.834) 0:01:43.275 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Enable service] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:100 Wednesday 02 April 2025 12:14:20 -0400 (0:00:00.141) 0:01:43.417 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:113 Wednesday 02 April 2025 12:14:20 -0400 (0:00:00.061) 0:01:43.479 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:127 Wednesday 02 April 2025 12:14:20 -0400 (0:00:00.052) 0:01:43.531 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Wednesday 02 April 2025 12:14:20 -0400 (0:00:00.055) 0:01:43.587 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: 
TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Wednesday 02 April 2025 12:14:20 -0400 (0:00:00.036) 0:01:43.660 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Wednesday 02 April 2025 12:14:20 -0400 (0:00:00.035) 0:01:43.696 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Verify image not pulled] ************************************************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:235 Wednesday 02 April 2025 12:14:20 -0400 (0:00:00.059) 0:01:43.755 ******* ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Run role - verify continue if pull fails] ******************************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:239 Wednesday 02 April 2025 12:14:20 -0400 (0:00:00.066) 0:01:43.822 ******* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:14:20 -0400 (0:00:00.277) 0:01:44.100 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:14:21 -0400 (0:00:00.116) 0:01:44.216 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 12:14:21 -0400 (0:00:00.079) 0:01:44.296 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:14:21 -0400 (0:00:00.071) 0:01:44.368 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path:
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:14:21 -0400 (0:00:00.068) 0:01:44.436 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Wednesday 02 April 2025 12:14:21 -0400 (0:00:00.070) 0:01:44.506 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Wednesday 02 April 2025 12:14:21 -0400 (0:00:00.072) 0:01:44.579 ******* ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 12:14:21 -0400 (0:00:00.150) 0:01:44.729 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:14:23 -0400 (0:00:01.497) 0:01:46.227 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:14:23 -0400 (0:00:00.136) 0:01:46.363 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" 
} TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:14:23 -0400 (0:00:00.075) 0:01:46.439 ******* skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:14:23 -0400 (0:00:00.066) 0:01:46.505 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:14:23 -0400 (0:00:00.066) 0:01:46.572 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:14:23 -0400 (0:00:00.066) 0:01:46.638 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.025364", "end": "2025-04-02 12:14:23.834802", "rc": 0, "start": "2025-04-02 12:14:23.809438" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:14:23 -0400 (0:00:00.403) 0:01:47.042 ******* ok: [managed-node2] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:14:23 -0400 (0:00:00.043) 0:01:47.086 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:14:23 -0400 (0:00:00.051) 0:01:47.138 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "(podman_quadlet_specs | length > 0) or (podman_secrets | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Wednesday 02 April 2025 12:14:24 -0400 (0:00:00.044) 0:01:47.182 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" }
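The version gates above rely on Ansible's version test against the podman_version fact set from podman --version (here 4.9.4-dev). A standalone sketch of the 4.2 gate (task shape and message are illustrative, not copied from the role source; the when condition is verbatim from the log):

    - name: Podman package version must be 4.2 or later
      fail:
        msg: podman {{ podman_version }} is too old; 4.2 or later is required
      when: podman_version is version("4.2", "<")

MSG: end_host conditional evaluated to false, continuing execution for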
managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:14:24 -0400 (0:00:00.080) 0:01:47.263 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Wednesday 02 April 2025 12:14:24 -0400 (0:00:00.146) 0:01:47.409 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:14:24 -0400 (0:00:00.101) 0:01:47.511 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:14:24 -0400 (0:00:00.130) 0:01:47.641 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:14:24 -0400 (0:00:00.079) 0:01:47.721 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:14:24 -0400 (0:00:00.076) 0:01:47.797 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:14:24 -0400 (0:00:00.081) 0:01:47.879 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610430.4085276, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, 
"isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:14:25 -0400 (0:00:00.376) 0:01:48.256 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:14:25 -0400 (0:00:00.042) 0:01:48.298 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:14:25 -0400 (0:00:00.043) 0:01:48.341 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:14:25 -0400 (0:00:00.091) 0:01:48.433 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:14:25 -0400 (0:00:00.042) 0:01:48.475 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:14:25 -0400 (0:00:00.049) 0:01:48.525 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:14:25 -0400 (0:00:00.068) 0:01:48.593 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid 
file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:14:25 -0400 (0:00:00.066) 0:01:48.660 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:14:25 -0400 (0:00:00.068) 0:01:48.728 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:14:25 -0400 (0:00:00.084) 0:01:48.813 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:14:25 -0400 (0:00:00.129) 0:01:48.943 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:14:25 -0400 (0:00:00.067) 0:01:49.011 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:14:25 -0400 (0:00:00.067) 0:01:49.078 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.220) 0:01:49.299 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.053) 0:01:49.353 ******* skipping: [managed-node2] => { "changed": false, "false_condition": 
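"podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" }

The containers.conf.d and registries.conf.d handlers above, and the storage.conf and policy.json handlers that follow, are all skipped because the corresponding role variables are empty in this test. When set, the role renders them into the drop-in paths from "Set config file paths" (for example /etc/containers/registries.conf.d/50-systemroles.conf). A hedged sketch of supplying them (key layout per the role documentation; the values are hypothetical):

    podman_containers_conf:
      containers:
        log_level: debug
    podman_registries_conf:
      unqualified-search-registries:
        - quay.io
        - registry.access.redhat.com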
"podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.050) 0:01:49.403 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.096) 0:01:49.500 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.041) 0:01:49.541 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.040) 0:01:49.581 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.080) 0:01:49.662 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.040) 0:01:49.703 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.089) 0:01:49.792 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.040) 0:01:49.833 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task 
path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.121) 0:01:49.873 ******* TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.072) 0:01:49.994 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.049) 0:01:50.067 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Wednesday 02 April 2025 12:14:26 -0400 (0:00:00.043) 0:01:50.117 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Wednesday 02 April 2025 12:14:27 -0400 (0:00:00.043) 0:01:50.160 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Wednesday 02 April 2025 12:14:27 -0400 (0:00:00.042) 0:01:50.203 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Wednesday 02 April 2025 12:14:27 -0400 (0:00:00.052) 0:01:50.256 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Wednesday 02 April 2025 12:14:27 -0400 (0:00:00.096) 0:01:50.353 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Wednesday 02 April 2025 12:14:30 -0400 (0:00:02.847) 0:01:53.200 ******* skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" }
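The firewall role runs here because the podman role forwards its podman_firewall list to it ("Manage firewall for specified ports" above). A hedged example of that variable for opening a test port (port number hypothetical; the entry format is the firewall role's, as the podman role documentation describes):

    podman_firewall:
      - port: 15001/tcp
        state: enabled

TASK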
[fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Wednesday 02 April 2025 12:14:30 -0400 (0:00:00.066) 0:01:53.267 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Wednesday 02 April 2025 12:14:30 -0400 (0:00:00.070) 0:01:53.337 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Wednesday 02 April 2025 12:14:30 -0400 (0:00:00.066) 0:01:53.404 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Wednesday 02 April 2025 12:14:30 -0400 (0:00:00.064) 0:01:53.468 ******* skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Wednesday 02 April 2025 12:14:30 -0400 (0:00:00.076) 0:01:53.545 ******* ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:13:57 EDT", "ActiveEnterTimestampMonotonic": "326139129", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target sysinit.target polkit.service dbus.socket dbus.service system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:13:56 EDT", "AssertTimestampMonotonic": "325830531", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", 
"CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ConditionTimestampMonotonic": "325830530", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service iptables.service ip6tables.service shutdown.target ipset.service nftables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12711", "ExecMainStartTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ExecMainStartTimestampMonotonic": "325832688", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Wed 2025-04-02 12:13:56 EDT] ; stop_time=[n/a] ; pid=12711 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:13:56 EDT", "InactiveExitTimestampMonotonic": "325832722", "InvocationID": "41b8036e16214fc68c244a42727639e9", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", 
"LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12711", "MemoryAccounting": "yes", "MemoryCurrent": "40091648", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:13:57 EDT", "StateChangeTimestampMonotonic": "326139129", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Wed 2025-04-02 12:13:57 EDT", "WatchdogTimestampMonotonic": "326139126", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Wednesday 02 April 2025 12:14:30 -0400 (0:00:00.574) 0:01:54.119 ******* ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:13:57 EDT", "ActiveEnterTimestampMonotonic": 
"326139129", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target sysinit.target polkit.service dbus.socket dbus.service system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:13:56 EDT", "AssertTimestampMonotonic": "325830531", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ConditionTimestampMonotonic": "325830530", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service iptables.service ip6tables.service shutdown.target ipset.service nftables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12711", "ExecMainStartTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ExecMainStartTimestampMonotonic": "325832688", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Wed 2025-04-02 12:13:56 EDT] ; stop_time=[n/a] ; pid=12711 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:13:56 EDT", 
"InactiveExitTimestampMonotonic": "325832722", "InvocationID": "41b8036e16214fc68c244a42727639e9", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12711", "MemoryAccounting": "yes", "MemoryCurrent": "40091648", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:13:57 EDT", "StateChangeTimestampMonotonic": "326139129", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", 
"Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Wed 2025-04-02 12:13:57 EDT", "WatchdogTimestampMonotonic": "326139126", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Wednesday 02 April 2025 12:14:31 -0400 (0:00:00.553) 0:01:54.672 ******* ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/libexec/platform-python", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Wednesday 02 April 2025 12:14:31 -0400 (0:00:00.084) 0:01:54.757 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Wednesday 02 April 2025 12:14:31 -0400 (0:00:00.062) 0:01:54.819 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Wednesday 02 April 2025 12:14:31 -0400 (0:00:00.139) 0:01:54.959 ******* ok: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "15001-15003/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Wednesday 02 April 2025 12:14:32 -0400 (0:00:00.610) 0:01:55.570 ******* skipping: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "port": "15001-15003/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Wednesday 02 April 2025 12:14:32 -0400 (0:00:00.093) 0:01:55.663 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Wednesday 02 April 2025 12:14:32 -0400 (0:00:00.080) 0:01:55.743 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | 
length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Wednesday 02 April 2025 12:14:32 -0400 (0:00:00.069) 0:01:55.812 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Wednesday 02 April 2025 12:14:32 -0400 (0:00:00.069) 0:01:55.881 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Wednesday 02 April 2025 12:14:32 -0400 (0:00:00.065) 0:01:55.946 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Wednesday 02 April 2025 12:14:32 -0400 (0:00:00.066) 0:01:56.013 ******* skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:14:32 -0400 (0:00:00.112) 0:01:56.126 ******* redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Wednesday 02 April 2025 12:14:33 -0400 (0:00:00.250) 0:01:56.377 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Wednesday 02 April 2025 12:14:33 -0400 (0:00:00.070) 0:01:56.447 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Wednesday 02 April 2025 12:14:33 -0400 (0:00:00.052) 0:01:56.500 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Check 
TASK [fedora.linux_system_roles.selinux : Check if system is ostree] ***********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5
Wednesday 02 April 2025 12:14:33 -0400 (0:00:00.074) 0:01:56.575 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10
Wednesday 02 April 2025 12:14:33 -0400 (0:00:00.045) 0:01:56.620 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17
Wednesday 02 April 2025 12:14:33 -0400 (0:00:00.044) 0:01:56.664 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22
Wednesday 02 April 2025 12:14:33 -0400 (0:00:00.048) 0:01:56.713 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] *******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26
Wednesday 02 April 2025 12:14:33 -0400 (0:00:00.068) 0:01:56.781 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_python_version is version('3', '<')", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35
Wednesday 02 April 2025 12:14:33 -0400 (0:00:00.069) 0:01:56.851 *******
ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] }
MSG: Nothing to do

TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46
Wednesday 02 April 2025 12:14:36 -0400 (0:00:03.039) 0:01:59.891 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_os_family == \"Suse\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] *******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58
Wednesday 02 April 2025 12:14:36 -0400 (0:00:00.072) 0:01:59.963 *******
ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] }
MSG: Nothing to do
TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:72
Wednesday 02 April 2025 12:14:39 -0400 (0:00:02.918) 0:02:02.882 *******
skipping: [managed-node2] => { "false_condition": "__selinux_is_transactional | d(false)" }

TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:77
Wednesday 02 April 2025 12:14:39 -0400 (0:00:00.072) 0:02:02.954 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:82
Wednesday 02 April 2025 12:14:39 -0400 (0:00:00.065) 0:02:03.020 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Refresh facts] ***********************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89
Wednesday 02 April 2025 12:14:39 -0400 (0:00:00.067) 0:02:03.087 *******
ok: [managed-node2]

TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:5
Wednesday 02 April 2025 12:14:40 -0400 (0:00:00.751) 0:02:03.839 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_selinux.status == \"enabled\" and (selinux_state or selinux_policy)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:13
Wednesday 02 April 2025 12:14:40 -0400 (0:00:00.049) 0:02:03.889 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_selinux.status == \"disabled\" and selinux_state", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] *********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:21
Wednesday 02 April 2025 12:14:40 -0400 (0:00:00.043) 0:02:03.932 *******
ok: [managed-node2] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false }

TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] **********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:25
Wednesday 02 April 2025 12:14:40 -0400 (0:00:00.048) 0:02:03.981 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] *********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:30
Wednesday 02 April 2025 12:14:40 -0400 (0:00:00.038) 0:02:04.019 *******
skipping: [managed-node2] => { "false_condition": "ansible_selinux.status == \"disabled\"" }
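(Aside: the next five skips are the role's purge switches; each false_condition names one, and all of them evaluate false in this run. A sketch of the corresponding variables as they would appear in vars, values taken from the false_condition expressions below:)

    # Sketch: purge switches implied by the skipped purge tasks below.
    selinux_all_purge: false
    selinux_booleans_purge: false
    selinux_fcontexts_purge: false
    selinux_ports_purge: false
    selinux_logins_purge: false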
TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:35
Wednesday 02 April 2025 12:14:40 -0400 (0:00:00.097) 0:02:04.117 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:43
Wednesday 02 April 2025 12:14:41 -0400 (0:00:00.060) 0:02:04.177 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:48
Wednesday 02 April 2025 12:14:41 -0400 (0:00:00.068) 0:02:04.246 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:53
Wednesday 02 April 2025 12:14:41 -0400 (0:00:00.066) 0:02:04.312 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:58
Wednesday 02 April 2025 12:14:41 -0400 (0:00:00.069) 0:02:04.381 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] ****************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:63
Wednesday 02 April 2025 12:14:41 -0400 (0:00:00.062) 0:02:04.444 *******
skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] ***********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:74
Wednesday 02 April 2025 12:14:41 -0400 (0:00:00.067) 0:02:04.511 *******
skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87
Wednesday 02 April 2025 12:14:41 -0400 (0:00:00.069) 0:02:04.580 *******
ok: [managed-node2] => (item={'ports': '15001-15003', 'setype': 'http_port_t'}) => { "__selinux_item": { "ports": "15001-15003", "setype": "http_port_t" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "15001-15003" ], "proto": "tcp", "setype": "http_port_t", "state": "present" }

TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:99
Wednesday 02 April 2025
12:14:42 -0400 (0:00:01.032) 0:02:05.613 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 Wednesday 02 April 2025 12:14:42 -0400 (0:00:00.062) 0:02:05.676 ******* ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:13dad22da122be9f7d5df4dbedae6a515323542fdc1a7e466d7a1a3d36d29731", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:8bd25829d921be0b5adf92ddaca7ab94cedca1d57796749cfa63571b6550e3da", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:2699d826efd46176017695c768804c505a54b277b05f1feb9c43a613bab4e6aa", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:99920dd4e0855870f7e6f9666928d13fe18ddccca9d38b92ea70a6ce3c8c7539", "enabled": 1 } }, "aiccu": { "100": { "checksum": "sha256:a7aedc8354b4335412871adfd2ab5b0c6da1ea63c8dd797718e4214a5d511bb5", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:8adb5c3a5ed74695e975eecbf290640b179eb6345a7740745ecfe3164efe209f", "enabled": 1 } }, "ajaxterm": { "100": { "checksum": "sha256:d3a03c2837d5dde7145e27902ff8578e00734ab34e8ea1a45aee58b83e9ad6d1", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:202f94345fba8f4bc942dc9b75bbb6eea3b4cb02411cf6ed79858d72aa883c89", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:f9a99d97370017307349a154ce479969395bbbfe434e4829573269f770efdd0d", "enabled": 1 } }, "amtu": { "100": { "checksum": "sha256:bc9934a2ae61fa117614f201479966d788484f3a7382de4ebad99790a465e2b7", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:b8aabc624243533d483c3dd5574a490a43e7ec0f2f7940798c12b4089bbd0642", "enabled": 1 } }, "antivirus": { "100": { "checksum": "sha256:1de6460ccaea5a5749eba17489b9765035c8202eb9492485ff39157564001a2c", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:1a0c38364558bebdae3efaa1fcf8be232184dcddcaab345bba1c40bf239dd0ed", "enabled": 1 } }, "apcupsd": { "100": { "checksum": "sha256:175308edb201092c22791f419d32da3f661e7ccfb9c5d5855ad753405c10023b", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:a1410f65d6bf017caedaffaa59016877686099fb7df3c4d801136de79a61795e", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:a8e9d90aa1188068ca66be55c4d8abf9982666171bbdd8d4da1f2a254c34a080", "enabled": 1 } }, "arpwatch": { "100": { "checksum": "sha256:2cb8afd237d6bc5693e5d54be1a455b6ed632fbbe76cea406163f9c48d00e79f", "enabled": 1 } }, "asterisk": { "100": { "checksum": "sha256:0b66b387174001e926cf1454c3516bb32d96610a0f598065fe6d7a917ca897fe", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:dcd9e7f5e71fb9f7aace30b5755efcbf85fe88f884d4253cc9abcad1c44e5f4d", "enabled": 1 } }, "authconfig": { "100": { "checksum": "sha256:bdb8072e463c84cb01e6933093428be2b6ee5299d82e26730b12dd2b66d89355", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:a89b04c7a40bb373de2bbb0a2210cca454e7d4a805321fbe65462ae5551db656", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:41ec4e0c5c46118cb4dfa8c8b1834f330dce4ffdea3d534a8d5007a63b3e5262", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:7628cb8340258102798a6e36902d0210e2051ffb9fb4f7a1e4c62a612edfe6fa", "enabled": 1 } }, "awstats": { "100": { "checksum": "sha256:9b92e64a3331076ad443862aa2ba98a2c4d9b00638bf19bb9726f572dee5eff4", "enabled": 1 } }, 
"bacula": { "100": { "checksum": "sha256:32cedcc57f6a973ac5adc16d8df343fc1ca4b3716f7cdcdae0d2490a6e5765ac", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:d99ed290beecf2b10a557a21b06b63cabc28dab4050f2e7197d2cb9e30519fd3", "enabled": 1 } }, "bcfg2": { "100": { "checksum": "sha256:ea510637d47b7fabc3f617f8a6f3ca3172bf9215c2d6b64ad19cd5d8819c8b6b", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:39520749f8aba46f975a87187975d8dcd014ad67d22515951f51fa3fd1b0478f", "enabled": 1 } }, "bitlbee": { "100": { "checksum": "sha256:bf04e481614825a35c26a547b19098ff1c8acd0d915c5b4f938b9fa595459d00", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:ca870c95742bf987a2e739286cbcb998b58c091a422251fdd8de57228b28fd96", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:7e4b2b3df3962273436b561c806c816fe4b1e5d6781efa33a7109b05f796edd7", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:da457ef2ce595c3bf9f70697029ea90e96472ae562f685a7f919a7778a778d09", "enabled": 1 } }, "boinc": { "100": { "checksum": "sha256:d74bd3b6b3850c30b5bbf77822ab82b43f36600e4f76cd68674ef361328afb05", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:4ccf41e247c5a7066042a0ebaae492805a1d640f777e8e771701f340a76bce30", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:46e55021d6c4cede091a992ab33521bb1aba4ca1d44879d778973b279204933c", "enabled": 1 } }, "brctl": { "100": { "checksum": "sha256:f9645adde2441e43369a255c6a194f01c6f5800347ad710ce3e147df884b98aa", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:603734d4772f482f282eb217c03647f705d66de27fc927c64e02787369b0f78a", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:326d2a188603c908cdae3c9dcdae6bda37b98ec4cc23f3b31878e2bbd0cd33b2", "enabled": 1 } }, "bumblebee": { "100": { "checksum": "sha256:e8ca8d5318a68243441fdb993fbab6d566f7462fd5557b55733f8ddbfcc4b276", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:86fe9c1aa8b2d7a6bdd9bd8d0c7a41a7ae0e4e14e32eaea6cb920367c2f495d7", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:1069377693a5d730d57e4ddd6f73ce20b67b595aae90a16459e852d238163b48", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:880b626c3d04c5669d64ee617ee36a18566e91adeaac67b9527b0a795543575e", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:b9256764ca5e34142e8cffea57fafc2fa66f78dc8c05761f97fa9becd1d77311", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:ad293ee5e252966d14fa6bf09240f143460df4b928672a33a398a5793777c4e4", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:dda8d62c3bf2503ff9762bd031c35a76cac8059d08592fe23e4d3fe11b0ac8cc", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:b431dd84f2c6b971bc573674fa6c4ee2fedf910b0123ba5d9acb5011c208fd72", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:965ec65dfc98cbabce2350bd52fa7ce92c2f4ab4704348f1555f2a3d9edfd1b8", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:77f0299f67e43927eacb553d1002beeebc3098b4bee64d8dc3dadb8fd23fbb5c", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:c78b908838f1d64ee9ebb0a51b7fa438527716936471a573e1b4b7c393bd6b8d", "enabled": 1 } }, "cgdcbxd": { "100": { "checksum": "sha256:5d3633e0b77db69721e4f64167d7e5f7779c3e5fa76e095d25f8467f2a0bdfec", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:9368c6c54bd5ec6f20e4c3b47c86e60af07346c4e86e525b6bd7288b54b7e774", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d31ce9d2fe78cafcd5e3c8decf22ae1e9ea6f74026ca65b6320afe9a33cd609a", "enabled": 1 
} }, "chronyd": { "100": { "checksum": "sha256:7d9624729861397cf7720c2324c65489a3d30485e6a884ab1ff9a8ca22efa678", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:fc169721c78f5b0857ed8312e59ba4c134b685c4c322dae242b92e815e35e6fb", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:02c20398b9eff51ed431b7ad739a6015d2451b4bf6e3e5da380606d85a77852c", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4f90655d2243cfc32ea7436a953cccb8a34af895f83361235a3a5cda40dbc75f", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:ba78a422a10b65591c48cb038f8a55614944163f3140275852d293fb0c548bfa", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:481f5fbc7810a5a81851edbe5a6b124141257f5fbbb83d8830ae0a34924ed3d9", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:8f8fb986f15b8b7c5c250d250fdbbb2f78874e13394105c9c486488a16e94c91", "enabled": 1 } }, "cobbler": { "100": { "checksum": "sha256:e0e264b9cc83962dbbb27c152a72f01c6a355467c4e845b52e65c8b88d8d75d6", "enabled": 1 } }, "cockpit": { "100": { "checksum": "sha256:cb7fccd94903a6e256a586d758085f6f59c0f8b1c5b4cb99536915526d2224ec", "enabled": 1 } }, "collectd": { "100": { "checksum": "sha256:7f08e2e248d33162dc9b237c37ed3a3dba0511bbcc71d87482e95093fb8c6456", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:86e58c9f12c519a2c3b090b64a276722374054ea900c775b2f8ab4ef2867dcf0", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:1d57ffaad6b96e3ca8ac82c23b52d58d81e1f69f5d54a648a16da8ffa8070e53", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:dbc3f2f0c12f9aeed14056fd7e7c46a4ecab3569198f891643172cd032f3fc00", "enabled": 1 } }, "conman": { "100": { "checksum": "sha256:1270caf15af248a487cd5ce728daae2699ffd9139823c805ec49213ab1c835cb", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:56fd7d7a550dbc4188b93afd0fde8c706623b3a5d26db265ee016967ba4ddfee", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:5bd7a7acc191766583d933b04321e64657138959bf40a4d2986b013b942c4ba8", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:301be7dafa07cdc68b4e5ade7e1a07017fab3efd85986bdfab7faa9466a95836", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:12b2e3e7314bda4e76d3883901e6470927e85343f742fb44b174ce968f1ad8b5", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:40ae5f173004741838002644e5bff73cf16f2f3a1928c45fa17674f9a0df5148", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:1485a6d64d00619898d2789d27391f2a57a7fb1f0e8c73daf59baca8641564a3", "enabled": 1 } }, "cpufreqselector": { "100": { "checksum": "sha256:687564eb09acf3e7f1475fe2a133941c36999bd037aa8a794feea2d9f2c26385", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:c16e376ff6c51da1911e68a8a7d42f5730eda45febfd0875c78cac4b9cf6e78c", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:6be0252b3c6bcbfb4c51dfd3ae1ae262f5de153234917ac4d342b18ae0292060", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:06dd65a4361bf8076c14b322dd30003295c0b9d75bf1ae610961b13a1f9431da", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:3d5e5bbf131d98d95f7f1431893eb137bd833dbfd8469f9c386d72bb4e8f9b9a", "enabled": 1 } }, "cvs": { "100": { "checksum": "sha256:bbc8d76cc8609849d5b078c5b2ac7364470a06d77d67b97d5f58429d7b679e33", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:b1a41211ae3cf69b819df517eccd0fda2088c27685dad68de64531b9794ec518", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:60defb1f6feeb1d607734c4912e52e03bf5b0c27cb6f31a37fa7e05f3497b323", "enabled": 1 } 
}, "daemontools": { "100": { "checksum": "sha256:1034e2442c975dd2ccf84791b1a826d02032f13762d57c3485e51e2b9a7dc03f", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:40306590ef444152ae18b65040d85442c14853a9cc4c31b0224c4d19517d66ea", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:24559eff82b251f9814ae88c36a7cbacda1ed419a80145aef545306e88cb0da8", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:50ea4eb05a06315449092c939e2307436ac6461e47ca69f0d42cc4e321e86280", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:06e414b0a83b49968f62018cecde48dcfe68b2e9d699915367b3e04461188a0d", "enabled": 1 } }, "ddclient": { "100": { "checksum": "sha256:73ca2525a14e3161524f6e8fc0d016430a536002f1cb3833db1334670b458436", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:1bd00b13b9bda18274a771d66f7cba8fe62e5e95ea8f51415da6b1fa1336df1b", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:03b01b781881cc60438bc357bd60596970b8ac019b415969bca8a08358fcbfd1", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:2ad95a78468f7f4ea9a8c044c79c0a4ca9924b41432390ea2863a85c806c9a00", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:c30c819f142d3c719d0ec5741af5a65161770ff140097fe63f7559d55b897500", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:50efdc68200d27ce1a5db99a780aa7b0e84988669961d436d348c7bb8310d181", "enabled": 1 } }, "dirsrv-admin": { "100": { "checksum": "sha256:8d9234157484f6ae8ba22039b44fa19f4de8137be9321e5da393d72d85d89487", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:8b834312a2cb99ab89862f839a1315e78794dd92758785f84c9559285dfbe679", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:2c7fb8c6c52f385b819713f0444a96cfd4e65b7dcb3ca79b932cc12ad9ce903d", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:44f66c5d4f635600ee9d0ba3fdea3896218f1420b5ead89e0f22d71a447f9e97", "enabled": 1 } }, "dnssec": { "100": { "checksum": "sha256:49427a9e92b87db77706e2b81ece254c99d3cd6ba020211e2afae65fab7ad066", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:cc8c3a2ee0233a7f1fdf38837b72ce5fd15efef782a36ab4b9aa2ec339b46fa6", "enabled": 1 } }, "drbd": { "100": { "checksum": "sha256:b66be23c1ded4e548e5369b744c7c2a4dfd7065582517525221177ca67657525", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5dd7221ba40e9b912367289fed8ca116c14da4fb8bd7f28f421c4008855bb9fc", "enabled": 1 } }, "entropyd": { "100": { "checksum": "sha256:0f68aeeb1da72efb8c9b1bb7db0a4180b6938672b16f33d1abcd65f5481d85a9", "enabled": 1 } }, "exim": { "100": { "checksum": "sha256:f4c4473ee49394e0e4629023772464a046c476f92b4a727acdf9f6c92711b952", "enabled": 1 } }, "fail2ban": { "100": { "checksum": "sha256:2383cb88b81bc5d87be9f3201a42da526532c4ea8e6d3b3f5023005c0ddf6f17", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:913e66ac5f5ce364e5ea556acfbf77845c25a4beb5ee64599613aa00127c1492", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:63f00993bae4285eff5e993d208ea786785c4331e6947b3a48a97d31145b2e98", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:16c506d472b007f7d36850810ca0fcfd9482d30ce9c0ba790174b78294fd1d74", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:bbf58446f30b93de19e5a19087ee012f8e347fef5e7e8012e64b31a0ec21ab09", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:b61ff17eee03141c9c7bd79d63331ecea733cba4b5b43b87d5141a40cdccdd69", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:c5540b8385c84075dd657e390d77ae886aa9d74b65444b9aa1d858f375819a8c", "enabled": 1 } }, 
"fprintd": { "100": { "checksum": "sha256:c1ffb7734a0359a7390830d9c6477ab61c45fc026368bfd5e2246523a6439464", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:9af2291d75a2d643f53ff7a98bcabf22effb617329178efea45372d714825de1", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:28bf77389f3e41743b30727a891609172a891466e92c28a919f43e628cc23a4d", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:140caf542903419ee2471fd99ab06aa45899c400402c2580b395b182f24bd225", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:7e8456fdf7807b30e1c257e568ba10305696cf5abdebc70988c288079884d46b", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:1dd6a45b73c7ce77a87af1e87354ada5aa5b2841aaaa045a6b4ae3c4d09f0f8b", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:950d8be99d5349a3d893ba601c518e6b2af0d56c5b55514a45dbd8a3c61c9ecc", "enabled": 1 } }, "gdomap": { "100": { "checksum": "sha256:5040cb99d007fe9368bd37a9a6bf82f891c220ef652443896a0f2f6ca6f818e1", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:f0155b43152b6b4b850d1c4fb7daf16fd77992313b8be314ddb4901314bf913d", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:a60d07665b0ebd25fd54a9d82dad5eb7acbc11a2842dba56d7b9524d26ce14ce", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:5eaccf209092db49c9a48d84e1387c1de76cb153c774c0bd615c001afab28664", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:b522382b64f36cf387cd892b45e916c861bd0a09697bc983eb55b53b0efd3081", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:2c51d19fca6ee40e137245ecb425edc77666d75c42ba583bf74cf13f10ace055", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:420b9cefa6bdb542f6da10de7b36704a91509cf64cd2497e5693a858cfca5e41", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:f821aa6ca5837a2d2de8180e74c267f68da951960c989fb13ebde5833c93738e", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:bf30c4945be0065672fb47f70ad251b1079ada339f61f2679293cb0226d0d57a", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:5373b2332959d6c41c32160018274ab61e3f1abd0f0a5cc2302c45b141a39a9b", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:7528c47be91a81ac19f2f54458309baeb0a232d83a1ccb2bd89fbc8cefb1ddc8", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:91f43e4d5ae283f0aa13c49efea93293dbdecd2b2f4f75db89371eda65b7523e", "enabled": 1 } }, "hddtemp": { "100": { "checksum": "sha256:f170e1da6acae4fd7108d22c8cf262916e034f0d3edbdebf3265a922a5355373", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:8b15f72328885c08bfda38082a62feeaa2c6692223a4d2bd1a572820d454a742", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:e9fc1c4032c0346f751e1ef8ad1b3fe3425401b70a6c4354d4485472288e0bc5", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:f70b198e5a5157722b69dc89109c4074a475e1085356cc610cc9b700567c154d", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:370e9eea2b927a2715018b667e9a56ad09af301a90811cd9b041da79f5384b38", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:b54ce6f4960a02d35e19d60bf8a07f7866514893e3193a5f4822c8580a46caa4", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:663b35f3874583ae074924bc068a8dc4c7c144adb60007da6103d1e3505ee37a", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:dedaddef1d7447d25a1e7ff01e60e4545606e556c6770bd3fa94d9331de7a5d7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:ae408578a7160f2feae10269365558c43d9570b392642a92cc20f8ad47c58cce", "enabled": 1 } }, "init": { "100": { "checksum": 
"sha256:7ff95566a4f2bdb8ca3ec67acdade39e35fdabc57c2f00b989bab3f699f997f8", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:9ad99284192a443aa582e73b46667388b7a219dafae8dfce71a58a82bbae2f6c", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:0e41289d8dce065dcd41fd6cc1e1282efd4a58e7f9e3a2f1abc32f520fbbcc1e", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:32501ab66def044fbc340cb5c656d5743c738bbd6fca5626c36c687419cd8d32", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:d15656cd91a4e4e178a13f7cf910cfc552cc30db881a11ec88833f947edb4561", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:d34fe186922c0e5726ca361343ec3846833ec3e4ab9b019b3d7bac1337383a16", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:d36c66c2c79d338c61c90d4136433e1e3a73435e920eb36d70682dfd5e147e59", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:5a674017cc648e3262757464e5413503154cc1f593da545ce2c4f946991012bc", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:d72428ccbff5521367e00699c142bba64b2bbd44fed35deb29f9530cc0448378", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:15650b2f39ccdfbcb1e4e867a35fce3c2768097e611e0c8ad9cb79ae6c66dd58", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:ccb27142f793095c79f531aae924baaeee5914c84228a09c09b9eca839f3524e", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:90b42f610fa328cdfb98bd0450bd052566f203e51e4a913dd6faded6da7fbe2c", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:5ad49d140265305dc72781a6826d1de4614a33f83bd512acdc2263038ad41206", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:d910afd1bfe836543ded50974dc24ae7bd5fd2609d6a9b2403316dffcd39832d", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:d9a67ce1976ed2e79826d25f33dcb0b0bbde6c090600b605bbaaae45856d12f6", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:9ddb71271d0dbe5cede6179c0ca263e297dc6b65197bde2f7b14ce71f8dde369", "enabled": 1 } }, "kdbus": { "100": { "checksum": "sha256:5969c78be4a03cc91e426bc19b13c5188b5bf8ac11f5e2c21c098c3d68a7e3e3", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:fdde3852d1decda649133c6345680f9353b86a6da2a98a83a8be101c9c25f103", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:66c67280c70a9b897b0f952067438e0eee05f9f48913508b38d745ef88747f32", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:c1177567c7bf67bb2d0de17760cecf56e0bb34f50d6fe060dec64ae97a76ecdb", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:826fbe83705494e009b242b88857c425eacba49aadae506ffa2012c80e60f7ae", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:f199811d9ddc8db83864a09c543567fcb2f117b3241967b092bff7c9fdbfbfb6", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:b0a7227a870ea987035e0cd524ad956a68287d0a67dd7135de41c6d5977ff4c2", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:488fb5fd17cf1f630f3e48a853da05f86c06fc58219dc2ae59251865734bf800", "enabled": 1 } }, "kmscon": { "100": { "checksum": "sha256:d64019b11b6a37f6cdc5579d56eb1e19b6a7231501e1cfe2a838d26a2eac6033", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:00070d71dfe2632491305387ffb264127dca4387425015e4cb013d6bce5f95c3", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:891f082452240ad2e726bad777ea787d0f0f8695cc2a75f7439a2badda030d24", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:2df6f3dbad4a513ee1c113e496e8d2f5a19f56015f4a21e7478f2f5b53f36359", "enabled": 1 } }, "l2tp": { "100": { "checksum": 
"sha256:8e4cb0b0e0d1293d669de0b0e50f68d6d6fbe8e8d830a236a1c0e676f2326fb2", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:d0177bb5873d0e6f9595020a8f39ba06b19e4636ea610175a3afef4aec2719cb", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:6d5f128f2d4fd9137a7c70d0d024703547796a71f70017b3550a31d3450e0435", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:e7eebd050230b358b43435d37ce308c3ba15e2516f4045abf7d26f03ebfbc11c", "enabled": 1 } }, "linuxptp": { "100": { "checksum": "sha256:4132cd51913a3044e453ed0b972db2ef511fdc7b2a1b592d1070177651066ab9", "enabled": 1 } }, "lircd": { "100": { "checksum": "sha256:cc81b79d2834e58bef7928f525c1a1eee5547e81d195444b3bc2741e396ae46b", "enabled": 1 } }, "livecd": { "100": { "checksum": "sha256:805c7bc4ded621b44ecf333d558328e115bba652fcbc91f436cefc948497688e", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:358c4b262655cffbf20f7484aedb22f094509f44d52a1fa3efe3edeafd99317e", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:26f9e78406ecdc968ed670b32db1d10805e66875631558f092f08a6e1f2170dc", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:e07d92775ed25e7a3627bf977452844c67acf473b33075475f433f8be76dd755", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:1f946da2054cc1693209749df12ff01ab6456247d6225733aebb3a7d70a46e20", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:70546c4b3d01f15bc7a69747dbb12fc6bcef5d899f6301f62c0c612c7069082a", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:656067c78ff1246a1a758a213d44307f91cb79336fe74a47015af425e58266fc", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:76cc40f1943fe21959793499bffaf35d0fe53ffc3f6c5a8b31eb96e738a286c2", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:cf4450b03e28762040c29f2a28af238cd4905d1c6bd4c73d656b266c7b9a8a6c", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:9358dc35659b9570d3e8119a088b2693d7de505ea25996dc139517a857888857", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:1247dc4bccfbc9ee42292db4415b21ae00bdef3dc2faeb267f045413da4a1b1b", "enabled": 1 } }, "lttng-tools": { "100": { "checksum": "sha256:79e4a2224ede13cd5f2c0e6e7c61e83efabaf1d05b86f6f7a710599bfc48edaf", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:f56137657dd61a1a8a8844d5d1db01fc03330d17e05457d03f64756b344c32ef", "enabled": 1 } }, "mailman": { "100": { "checksum": "sha256:e47811cf3bd8204eaa02c4aab92f3d426f0a3ef97161e1579845d1e03df1fc1d", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:8d447072ab5005ead27f1cb4d96dcbedf09a11182f660c6f59c6d56fd81235d8", "enabled": 1 } }, "man2html": { "100": { "checksum": "sha256:224584babd9e83c242d54fd8c5cd03379b0556005268aac22b15734b913f12e6", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:ae44b8ec7a90ebbc45fdafe89663197b36e47120ad90eb22b475939055ea6924", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:c5d98ec368b145c74b4bf0ea8da3980b17af0c2d00654c5a6973241625f97b12", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:43f1c6f7cfdeaa26891824167cf637a8670785c2674b45d85ce4a7ac77190a36", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:f0f9c7367e9bd196aa463916bd5aab02f6966dad9564a0f2fd070bb2e8410aeb", "enabled": 1 } }, "milter": { "100": { "checksum": "sha256:db190bacd2b84a29971cd1940cd15d606abbfded5c9b956894717afd91fc7a0d", "enabled": 1 } }, "minidlna": { "100": { "checksum": "sha256:0d6ac660d641c1cf707a814ed08e19b9e21547a3eaa7134cab84dbc5fee6b5b2", "enabled": 1 } }, "minissdpd": { "100": { 
"checksum": "sha256:dd2ab85bcba6d204f9dbc7304e8a4940e5d1733d4b9cf4fcb0f4072982c585c3", "enabled": 1 } }, "mip6d": { "100": { "checksum": "sha256:406edf2c78ba0e692d5a78f3c5ca8d641d00131b143332adeaad9f325959683a", "enabled": 1 } }, "mirrormanager": { "100": { "checksum": "sha256:7084de59beaaaf4f630357ec53beff8d0a0ee532ac180fe58e23bfe98f1fdaee", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:7e7e87e302bf847a4c59d69e5af60729e61bada0cc5d6ec17a25a6514476cb48", "enabled": 1 } }, "mock": { "100": { "checksum": "sha256:ae352eccf2f2c9ee8f0d9635517d9ae3c9bba83c617deca8f989e2aae8dd35fa", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:84a60147d2b0121ff6ede6199583cdb5619480d015b2a675c6a0569f91c12d66", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:67c3914aeb25e38fc6bd0793fddc41122dba1547d54e91a78065545fea3b9c87", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:6030afcea9f8d46f25dd7785737edd25eb0f1e50b76eafe4d9103196b722d47e", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:6ba3a594d01a11bc32e7cb554f7386314b5089eb4416fb776edb552a7d53c41d", "enabled": 1 } }, "mongodb": { "100": { "checksum": "sha256:1b2d30558bec7fc08d1d388ae2bb0becd2233c99c9fb173fd00809786ce5eed9", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:346e172be35df168eb0e4fbc8e176b0fda87de9bc5787f7a5ab7667cfe1e3c3b", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:f66c53d993dcd47ea1ff3d797f8fd69fb8161a4ff8a59f54f66a2de9462a55a7", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:7696dbb77c54531cf2574c7ede9f085cf64611dcf7a612530dce2de19f7a8b9f", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:0f67c18c9101b53f57ef857a74d6044701e1d2c347f829a03c0579c545fdbef3", "enabled": 1 } }, "mplayer": { "100": { "checksum": "sha256:f82c0a72506f1011e47ba98e51d5edf906f58fc190d797f5d1a0b8e5cc7d0762", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:afcd9267261b334900420461279b8555fdb4bd783af880fa4606d8afc65e0712", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b0f9753424c504a288f55d495105f6d475d69287b718190ae5192cf7d6ddfde6", "enabled": 1 } }, "munin": { "100": { "checksum": "sha256:29f87ec15fa19e975c83288d55e56bab64855a24c4d8826fe4138eda9a46cc97", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:b028af8f4e726feb8c26037f7c6d6f97383977bd5ee6141ab4e8e1d096d6481f", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:e025b2dbf50901632da0ee2aa658105a322275eb120d782cbbf25f2895231154", "enabled": 1 } }, "naemon": { "100": { "checksum": "sha256:a19b3b0540dc52d9506ca7e5d804c2fe9115b3ea28bfd9273030e841e12eb277", "enabled": 1 } }, "nagios": { "100": { "checksum": "sha256:39ca80027ac8585f368bcd57f555ba87bf409f7b7d6c4292c09fd06cc1691c80", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:ef73850f29b4ff4ff904d506d545bf366fd1e7c2ba82a7a7c9a4513e3eee45d9", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:2c9356101a9ddbec94afdd12ca669ba93a1d422c302f9e17b78b18670617d2a1", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:9a32ce04c1dd8e120588c15b3057f838bedce8f14c91576b667295d47800e0ad", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:5e0a20ae09b00fac69ee30a0d55ff73fa692d8350c9c0b0343af61e4f0dd654f", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:9c67b21155929e43e4efd3fc81a85fddc9f1030b47ee4a275789014c1311b972", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:85cac2885d75522eb07189efcc3feeb7775fc6daf5cf3f1a28a1fd2109fe148c", "enabled": 1 } }, "nis": { "100": { 
"checksum": "sha256:b5b133d60b98068eb9480c54285050ae9b49d2fb309eac8994cc91c865ee02d4", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:59919a89d30a5d4b60d6971fa636fb62605d59d214ec614adc279f6cbe2c2b27", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:578bc975477539c659f3608b1445a0c7a9bc7c3f2dcf65b3e55f3a3af89ea564", "enabled": 1 } }, "nsd": { "100": { "checksum": "sha256:d5b03cdc6c8bbc222b8e3d30680b1a7d2d1a49837e7d509aafcf6b2a3a32195b", "enabled": 1 } }, "nslcd": { "100": { "checksum": "sha256:18b003071f4c36307616f7d5de8cff6d4e376af31cb96ce1a5ad6ae3011dfd09", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:f942c7fbe636b9d60327ef9dade1120340c16a2992a6b50db5fbaecd44ffd63d", "enabled": 1 } }, "ntp": { "100": { "checksum": "sha256:686664a71e74b0edd643ab9d556b1aab092fa707935da5ea928a66f54a3c84e0", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:dabc5ce6244d0b0939e9a07bd6bc232e8b666529a0b7b29527e586db8224862c", "enabled": 1 } }, "nut": { "100": { "checksum": "sha256:653e708dec531e483992b25944a689ec9369478d039a5ec62c98294ab73ce8c4", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:4ae55fe839abaaf0ea52b79a5c8f6a906575b83cca29532c2dd52337fb3d5790", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:7b2c87e864b6008f734e1effa48cee1399f41843b9d80d3fd95fbd19e058598f", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:9de0b544b2373ea0f1b7217f9179898479dbff0da36ea9857783de57d06585cf", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:761bf911674d23053eceabbbda8da16c73af5f300929a33a64513dc6e3b2d0af", "enabled": 1 } }, "openct": { "100": { "checksum": "sha256:5674f8e8c975570649e3065460786cb4521a86370bffef5a9de18c69813fe68e", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:bdef6dbb24ae22548634759ac823a8c3e21fde6368cfdfd742480f7027e63ddd", "enabled": 1 } }, "openfortivpn": { "100": { "checksum": "sha256:1a1bff55993510cb6481383b299e1f1a6349ec76e4947bfc8c5b1347e4d30bf4", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:ad3f3f3ba4442930560b291c022e674e6a50e4a37fe027926299b2f6cdec14bd", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:329e4b9d1df5012ace94cbe9cba7dfa7ee7d9f242090072c71aaacbeea78986a", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:31cbbb069354f984e4af75b387778fae1ff4dc6c3e60533357d005ffa960b51c", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:c0e1bf0a8eb50e0b41fa69bf5b65e2a7c324e4bc7255933a5d2bac3b9ae6f4de", "enabled": 1 } }, "openvpn": { "100": { "checksum": "sha256:a4d12ae8ad77d65d0fcabb20aa4a83886e782d732123f686f88a7d7472384104", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:a54f8a8ea5abb8a33734ecef9d9ad1c0dd090a6e0c5187e80de52f522d2d5e39", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:d6b7bb8f7749265bdaf938abecb2f8f78c6e9e8dc06c1c26b48da227af5a8654", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:67e31eec391bac337ebacb78c096589af4b7e8be6aa05c34cf187ba922a2abde", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:6635ff0231bfc3d88c771553d495941ee0f98871edfe6c86205b087186b3a72f", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:5b4531e9231d399ebec8e6b6870a812c6a64b2daffde35fa57a009b24a01809f", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:912a1c442559d6ab48453d87e2b997bdee3017a54a0b60aeaf7d4603fde0f34b", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:456b3520c26e5f2a913437318715712ae00f64932a27ab1bb8b8b42e0524fa05", "enabled": 1 } }, "pcp": { "100": { "checksum": 
"sha256:5302332fba7e6724ab7a3c32bd523b10322c20011c6e42ae4e769a49f3efabdd", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:2ee37df066a9ff80439b08c092809f3661e2f9a8ad02134e839627fd23a20c1f", "enabled": 1 } }, "pdns": { "100": { "checksum": "sha256:a1a10cd52eb9dd15bc1ccfed440f6b3d235edc7405a3932f81805d8d94000245", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:4280c40629dd111fd1c89ff867ac72d1e7ddde49dc3d286637e6a86b868e2303", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:6461acd0385c0b1a32bf646fc9e09da0c7ca513954ed8fe2a03f4ee7f6a64fcf", "enabled": 1 } }, "pingd": { "100": { "checksum": "sha256:f7536a518a046b793ea3f74a67d677b878baac44b28268c5ccecbf10715d89ab", "enabled": 1 } }, "piranha": { "100": { "checksum": "sha256:11436fb7942d28e3eca22bc078ee5475f632d8447008a6414f337d4bbc3515dc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:c70e17d1a4d347b38fdfbb2a5dab292e3e0c538ea52fb6cfdef2714e130da0b1", "enabled": 1 } }, "pkcs11proxyd": { "100": { "checksum": "sha256:c9582c89cac1546fa1e5bf9802c5a322e52e2529256f9e5922d5813e40be3646", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:ec40fbe6355370fe69a8ff343744654b06d4134c1518c64269be1f3a49083968", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:7aa52d533e28a3ebf76d879c24bb4e0a58574033d5af6d4d22b716d1156c3f90", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:b32a5cc38b8edcc76b94862cee0c822a5b4d095329f53ab6f7cb014c76346e8c", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:686d9f7652cb2b3d7ce6af2aa620c14a6cbbbdb8d26b3630cfbf6bc34d9e3e6c", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:6098bd8a4f449c01dc7e0f4509663994259fe8848f2f21d1319bf7105bbacc4e", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:f561aef22cda98a94a74bedda09645e50066a77a23d3bdcbb1143b0c62ffe7b2", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:9de99e881e9e2e7e0b78629eec721840da4aa18f78ff5a06e46b7a596c28a09a", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:3101c4c1d54f3e175dc3fcff001c6937a9ffec7781f4095ea38fea88df7e8067", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:a734cc086d7d73ef2ffe7543f82dc50b57619e78e60664cb67a9513790f3335a", "enabled": 1 } }, "postgrey": { "100": { "checksum": "sha256:ef4d03336b66c1184f352f9b3fe8004d870bbf003673d4393bde24ea14b056b8", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:83e6712ba7343dc1346e94c51b75b05839f78bd24f9324d984b7aa9631bd0377", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:df050b0d180947788ab45862c4627ae640c92cf0f6a994a685e4cb5fe46bef76", "enabled": 1 } }, "prelude": { "100": { "checksum": "sha256:88c5fa3da64c127ed6e688f9eba5e50a8f6f98ea3243d29b8b0bc0375ef95420", "enabled": 1 } }, "privoxy": { "100": { "checksum": "sha256:e4a84567c63c892d4cdda3a9a4b15ad5188c093da679a354f00c43b6376a844d", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:98170eed35b67b9097514bcb044a18cc3f757af5f91b5d870ea707d6048cde75", "enabled": 1 } }, "prosody": { "100": { "checksum": "sha256:07e999e033252b28ae41697ddc23b42dbcf4bdc143c9eb1c55475aabc9fc9caf", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:7fc3410de486bf89c4d35989937f424b435c9c4f5398f47f9c840b146197c6ac", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:129978bcb62fdcaed728fb288b321c204575246eb535354e02bfd83089cb0ded", "enabled": 1 } }, "publicfile": { "100": { 
"checksum": "sha256:9cc75080e25fb5602ab266f1c0d0f16843bdfc561e7af6dec32d669e31bebe98", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:a41fc5d1275d548510a2be0180741f952f0f696f443eaabf03c1abf3f80f499e", "enabled": 1 } }, "puppet": { "100": { "checksum": "sha256:81559a7d5e16e228382840986ae0e414d4a78163a9b51b5d9c05a58e07574e8d", "enabled": 1 } }, "pwauth": { "100": { "checksum": "sha256:8590f80ce91ddd4862ce2beab9ec64deb66d99c5583ff5ee3cbff2e503caaa37", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:917a35c0ec48acfb5166c937e97269acac39541acebad9c1c410bfdbcb483da1", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:cfdb156d23ae6c99b3dbac171ab1626202bf1ae7671fae9f6d6f7241116638dd", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:eb4881c554de7882b4e5590a8efb35a758fc1b3d61bc1502632d6f4e571cb331", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:27d1fb8e99c6d1c75fc8efa8aeaf4303d0dcd8d03cb2992d968a3186d648f4b9", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:f0b2b81a6670b7640d49d49c364635f39272330f08bcdaa23c681bf2ac64e10f", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:791a60cff31fca43e01aa4bfe3a57c5938015db44fd1f64064778dbbcdb6e2e2", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:1cea7f5b37f7a0e722ecbccaa09d95db2b175ec125d62e3898a99081c51c6f96", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:a94b0b917312a73eda50ea641dee49eb00f49df286133fcdb13267fd49ce5d1f", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:159d40315f3f5086a31e6f0a6a90d342783d6f0c97c5feeb9c92808c7345adcf", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:a61f7efd50387ebfd35b675b22a8cba86c6216c0bbd901aab5e8674b5c442777", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:276a24e14ef12f5fadaeab2883d501cb096e01a9ce1be2178a5c50ebfa6b3fcb", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:61561d5f14d9a6597d6e312f5429947baab045d01a729f7cc34406e859fa0015", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:f40066828d25674c525148f890d9cc84ddbb203f5a4aaad616ef2cd3a497fdc3", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:742f881c1a4838ecfc1a55a7f3b78a72267644e3a64e3ec45a191599b5bd8532", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:5dd2b902123ef00065db6ec8d173f37baa26dbe43566bd5f06594ef1243fd5fd", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:67f232676ac23535867e2494f04989dbd6b9b6d4bbc67df67dc2edb4d31a8be8", "enabled": 1 } }, "rhev": { "100": { "checksum": "sha256:ee2f26beaa5c6a5d25e03ef9ab30302d6b29b283283683421fab52e29e47fe3d", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:39c550e1c8b149dc6f308b0f9ef238315208453ee064bb1558eff9137531840f", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:16bff56244925c7696fa2da5a4c986132488c352149cc88181bf6b4143fc80ba", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:e999510837aabb3ce118ad61225a846f687588e9a321ffe675b56511191bc323", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:9000bd99784bc22ffda4493b4985e8c5a2e65e87aeaa1cb96ba82d367a27a8be", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:c72c61297cf864a1abda8226de08039c8ae0212808d3f7fd8725b53b955d59f6", "enabled": 1 } }, "rkhunter": { "100": { "checksum": "sha256:d48bd9c5789f4adc396773664402ddeab432caa99597267ccdf24220948e5b3c", "enabled": 1 } }, "rkt": { "100": { "checksum": "sha256:a9414e82cadd2876471465737bd8322eb833e296869ebcefcd9e722ff717d350", "enabled": 1 } }, "rlogin": { "100": { "checksum": 
"sha256:a4b2e25abc4099a0a54821518b7c824a2ddb7544fb0b5ddde9a0a9be159ac1b2", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:5c867af2674586cc1c41aa3203e3704a0d1400d344a8e257bc61e9eebb86ad03", "enabled": 1 } }, "rolekit": { "100": { "checksum": "sha256:73382d4b8a12fa161dbb5ba36c94e7f0b1f82b1abdf0a4f07ca6c981e08f271b", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:1a2503ebaa997c6b6efd5d2343ea731f73b2f0312f2e8d5578dad2c8a84a94fa", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:e423284f5ed36e7b6c52f581b444a981d5d1c8af6c8dabe8c6cb6c71d3f49fb2", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:53831134210db04fe6e6b0f05e20b8b7307ae8c11e774faec9e1b3aa2b02b5dc", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:acbd671bd661f9f2f25d4798f1646a51075f297c8b086ea9bd3133a00e356432", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:c6110313310591ee2a08b504b04ebd1b98f370b6633172f06ee7c0c7db0a963d", "enabled": 1 } }, "rshd": { "100": { "checksum": "sha256:1340ab5daac926cc1354452869ab5aa78d27ceb110543624d2ffaf93773c394b", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:9dabc52612d567e728786c007f5017c7032c02be3a9201521a530fc91ca789f8", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:33dffe2764dc45bbc59b406a67187c39864412bac07ee089bda30ef09cb70faa", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:9d55dfe843e44e8a93c02ea28b14856edfdb1f820bb647992daa6af11e2dbd37", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:ea77b9f26c8fc61b7fc281099b2f16e75c5b196660fff55a95f96e97935a7a1b", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:4468bfdd23924a96b4cf8c6fa1a3fa606fdd8ac69b7cb17c16a6e39a95908921", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:c97b92abaf053976c89a670d82bf06bc5c7d561ccf03e3ff1ac84be6e01cfc5c", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:18d1a69de368fa621e8ef3234b8ddb40261ced880bb732328a310db5a62a7a0a", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:711df017c1f168e33245144d67289225439bbed701fb1146cb83e9cd63ce1f7a", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:093d9d9793142bb9a8c4375f5f368ca1a4d9beb0cd05329518f91bb9ea51bd06", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:536ce94509d38b40200debf17fbddc16ec9004463fdb3fc42890dde9b3eb56f1", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:57ecac942ea46af55728362527d70a3e135c3b4711688ddf62596b9a768d9fb0", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:2ab2f52e6bac063f176e007b39cd8a4e43012ea075d82af20fbb3403891b6493", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:7df09c8fa09e105ecf51fee797975603a2df8d15c3a0bf00fdb1d565fe4a6b91", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:9cf04d33aa9dec0b559c892fb20df89fbe1883544d4ac2d6bf6fc319f0a16663", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:e7f9a696e0958d6bdbd6696e67a9b4af62430456d0f278e290db0ea1ee9750b7", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c72355dc70789deb94777acd0b47c2c3ae628e8d90bffb0e0e320941e5ddf3b7", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:98f68238d6ca96277390c160adeed4e3e382d5ded5a88a3909cfebe986b849be", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:10ca96a581ef4b0fa1789160fd71fb340d8b1d13906b42fab6e9119033d4f942", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:3a172b4972f9271250b4d228541c78b0243fd0544ac983db0f590e09674f700d", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": 
"sha256:f78edfcb470cd9929f45b6db29ae4924a286ab30a03f80b7bdf3699bccb98314", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:ba2043d9665e2fd3a9e2d103671bfe647060b93d9c02eed2dca3066a0ecfb81d", "enabled": 1 } }, "sge": { "100": { "checksum": "sha256:cf843c98ff4113ded675f79df694549b4f848aecb1295f0a510101e301fbd348", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:c7c49d28e52aba4d168e684b9160a225fbecab373bfbb6963bbe89c93ecb867b", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:be1825562f583305597e5ceb1298ebb60e42c4f270b4a7e3751cf9d9be1b1fac", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:14748519962688e62b7bc7e7c03ad91c1f815c5d33c63f2d60e03340f55609a8", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:9f26cf1e9fa128e98c758a6325525f8547950a2440b6582202228c3c5c2c80d9", "enabled": 1 } }, "smokeping": { "100": { "checksum": "sha256:ae8cbd09d519a42bc01063c4c16f58e96cb3673acb557dcd2d09af444d742db1", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:8aa5f2749eeaef5ae871dc903dad87611e369c92e9b3fc28b4944f75db785a18", "enabled": 1 } }, "smsd": { "100": { "checksum": "sha256:d36a762c836a0e4305773e352fe0f46657784b5d9bf749f02df9c6d15f68d101", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:62bba8f6a236bae902815188cedbb5f3090acf0829247e6808787f8c913d9981", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:68b5e9d408704e44ebf29ba76ae18afdcf6d8aef12794e8e9026997376ce12f8", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:eef39dec8d416650af3f9eeeb518b06dd9a9e09144aa579b6bd6422ba0037d70", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:c19dc2ed34c3d274f8e01647dc2d869ca06d4a9a3009f57c1845fac4d33ed358", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:a46a9508591afb1407fd14441c9c26cd495a3789e3c6792a2eba38a6642e4b97", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:8255ad891466762e31763d6f4791a32aa1eea1147a812020724eab8eb07c1916", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:ce5ba130d5d0ae5fafe8f823b824856590f990ad7c08aa0a5930f5060c252021", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:4170a7354e69ed60e0268389f74042e02a2511a4451ca20b97a63213b8881e1e", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:a4b4b395d2185abfd68edce0f813103ccbedd5d9748f9a41d83cc63dd1465109", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:5b0cc219f31e88f2fa78bc31d9c6fe6c7af29b4832509635672ca9edc79409c6", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:29cd0921e9effe356c856c3319488adf66c794cbb7d1610e5fca2b730b852939", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:943b25df416f2181aab46b3492aad9336f60a1b5b46187494f43ab516aae9c6a", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:788f2eb60a3d902060a6c5a08b086e2a1e96d213f86b206736da7e37eb21e51d", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:72c10f773d67b4209c39b4bea22e95c66d105f6f13e30f89bcd568eab6c889e3", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:736a46f682ff77d7c2cf54d5c264eb7b149793c12701b96e9be12bb3e6722796", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:0cc5796bfe362c3b28c73f62377c029a5f2321078b6d5f90bad42764415cd038", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:d96538a9cbb09fc38ba701cda88b2a0d199ab7826826d0043e4f07b05418bf84", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:a80606afbcc994e6fdc418cd83182f901d3e5b4b7b36fe262c71a25f43f10af1", "enabled": 1 } }, "swift": { "100": { "checksum": 
"sha256:19dfb362a8f445099eac9281522f0b13794cb9a0893a7acf0b54c15d193ef70e", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:f0e7b74086d47000f8335de5bade5a5a19a5e83bf581f885db92548546b7ea94", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:4614737ea0603530691e6158eb1bd07efa1992cb7ef52c201df3a637d3184cdf", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:f6a5a3b49885a9f780c5a9078cc968673809eaf89ecbe170fbb8a1ed4f521ea2", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:1fadc57b1e46515cbc038e96ae47ab74dd365a910f4d81ec9fb3044c4691260b", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:a5f0e5c340eaf127a166cc50be8170bfce80ccee0c14f32e4cc264089350da1a", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:fd538dbdeba0b4a1c244ba76b8dfef47f61da5a56f24f39fc24c137a9b3b303a", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bc0f37cdcdd0c9014e89e8be6758f7d9c97c67a4e42652459d6107314f059632", "enabled": 1 } }, "tcpd": { "100": { "checksum": "sha256:c78dcf2b9abf8d5ccf9f32b2debf6181a935a7078fe4a527991ab11d2999c4a9", "enabled": 1 } }, "tcsd": { "100": { "checksum": "sha256:e92fb82a2e509e3595d46dd464dac1029ce3a731f117fa67712d119d2878f195", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:fea41add022251126312da78373cb7fd05df1e9fd27547f1b4fc604a774827a1", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:06d4733c0fc7358d738d4dbf53968c9d9017a72b01456be46633364f00a4207d", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:8ba2497a28f4c2a31177811fc0a091a3bb9814f9e02cfc8d84c004718f661e5f", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:6ec8d4d38e58efa04572ac713c9148e7182e7d49713ed89955fabdd512b8eea4", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:c464da2b8e789d74ea2b2914217a194a3c07081b9f383acd2fee9ab77bc525b5", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:2ce98252c7ff59539bb38204ee65898ba6cc701c3dc87417c11e2e7124f448a3", "enabled": 1 } }, "timedatex": { "100": { "checksum": "sha256:df36b9f44f28df1b14b4d6bff01de42c414b947a8e6f1e6efdaa7023250709aa", "enabled": 1 } }, "tlp": { "100": { "checksum": "sha256:7b1d2643c7470dc5b80dee41d18482bb6fd6de55371aba888708a28fe0bb0172", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:2a54cea48dfbeb1c9dad0e167f70aa17970c4f2c76c560330c467051fe3b574b", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:de3ed9b8d62d29e80e29a051419a648c154c12f6bb188814ca79120ff1dc263b", "enabled": 1 } }, "tor": { "100": { "checksum": "sha256:16c95ae098af2b964a7a94b5bb6cd1c84d5c7f1254d6411209e4d5cfe87677bc", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:b90ac3a04d3f04c7284f75802ffd69d6c1c3d5c0e6d08c3d0f2d9270b99dd487", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:8f8a1f1b2fea7a9fb8c3853e02c830f5204f691e9223cbdfbc320ec6914725dc", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:24410f1221660b8443af29cb55e42180e268fce722ceed2c99aa202e7dd3cc21", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:dba41aee81015b99378cff2273a56effd1202c0c937c05c63a913243b0641cdc", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:29e7469ef2704943f23c5040531fee8657cfed8440ef44b6268d21e6a9afe309", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:54482715f4fb5bca5c68ff67b9d145d12ad3df1438db97bcadcc32a2fb0f6191", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:13e69d4cbec7926c0ac6fb796749b4286462add3051f1e94554f23e637b81277", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": 
"sha256:cb370bbe8bc0d7bca49a4fd1fad652017f4f8587c7c9d3277155fba32987550e", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:bbb2700ca73d867432851e12276a932b1553b034b1cc635f5c6681d6b62dcd3a", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:57a37a5c07af0f7ad80f4f01173e6cd6b604659e2d1b5605c2719dff8bbaf2fb", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:683c0598bdd00543cb696f7ed8cce6b55c658e566141538fc01b3f852af5f697", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:852eb8259277c64b80c91bd1dcbbe85f629e7218ab2f51d39324dcd78a4a278e", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:066e429e71ebcf11014f4ff6d7647c9d6d88ff191c64eeb9793021d16f4cde97", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:74b817fb60fd3ed5f074ef8ff399342ddc49fb2c250b08015dc975edd48f4dfd", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:fa589ab303d10fadd28a3e8d27cc9bc2e55a9b28f28c3f4c7e05968cb00a7cdd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:c5e4e24e89775d797a8988e2d5f72ec7a7dd8387289ede61af7a3ce2173cf167", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:6a3659d3706bc3af4b60e5de7efa9532dcc0c0a6f0c7735ed1300ec2120f9d01", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:f85ad7d20dd77416ab246ee0837b016a648176ec9956f40ff2ac6b3c2924edc5", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:18dab548c81b02f1b0f3efd6e25dd529bb0565e974156d55e42e274d3ccdf704", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:ee8af0b085b727e060ac3c82f1e38c89545505c9b26e849eda22e571064c46e7", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:0f7c8c575b060e863fe17e7ee8c67cc5cc3ea31da734a5428dc62c15f3b15bf4", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:df433826471b1c65a3686b57b4b07872a695d900731feb88cd6dfb76ddcbc5d9", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:4a9362fc5876897cae7062564d54d7f8ae12413c65c4c7fc6709f6407cc27160", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:fb9dda20b16232ac253b148063c9b267356b6f2831650f4c00fa01a6d0a8024a", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:d0ce73ebc7d2f494b669257a9a68106245371b455566654c7062694bcbad35df", "enabled": 1 } }, "vnstatd": { "100": { "checksum": "sha256:1df1aaf42d9c96922226b4828c38b6d315f7a9d3cda60fe54d99be5d618e140d", "enabled": 1 } }, "vpn": { "100": { "checksum": "sha256:9ea8931bf1c97618b2e99afb8c60a13d51a84db878bffa4082f6973e23b13eb1", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:43663b66ef8275c639a8076d92fc7da6821e0523c120e2c854839f9dc9d1db66", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:65b78e9b48a6cfe62f6c67c443d3bc667a58d206c09df00870949b6ae7ff8c30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:65560477bd0ae271799a76f75c5a3d46ef0c29f6922aa38e727c95b7e1095a99", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:4d4d609b3be3c2dc659694cfd2076e0c0c0d6446d16a3fb054a9e5f951b29410", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:867139a0cc2cb236ee54575ce6a8568cdbefd6785e8b7f64e09a3041da46b095", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:419d697ac987518dee6095070e2894c4112b50256e59d2b4f6acac585fb087f8", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:ce85b40df4d548aa55eb54bc546943366b654a3af7f602817f1fc499c0c8039e", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:f5d46e297e4e8e0a3f76c1fc8ae96db3ebf5b99ab538a54c171e489ac94ae1f0", "enabled": 1 } }, "xguest": { "100": { "checksum": 
"sha256:aeb8895098531d1607e389703c783a3c1e8a8c1ad962397debe65214ff86e29e", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:85f1f1ed778597ec568ab7b9069779c088219d1da283a09382439c6803e7863e", "enabled": 1 } }, "zabbix": { "100": { "checksum": "sha256:476521323be1b84d7ba2539aa208d857678746a76e7e079577d3f46d251637ac", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:7536116b2852a578cbc5d32f7752b6dd3bb1202817db05306e1a16553c1d43b6", "enabled": 1 } }, "zebra": { "100": { "checksum": "sha256:3d18bbdc44c396c7715cce348f9248712132a1c53341d3b5760016d245f86e75", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:44cf07d7e6b15709d131b8b406032d0e6395a84e1e20bc67f9320a1e97c4dfcc", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:1177170edbd47b6fe17fa022a247d9b75b1fb3a5a49721bcff3c7da4f480c702", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:115 Wednesday 02 April 2025 12:14:45 -0400 (0:00:03.337) 0:02:09.014 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:128 Wednesday 02 April 2025 12:14:45 -0400 (0:00:00.047) 0:02:09.061 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:136 Wednesday 02 April 2025 12:14:45 -0400 (0:00:00.039) 0:02:09.101 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:14:46 -0400 (0:00:00.064) 0:02:09.165 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:14:46 -0400 (0:00:00.041) 0:02:09.207 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:14:46 -0400 (0:00:00.038) 0:02:09.245 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:14:46 -0400 (0:00:00.037) 0:02:09.282 ******* 
skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:14:46 -0400 (0:00:00.037) 0:02:09.319 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Wednesday 02 April 2025 12:14:46 -0400 (0:00:00.109) 0:02:09.429 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "state": "created" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: bogus\nspec:\n containers:\n - name: bogus\n image: >-\n quay.io/linux-system-roles/this_is_a_bogus_image:latest\n" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Wednesday 02 April 2025 12:14:46 -0400 (0:00:00.109) 0:02:09.539 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": true, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "bogus" }, "spec": { "containers": [ { "image": "quay.io/linux-system-roles/this_is_a_bogus_image:latest", "name": "bogus" } ] } }, "__podman_kube_file": "", "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 Wednesday 02 April 2025 12:14:46 -0400 (0:00:00.057) 0:02:09.596 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_name": "bogus", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:38 Wednesday 02 April 2025 12:14:46 -0400 (0:00:00.044) 0:02:09.641 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:14:46 -0400 (0:00:00.086) 0:02:09.727 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:14:46 
-0400 (0:00:00.046) 0:02:09.774 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:14:46 -0400 (0:00:00.045) 0:02:09.819 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:14:46 -0400 (0:00:00.057) 0:02:09.877 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610430.4085276, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.369) 0:02:10.247 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.040) 0:02:10.288 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.041) 0:02:10.329 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.096) 0:02:10.426 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task 
path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.042) 0:02:10.468 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.042) 0:02:10.511 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.042) 0:02:10.554 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.042) 0:02:10.596 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:43 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.042) 0:02:10.639 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_kube", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:52 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.038) 0:02:10.677 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": false, "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:60 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.074) 0:02:10.752 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/etc/containers/ansible-kubernetes.d" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.070) 0:02:10.822 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/bogus.yml" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:68 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.049) 0:02:10.872 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:75 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.053) 0:02:10.926 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:83 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.107) 0:02:11.033 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:87 Wednesday 02 April 2025 12:14:47 -0400 (0:00:00.038) 0:02:11.072 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2 Wednesday 02 April 2025 12:14:48 -0400 (0:00:00.088) 0:02:11.160 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:14:48 -0400 (0:00:00.072) 0:02:11.232 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:14:48 -0400 (0:00:00.040) 0:02:11.273 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:14:48 -0400 (0:00:00.039) 0:02:11.312 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the host mount volumes] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7 Wednesday 02 April 2025 12:14:48 -0400 (0:00:00.039) 
0:02:11.352 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'volumes' in __podman_kube['spec']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18 Wednesday 02 April 2025 12:14:48 -0400 (0:00:00.041) 0:02:11.393 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_volumes | d([]) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29 Wednesday 02 April 2025 12:14:48 -0400 (0:00:00.043) 0:02:11.437 ******* ok: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Check the kubernetes yaml file] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:53 Wednesday 02 April 2025 12:14:49 -0400 (0:00:00.997) 0:02:12.435 ******* ok: [managed-node2] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Ensure the kubernetes directory is present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:61 Wednesday 02 April 2025 12:14:49 -0400 (0:00:00.363) 0:02:12.799 ******* ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/ansible-kubernetes.d", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 24, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure kubernetes yaml files are present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:70 Wednesday 02 April 2025 12:14:50 -0400 (0:00:00.366) 0:02:13.165 ******* changed: [managed-node2] => { "changed": true, "checksum": "f8266a972ed3be7e204d2a67883fe3a22b8dbf18", "dest": "/etc/containers/ansible-kubernetes.d/bogus.yml", "gid": 0, "group": "root", "md5sum": "22799c1e99f8fb14db15efc7dbc7ba4c", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 237, "src": "/root/.ansible/tmp/ansible-tmp-1743610490.0738642-10334-156459945632354/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Update containers/pods] *************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:80 Wednesday 02 April 2025 12:14:50 -0400 (0:00:00.726) 0:02:13.891 ******* fatal: [managed-node2]: FAILED! => { "changed": false } MSG: Output: Error=Trying to pull quay.io/linux-system-roles/this_is_a_bogus_image:latest... 
Error: initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized TASK [Verify image not pulled] ************************************************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:262 Wednesday 02 April 2025 12:14:53 -0400 (0:00:02.615) 0:02:16.507 ******* ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Cleanup] ***************************************************************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:269 Wednesday 02 April 2025 12:14:53 -0400 (0:00:00.047) 0:02:16.555 ******* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:14:53 -0400 (0:00:00.254) 0:02:16.810 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:14:53 -0400 (0:00:00.084) 0:02:16.894 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 12:14:53 -0400 (0:00:00.127) 0:02:17.021 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:14:53 -0400 (0:00:00.048) 0:02:17.070 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:14:53 -0400 (0:00:00.045) 0:02:17.115 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Wednesday 02 April 2025 12:14:54 -0400 (0:00:00.045) 0:02:17.161 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Wednesday 02 April 2025 12:14:54 -0400 (0:00:00.045) 0:02:17.206 ******* [WARNING]: TASK: fedora.linux_system_roles.podman : Set platform/version specific variables: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 12:14:54 -0400 (0:00:00.096) 0:02:17.303 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:14:55 -0400 (0:00:01.444) 0:02:18.748 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:14:55 -0400 (0:00:00.042) 0:02:18.791 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:14:55 -0400 (0:00:00.075) 0:02:18.866 ******* skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 
April 2025 12:14:55 -0400 (0:00:00.162) 0:02:19.029 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:14:55 -0400 (0:00:00.068) 0:02:19.098 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:14:56 -0400 (0:00:00.068) 0:02:19.167 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.026092", "end": "2025-04-02 12:14:56.364131", "rc": 0, "start": "2025-04-02 12:14:56.338039" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:14:56 -0400 (0:00:00.435) 0:02:19.602 ******* ok: [managed-node2] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:14:56 -0400 (0:00:00.073) 0:02:19.675 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:14:56 -0400 (0:00:00.074) 0:02:19.749 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "(podman_quadlet_specs | length > 0) or (podman_secrets | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Wednesday 02 April 2025 12:14:56 -0400 (0:00:00.069) 0:02:19.819 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:14:56 -0400 (0:00:00.093) 0:02:19.913 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Wednesday 02 April 2025 12:14:56 -0400 (0:00:00.098) 0:02:20.011 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:14:57 -0400 (0:00:00.186) 0:02:20.198 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:14:57 -0400 (0:00:00.138) 0:02:20.337 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:14:57 -0400 (0:00:00.081) 0:02:20.419 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:14:57 -0400 (0:00:00.080) 0:02:20.500 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:14:57 -0400 (0:00:00.064) 0:02:20.564 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610430.4085276, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:14:57 -0400 (0:00:00.367) 0:02:20.931 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:14:57 -0400 (0:00:00.043) 0:02:20.975 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:14:57 -0400 (0:00:00.043) 0:02:21.018 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:14:57 -0400 (0:00:00.051) 0:02:21.069 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:14:57 -0400 (0:00:00.042) 0:02:21.112 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.042) 0:02:21.154 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.104) 0:02:21.258 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.042) 0:02:21.301 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.043) 0:02:21.344 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.053) 0:02:21.397 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.078) 0:02:21.476 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.041) 0:02:21.518 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.041) 0:02:21.559 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.078) 0:02:21.638 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.097) 0:02:21.735 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.041) 0:02:21.776 ******* included: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.079) 0:02:21.856 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.041) 0:02:21.897 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.041) 0:02:21.938 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.083) 0:02:22.021 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.050) 0:02:22.072 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:14:58 -0400 (0:00:00.040) 0:02:22.112 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:14:59 -0400 (0:00:00.041) 0:02:22.154 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 12:14:59 -0400 (0:00:00.100) 0:02:22.254 ******* TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Wednesday 02 April 2025 12:14:59 -0400 (0:00:00.170) 0:02:22.424 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Wednesday 02 April 2025 12:14:59 -0400 (0:00:00.126) 0:02:22.550 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Wednesday 02 April 2025 12:14:59 -0400 (0:00:00.080) 0:02:22.631 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Wednesday 02 April 2025 12:14:59 -0400 (0:00:00.072) 0:02:22.703 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Wednesday 02 April 2025 12:14:59 -0400 (0:00:00.072) 0:02:22.776 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Wednesday 02 April 2025 12:14:59 -0400 (0:00:00.048) 0:02:22.824 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Wednesday 02 April 2025 12:14:59 -0400 (0:00:00.053) 0:02:22.878 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Wednesday 02 April 2025 12:15:02 -0400 (0:00:02.858) 0:02:25.736 ******* skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Wednesday 02 April 2025 12:15:02 -0400 (0:00:00.155) 0:02:25.892 ******* skipping: [managed-node2] => { 
"changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Wednesday 02 April 2025 12:15:02 -0400 (0:00:00.070) 0:02:25.962 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Wednesday 02 April 2025 12:15:02 -0400 (0:00:00.068) 0:02:26.031 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Wednesday 02 April 2025 12:15:02 -0400 (0:00:00.051) 0:02:26.082 ******* [WARNING]: TASK: fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Wednesday 02 April 2025 12:15:03 -0400 (0:00:00.059) 0:02:26.142 ******* ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:13:57 EDT", "ActiveEnterTimestampMonotonic": "326139129", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target sysinit.target polkit.service dbus.socket dbus.service system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:13:56 EDT", "AssertTimestampMonotonic": "325830531", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", 
"CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ConditionTimestampMonotonic": "325830530", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service iptables.service ip6tables.service shutdown.target ipset.service nftables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12711", "ExecMainStartTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ExecMainStartTimestampMonotonic": "325832688", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Wed 2025-04-02 12:13:56 EDT] ; stop_time=[n/a] ; pid=12711 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:13:56 EDT", "InactiveExitTimestampMonotonic": "325832722", "InvocationID": "41b8036e16214fc68c244a42727639e9", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", 
"LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12711", "MemoryAccounting": "yes", "MemoryCurrent": "40091648", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:13:57 EDT", "StateChangeTimestampMonotonic": "326139129", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Wed 2025-04-02 12:13:57 EDT", "WatchdogTimestampMonotonic": "326139126", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Wednesday 02 April 2025 12:15:03 -0400 (0:00:00.540) 0:02:26.682 ******* ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:13:57 EDT", "ActiveEnterTimestampMonotonic": 
"326139129", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target sysinit.target polkit.service dbus.socket dbus.service system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:13:56 EDT", "AssertTimestampMonotonic": "325830531", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ConditionTimestampMonotonic": "325830530", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service iptables.service ip6tables.service shutdown.target ipset.service nftables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12711", "ExecMainStartTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ExecMainStartTimestampMonotonic": "325832688", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Wed 2025-04-02 12:13:56 EDT] ; stop_time=[n/a] ; pid=12711 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:13:56 EDT", 
"InactiveExitTimestampMonotonic": "325832722", "InvocationID": "41b8036e16214fc68c244a42727639e9", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12711", "MemoryAccounting": "yes", "MemoryCurrent": "40091648", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:13:57 EDT", "StateChangeTimestampMonotonic": "326139129", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", 
"Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Wed 2025-04-02 12:13:57 EDT", "WatchdogTimestampMonotonic": "326139126", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Wednesday 02 April 2025 12:15:04 -0400 (0:00:00.550) 0:02:27.233 ******* ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/libexec/platform-python", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Wednesday 02 April 2025 12:15:04 -0400 (0:00:00.060) 0:02:27.293 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Wednesday 02 April 2025 12:15:04 -0400 (0:00:00.050) 0:02:27.344 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Wednesday 02 April 2025 12:15:04 -0400 (0:00:00.043) 0:02:27.387 ******* [WARNING]: TASK: fedora.linux_system_roles.firewall : Configure firewall: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. ok: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "15001-15003/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Wednesday 02 April 2025 12:15:04 -0400 (0:00:00.592) 0:02:27.980 ******* [WARNING]: TASK: fedora.linux_system_roles.firewall : Gather firewall config information: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. 
skipping: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "port": "15001-15003/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.188) 0:02:28.169 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.071) 0:02:28.240 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.047) 0:02:28.287 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.051) 0:02:28.339 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.046) 0:02:28.386 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.043) 0:02:28.430 ******* skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.065) 0:02:28.495 ******* redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Wednesday 02 April 
2025 12:15:05 -0400 (0:00:00.144) 0:02:28.640 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.137) 0:02:28.778 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.051) 0:02:28.829 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.076) 0:02:28.905 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.046) 0:02:28.952 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.045) 0:02:28.997 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.046) 0:02:29.044 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Wednesday 02 April 2025 12:15:05 -0400 (0:00:00.045) 0:02:29.089 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_python_version is version('3', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Wednesday 02 April 2025 
12:15:05 -0400 (0:00:00.044) 0:02:29.134 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46 Wednesday 02 April 2025 12:15:08 -0400 (0:00:02.871) 0:02:32.006 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_os_family == \"Suse\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 Wednesday 02 April 2025 12:15:08 -0400 (0:00:00.071) 0:02:32.077 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:72 Wednesday 02 April 2025 12:15:11 -0400 (0:00:02.963) 0:02:35.040 ******* skipping: [managed-node2] => { "false_condition": "__selinux_is_transactional | d(false)" } TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:77 Wednesday 02 April 2025 12:15:11 -0400 (0:00:00.044) 0:02:35.085 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:82 Wednesday 02 April 2025 12:15:11 -0400 (0:00:00.042) 0:02:35.127 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Refresh facts] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89 Wednesday 02 April 2025 12:15:12 -0400 (0:00:00.041) 0:02:35.169 ******* ok: [managed-node2] TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:5 Wednesday 02 April 2025 12:15:12 -0400 (0:00:00.745) 0:02:35.915 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_selinux.status == \"enabled\" and (selinux_state or selinux_policy)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:13 Wednesday 02 April 2025 12:15:12 -0400 (0:00:00.049) 0:02:35.964 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_selinux.status == \"disabled\" and selinux_state", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] ********* task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:21 Wednesday 02 April 2025 12:15:12 -0400 (0:00:00.043) 0:02:36.008 ******* ok: [managed-node2] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:25 Wednesday 02 April 2025 12:15:12 -0400 (0:00:00.050) 0:02:36.058 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:30 Wednesday 02 April 2025 12:15:12 -0400 (0:00:00.039) 0:02:36.098 ******* skipping: [managed-node2] => { "false_condition": "ansible_selinux.status == \"disabled\"" } TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:35 Wednesday 02 April 2025 12:15:12 -0400 (0:00:00.040) 0:02:36.139 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:43 Wednesday 02 April 2025 12:15:13 -0400 (0:00:00.039) 0:02:36.178 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:48 Wednesday 02 April 2025 12:15:13 -0400 (0:00:00.126) 0:02:36.305 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:53 Wednesday 02 April 2025 12:15:13 -0400 (0:00:00.067) 0:02:36.373 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:58 Wednesday 02 April 2025 12:15:13 -0400 (0:00:00.077) 0:02:36.450 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:63 Wednesday 02 April 2025 12:15:13 -0400 (0:00:00.067) 0:02:36.518 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set SELinux file 
contexts] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:74 Wednesday 02 April 2025 12:15:13 -0400 (0:00:00.067) 0:02:36.586 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87 Wednesday 02 April 2025 12:15:13 -0400 (0:00:00.067) 0:02:36.653 ******* ok: [managed-node2] => (item={'ports': '15001-15003', 'setype': 'http_port_t'}) => { "__selinux_item": { "ports": "15001-15003", "setype": "http_port_t" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "15001-15003" ], "proto": "tcp", "setype": "http_port_t", "state": "present" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:99 Wednesday 02 April 2025 12:15:14 -0400 (0:00:01.026) 0:02:37.680 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 Wednesday 02 April 2025 12:15:14 -0400 (0:00:00.067) 0:02:37.747 ******* ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:13dad22da122be9f7d5df4dbedae6a515323542fdc1a7e466d7a1a3d36d29731", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:8bd25829d921be0b5adf92ddaca7ab94cedca1d57796749cfa63571b6550e3da", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:2699d826efd46176017695c768804c505a54b277b05f1feb9c43a613bab4e6aa", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:99920dd4e0855870f7e6f9666928d13fe18ddccca9d38b92ea70a6ce3c8c7539", "enabled": 1 } }, "aiccu": { "100": { "checksum": "sha256:a7aedc8354b4335412871adfd2ab5b0c6da1ea63c8dd797718e4214a5d511bb5", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:8adb5c3a5ed74695e975eecbf290640b179eb6345a7740745ecfe3164efe209f", "enabled": 1 } }, "ajaxterm": { "100": { "checksum": "sha256:d3a03c2837d5dde7145e27902ff8578e00734ab34e8ea1a45aee58b83e9ad6d1", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:202f94345fba8f4bc942dc9b75bbb6eea3b4cb02411cf6ed79858d72aa883c89", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:f9a99d97370017307349a154ce479969395bbbfe434e4829573269f770efdd0d", "enabled": 1 } }, "amtu": { "100": { "checksum": "sha256:bc9934a2ae61fa117614f201479966d788484f3a7382de4ebad99790a465e2b7", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:b8aabc624243533d483c3dd5574a490a43e7ec0f2f7940798c12b4089bbd0642", "enabled": 1 } }, "antivirus": { "100": { "checksum": "sha256:1de6460ccaea5a5749eba17489b9765035c8202eb9492485ff39157564001a2c", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:1a0c38364558bebdae3efaa1fcf8be232184dcddcaab345bba1c40bf239dd0ed", "enabled": 1 } }, "apcupsd": { "100": { "checksum": "sha256:175308edb201092c22791f419d32da3f661e7ccfb9c5d5855ad753405c10023b", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:a1410f65d6bf017caedaffaa59016877686099fb7df3c4d801136de79a61795e", "enabled": 1 } }, "application": { "100": { "checksum": 
"sha256:a8e9d90aa1188068ca66be55c4d8abf9982666171bbdd8d4da1f2a254c34a080", "enabled": 1 } }, "arpwatch": { "100": { "checksum": "sha256:2cb8afd237d6bc5693e5d54be1a455b6ed632fbbe76cea406163f9c48d00e79f", "enabled": 1 } }, "asterisk": { "100": { "checksum": "sha256:0b66b387174001e926cf1454c3516bb32d96610a0f598065fe6d7a917ca897fe", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:dcd9e7f5e71fb9f7aace30b5755efcbf85fe88f884d4253cc9abcad1c44e5f4d", "enabled": 1 } }, "authconfig": { "100": { "checksum": "sha256:bdb8072e463c84cb01e6933093428be2b6ee5299d82e26730b12dd2b66d89355", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:a89b04c7a40bb373de2bbb0a2210cca454e7d4a805321fbe65462ae5551db656", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:41ec4e0c5c46118cb4dfa8c8b1834f330dce4ffdea3d534a8d5007a63b3e5262", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:7628cb8340258102798a6e36902d0210e2051ffb9fb4f7a1e4c62a612edfe6fa", "enabled": 1 } }, "awstats": { "100": { "checksum": "sha256:9b92e64a3331076ad443862aa2ba98a2c4d9b00638bf19bb9726f572dee5eff4", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:32cedcc57f6a973ac5adc16d8df343fc1ca4b3716f7cdcdae0d2490a6e5765ac", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:d99ed290beecf2b10a557a21b06b63cabc28dab4050f2e7197d2cb9e30519fd3", "enabled": 1 } }, "bcfg2": { "100": { "checksum": "sha256:ea510637d47b7fabc3f617f8a6f3ca3172bf9215c2d6b64ad19cd5d8819c8b6b", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:39520749f8aba46f975a87187975d8dcd014ad67d22515951f51fa3fd1b0478f", "enabled": 1 } }, "bitlbee": { "100": { "checksum": "sha256:bf04e481614825a35c26a547b19098ff1c8acd0d915c5b4f938b9fa595459d00", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:ca870c95742bf987a2e739286cbcb998b58c091a422251fdd8de57228b28fd96", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:7e4b2b3df3962273436b561c806c816fe4b1e5d6781efa33a7109b05f796edd7", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:da457ef2ce595c3bf9f70697029ea90e96472ae562f685a7f919a7778a778d09", "enabled": 1 } }, "boinc": { "100": { "checksum": "sha256:d74bd3b6b3850c30b5bbf77822ab82b43f36600e4f76cd68674ef361328afb05", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:4ccf41e247c5a7066042a0ebaae492805a1d640f777e8e771701f340a76bce30", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:46e55021d6c4cede091a992ab33521bb1aba4ca1d44879d778973b279204933c", "enabled": 1 } }, "brctl": { "100": { "checksum": "sha256:f9645adde2441e43369a255c6a194f01c6f5800347ad710ce3e147df884b98aa", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:603734d4772f482f282eb217c03647f705d66de27fc927c64e02787369b0f78a", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:326d2a188603c908cdae3c9dcdae6bda37b98ec4cc23f3b31878e2bbd0cd33b2", "enabled": 1 } }, "bumblebee": { "100": { "checksum": "sha256:e8ca8d5318a68243441fdb993fbab6d566f7462fd5557b55733f8ddbfcc4b276", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:86fe9c1aa8b2d7a6bdd9bd8d0c7a41a7ae0e4e14e32eaea6cb920367c2f495d7", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:1069377693a5d730d57e4ddd6f73ce20b67b595aae90a16459e852d238163b48", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:880b626c3d04c5669d64ee617ee36a18566e91adeaac67b9527b0a795543575e", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:b9256764ca5e34142e8cffea57fafc2fa66f78dc8c05761f97fa9becd1d77311", "enabled": 1 } }, "ccs": { "100": { 
"checksum": "sha256:ad293ee5e252966d14fa6bf09240f143460df4b928672a33a398a5793777c4e4", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:dda8d62c3bf2503ff9762bd031c35a76cac8059d08592fe23e4d3fe11b0ac8cc", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:b431dd84f2c6b971bc573674fa6c4ee2fedf910b0123ba5d9acb5011c208fd72", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:965ec65dfc98cbabce2350bd52fa7ce92c2f4ab4704348f1555f2a3d9edfd1b8", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:77f0299f67e43927eacb553d1002beeebc3098b4bee64d8dc3dadb8fd23fbb5c", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:c78b908838f1d64ee9ebb0a51b7fa438527716936471a573e1b4b7c393bd6b8d", "enabled": 1 } }, "cgdcbxd": { "100": { "checksum": "sha256:5d3633e0b77db69721e4f64167d7e5f7779c3e5fa76e095d25f8467f2a0bdfec", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:9368c6c54bd5ec6f20e4c3b47c86e60af07346c4e86e525b6bd7288b54b7e774", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d31ce9d2fe78cafcd5e3c8decf22ae1e9ea6f74026ca65b6320afe9a33cd609a", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:7d9624729861397cf7720c2324c65489a3d30485e6a884ab1ff9a8ca22efa678", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:fc169721c78f5b0857ed8312e59ba4c134b685c4c322dae242b92e815e35e6fb", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:02c20398b9eff51ed431b7ad739a6015d2451b4bf6e3e5da380606d85a77852c", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4f90655d2243cfc32ea7436a953cccb8a34af895f83361235a3a5cda40dbc75f", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:ba78a422a10b65591c48cb038f8a55614944163f3140275852d293fb0c548bfa", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:481f5fbc7810a5a81851edbe5a6b124141257f5fbbb83d8830ae0a34924ed3d9", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:8f8fb986f15b8b7c5c250d250fdbbb2f78874e13394105c9c486488a16e94c91", "enabled": 1 } }, "cobbler": { "100": { "checksum": "sha256:e0e264b9cc83962dbbb27c152a72f01c6a355467c4e845b52e65c8b88d8d75d6", "enabled": 1 } }, "cockpit": { "100": { "checksum": "sha256:cb7fccd94903a6e256a586d758085f6f59c0f8b1c5b4cb99536915526d2224ec", "enabled": 1 } }, "collectd": { "100": { "checksum": "sha256:7f08e2e248d33162dc9b237c37ed3a3dba0511bbcc71d87482e95093fb8c6456", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:86e58c9f12c519a2c3b090b64a276722374054ea900c775b2f8ab4ef2867dcf0", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:1d57ffaad6b96e3ca8ac82c23b52d58d81e1f69f5d54a648a16da8ffa8070e53", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:dbc3f2f0c12f9aeed14056fd7e7c46a4ecab3569198f891643172cd032f3fc00", "enabled": 1 } }, "conman": { "100": { "checksum": "sha256:1270caf15af248a487cd5ce728daae2699ffd9139823c805ec49213ab1c835cb", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:56fd7d7a550dbc4188b93afd0fde8c706623b3a5d26db265ee016967ba4ddfee", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:5bd7a7acc191766583d933b04321e64657138959bf40a4d2986b013b942c4ba8", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:301be7dafa07cdc68b4e5ade7e1a07017fab3efd85986bdfab7faa9466a95836", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:12b2e3e7314bda4e76d3883901e6470927e85343f742fb44b174ce968f1ad8b5", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:40ae5f173004741838002644e5bff73cf16f2f3a1928c45fa17674f9a0df5148", "enabled": 1 } }, 
"cpucontrol": { "100": { "checksum": "sha256:1485a6d64d00619898d2789d27391f2a57a7fb1f0e8c73daf59baca8641564a3", "enabled": 1 } }, "cpufreqselector": { "100": { "checksum": "sha256:687564eb09acf3e7f1475fe2a133941c36999bd037aa8a794feea2d9f2c26385", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:c16e376ff6c51da1911e68a8a7d42f5730eda45febfd0875c78cac4b9cf6e78c", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:6be0252b3c6bcbfb4c51dfd3ae1ae262f5de153234917ac4d342b18ae0292060", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:06dd65a4361bf8076c14b322dd30003295c0b9d75bf1ae610961b13a1f9431da", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:3d5e5bbf131d98d95f7f1431893eb137bd833dbfd8469f9c386d72bb4e8f9b9a", "enabled": 1 } }, "cvs": { "100": { "checksum": "sha256:bbc8d76cc8609849d5b078c5b2ac7364470a06d77d67b97d5f58429d7b679e33", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:b1a41211ae3cf69b819df517eccd0fda2088c27685dad68de64531b9794ec518", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:60defb1f6feeb1d607734c4912e52e03bf5b0c27cb6f31a37fa7e05f3497b323", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:1034e2442c975dd2ccf84791b1a826d02032f13762d57c3485e51e2b9a7dc03f", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:40306590ef444152ae18b65040d85442c14853a9cc4c31b0224c4d19517d66ea", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:24559eff82b251f9814ae88c36a7cbacda1ed419a80145aef545306e88cb0da8", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:50ea4eb05a06315449092c939e2307436ac6461e47ca69f0d42cc4e321e86280", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:06e414b0a83b49968f62018cecde48dcfe68b2e9d699915367b3e04461188a0d", "enabled": 1 } }, "ddclient": { "100": { "checksum": "sha256:73ca2525a14e3161524f6e8fc0d016430a536002f1cb3833db1334670b458436", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:1bd00b13b9bda18274a771d66f7cba8fe62e5e95ea8f51415da6b1fa1336df1b", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:03b01b781881cc60438bc357bd60596970b8ac019b415969bca8a08358fcbfd1", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:2ad95a78468f7f4ea9a8c044c79c0a4ca9924b41432390ea2863a85c806c9a00", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:c30c819f142d3c719d0ec5741af5a65161770ff140097fe63f7559d55b897500", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:50efdc68200d27ce1a5db99a780aa7b0e84988669961d436d348c7bb8310d181", "enabled": 1 } }, "dirsrv-admin": { "100": { "checksum": "sha256:8d9234157484f6ae8ba22039b44fa19f4de8137be9321e5da393d72d85d89487", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:8b834312a2cb99ab89862f839a1315e78794dd92758785f84c9559285dfbe679", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:2c7fb8c6c52f385b819713f0444a96cfd4e65b7dcb3ca79b932cc12ad9ce903d", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:44f66c5d4f635600ee9d0ba3fdea3896218f1420b5ead89e0f22d71a447f9e97", "enabled": 1 } }, "dnssec": { "100": { "checksum": "sha256:49427a9e92b87db77706e2b81ece254c99d3cd6ba020211e2afae65fab7ad066", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:cc8c3a2ee0233a7f1fdf38837b72ce5fd15efef782a36ab4b9aa2ec339b46fa6", "enabled": 1 } }, "drbd": { "100": { "checksum": "sha256:b66be23c1ded4e548e5369b744c7c2a4dfd7065582517525221177ca67657525", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5dd7221ba40e9b912367289fed8ca116c14da4fb8bd7f28f421c4008855bb9fc", "enabled": 1 } }, 
"entropyd": { "100": { "checksum": "sha256:0f68aeeb1da72efb8c9b1bb7db0a4180b6938672b16f33d1abcd65f5481d85a9", "enabled": 1 } }, "exim": { "100": { "checksum": "sha256:f4c4473ee49394e0e4629023772464a046c476f92b4a727acdf9f6c92711b952", "enabled": 1 } }, "fail2ban": { "100": { "checksum": "sha256:2383cb88b81bc5d87be9f3201a42da526532c4ea8e6d3b3f5023005c0ddf6f17", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:913e66ac5f5ce364e5ea556acfbf77845c25a4beb5ee64599613aa00127c1492", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:63f00993bae4285eff5e993d208ea786785c4331e6947b3a48a97d31145b2e98", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:16c506d472b007f7d36850810ca0fcfd9482d30ce9c0ba790174b78294fd1d74", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:bbf58446f30b93de19e5a19087ee012f8e347fef5e7e8012e64b31a0ec21ab09", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:b61ff17eee03141c9c7bd79d63331ecea733cba4b5b43b87d5141a40cdccdd69", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:c5540b8385c84075dd657e390d77ae886aa9d74b65444b9aa1d858f375819a8c", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:c1ffb7734a0359a7390830d9c6477ab61c45fc026368bfd5e2246523a6439464", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:9af2291d75a2d643f53ff7a98bcabf22effb617329178efea45372d714825de1", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:28bf77389f3e41743b30727a891609172a891466e92c28a919f43e628cc23a4d", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:140caf542903419ee2471fd99ab06aa45899c400402c2580b395b182f24bd225", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:7e8456fdf7807b30e1c257e568ba10305696cf5abdebc70988c288079884d46b", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:1dd6a45b73c7ce77a87af1e87354ada5aa5b2841aaaa045a6b4ae3c4d09f0f8b", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:950d8be99d5349a3d893ba601c518e6b2af0d56c5b55514a45dbd8a3c61c9ecc", "enabled": 1 } }, "gdomap": { "100": { "checksum": "sha256:5040cb99d007fe9368bd37a9a6bf82f891c220ef652443896a0f2f6ca6f818e1", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:f0155b43152b6b4b850d1c4fb7daf16fd77992313b8be314ddb4901314bf913d", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:a60d07665b0ebd25fd54a9d82dad5eb7acbc11a2842dba56d7b9524d26ce14ce", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:5eaccf209092db49c9a48d84e1387c1de76cb153c774c0bd615c001afab28664", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:b522382b64f36cf387cd892b45e916c861bd0a09697bc983eb55b53b0efd3081", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:2c51d19fca6ee40e137245ecb425edc77666d75c42ba583bf74cf13f10ace055", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:420b9cefa6bdb542f6da10de7b36704a91509cf64cd2497e5693a858cfca5e41", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:f821aa6ca5837a2d2de8180e74c267f68da951960c989fb13ebde5833c93738e", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:bf30c4945be0065672fb47f70ad251b1079ada339f61f2679293cb0226d0d57a", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:5373b2332959d6c41c32160018274ab61e3f1abd0f0a5cc2302c45b141a39a9b", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:7528c47be91a81ac19f2f54458309baeb0a232d83a1ccb2bd89fbc8cefb1ddc8", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:91f43e4d5ae283f0aa13c49efea93293dbdecd2b2f4f75db89371eda65b7523e", "enabled": 1 } }, "hddtemp": { "100": { 
"checksum": "sha256:f170e1da6acae4fd7108d22c8cf262916e034f0d3edbdebf3265a922a5355373", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:8b15f72328885c08bfda38082a62feeaa2c6692223a4d2bd1a572820d454a742", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:e9fc1c4032c0346f751e1ef8ad1b3fe3425401b70a6c4354d4485472288e0bc5", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:f70b198e5a5157722b69dc89109c4074a475e1085356cc610cc9b700567c154d", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:370e9eea2b927a2715018b667e9a56ad09af301a90811cd9b041da79f5384b38", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:b54ce6f4960a02d35e19d60bf8a07f7866514893e3193a5f4822c8580a46caa4", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:663b35f3874583ae074924bc068a8dc4c7c144adb60007da6103d1e3505ee37a", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:dedaddef1d7447d25a1e7ff01e60e4545606e556c6770bd3fa94d9331de7a5d7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:ae408578a7160f2feae10269365558c43d9570b392642a92cc20f8ad47c58cce", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:7ff95566a4f2bdb8ca3ec67acdade39e35fdabc57c2f00b989bab3f699f997f8", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:9ad99284192a443aa582e73b46667388b7a219dafae8dfce71a58a82bbae2f6c", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:0e41289d8dce065dcd41fd6cc1e1282efd4a58e7f9e3a2f1abc32f520fbbcc1e", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:32501ab66def044fbc340cb5c656d5743c738bbd6fca5626c36c687419cd8d32", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:d15656cd91a4e4e178a13f7cf910cfc552cc30db881a11ec88833f947edb4561", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:d34fe186922c0e5726ca361343ec3846833ec3e4ab9b019b3d7bac1337383a16", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:d36c66c2c79d338c61c90d4136433e1e3a73435e920eb36d70682dfd5e147e59", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:5a674017cc648e3262757464e5413503154cc1f593da545ce2c4f946991012bc", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:d72428ccbff5521367e00699c142bba64b2bbd44fed35deb29f9530cc0448378", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:15650b2f39ccdfbcb1e4e867a35fce3c2768097e611e0c8ad9cb79ae6c66dd58", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:ccb27142f793095c79f531aae924baaeee5914c84228a09c09b9eca839f3524e", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:90b42f610fa328cdfb98bd0450bd052566f203e51e4a913dd6faded6da7fbe2c", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:5ad49d140265305dc72781a6826d1de4614a33f83bd512acdc2263038ad41206", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:d910afd1bfe836543ded50974dc24ae7bd5fd2609d6a9b2403316dffcd39832d", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:d9a67ce1976ed2e79826d25f33dcb0b0bbde6c090600b605bbaaae45856d12f6", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:9ddb71271d0dbe5cede6179c0ca263e297dc6b65197bde2f7b14ce71f8dde369", "enabled": 1 } }, "kdbus": { "100": { "checksum": "sha256:5969c78be4a03cc91e426bc19b13c5188b5bf8ac11f5e2c21c098c3d68a7e3e3", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:fdde3852d1decda649133c6345680f9353b86a6da2a98a83a8be101c9c25f103", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:66c67280c70a9b897b0f952067438e0eee05f9f48913508b38d745ef88747f32", "enabled": 1 } }, "keepalived": { "100": { "checksum": 
"sha256:c1177567c7bf67bb2d0de17760cecf56e0bb34f50d6fe060dec64ae97a76ecdb", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:826fbe83705494e009b242b88857c425eacba49aadae506ffa2012c80e60f7ae", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:f199811d9ddc8db83864a09c543567fcb2f117b3241967b092bff7c9fdbfbfb6", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:b0a7227a870ea987035e0cd524ad956a68287d0a67dd7135de41c6d5977ff4c2", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:488fb5fd17cf1f630f3e48a853da05f86c06fc58219dc2ae59251865734bf800", "enabled": 1 } }, "kmscon": { "100": { "checksum": "sha256:d64019b11b6a37f6cdc5579d56eb1e19b6a7231501e1cfe2a838d26a2eac6033", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:00070d71dfe2632491305387ffb264127dca4387425015e4cb013d6bce5f95c3", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:891f082452240ad2e726bad777ea787d0f0f8695cc2a75f7439a2badda030d24", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:2df6f3dbad4a513ee1c113e496e8d2f5a19f56015f4a21e7478f2f5b53f36359", "enabled": 1 } }, "l2tp": { "100": { "checksum": "sha256:8e4cb0b0e0d1293d669de0b0e50f68d6d6fbe8e8d830a236a1c0e676f2326fb2", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:d0177bb5873d0e6f9595020a8f39ba06b19e4636ea610175a3afef4aec2719cb", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:6d5f128f2d4fd9137a7c70d0d024703547796a71f70017b3550a31d3450e0435", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:e7eebd050230b358b43435d37ce308c3ba15e2516f4045abf7d26f03ebfbc11c", "enabled": 1 } }, "linuxptp": { "100": { "checksum": "sha256:4132cd51913a3044e453ed0b972db2ef511fdc7b2a1b592d1070177651066ab9", "enabled": 1 } }, "lircd": { "100": { "checksum": "sha256:cc81b79d2834e58bef7928f525c1a1eee5547e81d195444b3bc2741e396ae46b", "enabled": 1 } }, "livecd": { "100": { "checksum": "sha256:805c7bc4ded621b44ecf333d558328e115bba652fcbc91f436cefc948497688e", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:358c4b262655cffbf20f7484aedb22f094509f44d52a1fa3efe3edeafd99317e", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:26f9e78406ecdc968ed670b32db1d10805e66875631558f092f08a6e1f2170dc", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:e07d92775ed25e7a3627bf977452844c67acf473b33075475f433f8be76dd755", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:1f946da2054cc1693209749df12ff01ab6456247d6225733aebb3a7d70a46e20", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:70546c4b3d01f15bc7a69747dbb12fc6bcef5d899f6301f62c0c612c7069082a", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:656067c78ff1246a1a758a213d44307f91cb79336fe74a47015af425e58266fc", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:76cc40f1943fe21959793499bffaf35d0fe53ffc3f6c5a8b31eb96e738a286c2", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:cf4450b03e28762040c29f2a28af238cd4905d1c6bd4c73d656b266c7b9a8a6c", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:9358dc35659b9570d3e8119a088b2693d7de505ea25996dc139517a857888857", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:1247dc4bccfbc9ee42292db4415b21ae00bdef3dc2faeb267f045413da4a1b1b", "enabled": 1 } }, "lttng-tools": { "100": { "checksum": "sha256:79e4a2224ede13cd5f2c0e6e7c61e83efabaf1d05b86f6f7a710599bfc48edaf", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:f56137657dd61a1a8a8844d5d1db01fc03330d17e05457d03f64756b344c32ef", "enabled": 1 } }, "mailman": { "100": { "checksum": 
"sha256:e47811cf3bd8204eaa02c4aab92f3d426f0a3ef97161e1579845d1e03df1fc1d", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:8d447072ab5005ead27f1cb4d96dcbedf09a11182f660c6f59c6d56fd81235d8", "enabled": 1 } }, "man2html": { "100": { "checksum": "sha256:224584babd9e83c242d54fd8c5cd03379b0556005268aac22b15734b913f12e6", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:ae44b8ec7a90ebbc45fdafe89663197b36e47120ad90eb22b475939055ea6924", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:c5d98ec368b145c74b4bf0ea8da3980b17af0c2d00654c5a6973241625f97b12", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:43f1c6f7cfdeaa26891824167cf637a8670785c2674b45d85ce4a7ac77190a36", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:f0f9c7367e9bd196aa463916bd5aab02f6966dad9564a0f2fd070bb2e8410aeb", "enabled": 1 } }, "milter": { "100": { "checksum": "sha256:db190bacd2b84a29971cd1940cd15d606abbfded5c9b956894717afd91fc7a0d", "enabled": 1 } }, "minidlna": { "100": { "checksum": "sha256:0d6ac660d641c1cf707a814ed08e19b9e21547a3eaa7134cab84dbc5fee6b5b2", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:dd2ab85bcba6d204f9dbc7304e8a4940e5d1733d4b9cf4fcb0f4072982c585c3", "enabled": 1 } }, "mip6d": { "100": { "checksum": "sha256:406edf2c78ba0e692d5a78f3c5ca8d641d00131b143332adeaad9f325959683a", "enabled": 1 } }, "mirrormanager": { "100": { "checksum": "sha256:7084de59beaaaf4f630357ec53beff8d0a0ee532ac180fe58e23bfe98f1fdaee", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:7e7e87e302bf847a4c59d69e5af60729e61bada0cc5d6ec17a25a6514476cb48", "enabled": 1 } }, "mock": { "100": { "checksum": "sha256:ae352eccf2f2c9ee8f0d9635517d9ae3c9bba83c617deca8f989e2aae8dd35fa", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:84a60147d2b0121ff6ede6199583cdb5619480d015b2a675c6a0569f91c12d66", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:67c3914aeb25e38fc6bd0793fddc41122dba1547d54e91a78065545fea3b9c87", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:6030afcea9f8d46f25dd7785737edd25eb0f1e50b76eafe4d9103196b722d47e", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:6ba3a594d01a11bc32e7cb554f7386314b5089eb4416fb776edb552a7d53c41d", "enabled": 1 } }, "mongodb": { "100": { "checksum": "sha256:1b2d30558bec7fc08d1d388ae2bb0becd2233c99c9fb173fd00809786ce5eed9", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:346e172be35df168eb0e4fbc8e176b0fda87de9bc5787f7a5ab7667cfe1e3c3b", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:f66c53d993dcd47ea1ff3d797f8fd69fb8161a4ff8a59f54f66a2de9462a55a7", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:7696dbb77c54531cf2574c7ede9f085cf64611dcf7a612530dce2de19f7a8b9f", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:0f67c18c9101b53f57ef857a74d6044701e1d2c347f829a03c0579c545fdbef3", "enabled": 1 } }, "mplayer": { "100": { "checksum": "sha256:f82c0a72506f1011e47ba98e51d5edf906f58fc190d797f5d1a0b8e5cc7d0762", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:afcd9267261b334900420461279b8555fdb4bd783af880fa4606d8afc65e0712", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b0f9753424c504a288f55d495105f6d475d69287b718190ae5192cf7d6ddfde6", "enabled": 1 } }, "munin": { "100": { "checksum": "sha256:29f87ec15fa19e975c83288d55e56bab64855a24c4d8826fe4138eda9a46cc97", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:b028af8f4e726feb8c26037f7c6d6f97383977bd5ee6141ab4e8e1d096d6481f", "enabled": 1 } }, "mythtv": { "100": { 
"checksum": "sha256:e025b2dbf50901632da0ee2aa658105a322275eb120d782cbbf25f2895231154", "enabled": 1 } }, "naemon": { "100": { "checksum": "sha256:a19b3b0540dc52d9506ca7e5d804c2fe9115b3ea28bfd9273030e841e12eb277", "enabled": 1 } }, "nagios": { "100": { "checksum": "sha256:39ca80027ac8585f368bcd57f555ba87bf409f7b7d6c4292c09fd06cc1691c80", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:ef73850f29b4ff4ff904d506d545bf366fd1e7c2ba82a7a7c9a4513e3eee45d9", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:2c9356101a9ddbec94afdd12ca669ba93a1d422c302f9e17b78b18670617d2a1", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:9a32ce04c1dd8e120588c15b3057f838bedce8f14c91576b667295d47800e0ad", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:5e0a20ae09b00fac69ee30a0d55ff73fa692d8350c9c0b0343af61e4f0dd654f", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:9c67b21155929e43e4efd3fc81a85fddc9f1030b47ee4a275789014c1311b972", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:85cac2885d75522eb07189efcc3feeb7775fc6daf5cf3f1a28a1fd2109fe148c", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:b5b133d60b98068eb9480c54285050ae9b49d2fb309eac8994cc91c865ee02d4", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:59919a89d30a5d4b60d6971fa636fb62605d59d214ec614adc279f6cbe2c2b27", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:578bc975477539c659f3608b1445a0c7a9bc7c3f2dcf65b3e55f3a3af89ea564", "enabled": 1 } }, "nsd": { "100": { "checksum": "sha256:d5b03cdc6c8bbc222b8e3d30680b1a7d2d1a49837e7d509aafcf6b2a3a32195b", "enabled": 1 } }, "nslcd": { "100": { "checksum": "sha256:18b003071f4c36307616f7d5de8cff6d4e376af31cb96ce1a5ad6ae3011dfd09", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:f942c7fbe636b9d60327ef9dade1120340c16a2992a6b50db5fbaecd44ffd63d", "enabled": 1 } }, "ntp": { "100": { "checksum": "sha256:686664a71e74b0edd643ab9d556b1aab092fa707935da5ea928a66f54a3c84e0", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:dabc5ce6244d0b0939e9a07bd6bc232e8b666529a0b7b29527e586db8224862c", "enabled": 1 } }, "nut": { "100": { "checksum": "sha256:653e708dec531e483992b25944a689ec9369478d039a5ec62c98294ab73ce8c4", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:4ae55fe839abaaf0ea52b79a5c8f6a906575b83cca29532c2dd52337fb3d5790", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:7b2c87e864b6008f734e1effa48cee1399f41843b9d80d3fd95fbd19e058598f", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:9de0b544b2373ea0f1b7217f9179898479dbff0da36ea9857783de57d06585cf", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:761bf911674d23053eceabbbda8da16c73af5f300929a33a64513dc6e3b2d0af", "enabled": 1 } }, "openct": { "100": { "checksum": "sha256:5674f8e8c975570649e3065460786cb4521a86370bffef5a9de18c69813fe68e", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:bdef6dbb24ae22548634759ac823a8c3e21fde6368cfdfd742480f7027e63ddd", "enabled": 1 } }, "openfortivpn": { "100": { "checksum": "sha256:1a1bff55993510cb6481383b299e1f1a6349ec76e4947bfc8c5b1347e4d30bf4", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:ad3f3f3ba4442930560b291c022e674e6a50e4a37fe027926299b2f6cdec14bd", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:329e4b9d1df5012ace94cbe9cba7dfa7ee7d9f242090072c71aaacbeea78986a", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:31cbbb069354f984e4af75b387778fae1ff4dc6c3e60533357d005ffa960b51c", "enabled": 1 } }, "opensm": { "100": { "checksum": 
"sha256:c0e1bf0a8eb50e0b41fa69bf5b65e2a7c324e4bc7255933a5d2bac3b9ae6f4de", "enabled": 1 } }, "openvpn": { "100": { "checksum": "sha256:a4d12ae8ad77d65d0fcabb20aa4a83886e782d732123f686f88a7d7472384104", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:a54f8a8ea5abb8a33734ecef9d9ad1c0dd090a6e0c5187e80de52f522d2d5e39", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:d6b7bb8f7749265bdaf938abecb2f8f78c6e9e8dc06c1c26b48da227af5a8654", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:67e31eec391bac337ebacb78c096589af4b7e8be6aa05c34cf187ba922a2abde", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:6635ff0231bfc3d88c771553d495941ee0f98871edfe6c86205b087186b3a72f", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:5b4531e9231d399ebec8e6b6870a812c6a64b2daffde35fa57a009b24a01809f", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:912a1c442559d6ab48453d87e2b997bdee3017a54a0b60aeaf7d4603fde0f34b", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:456b3520c26e5f2a913437318715712ae00f64932a27ab1bb8b8b42e0524fa05", "enabled": 1 } }, "pcp": { "100": { "checksum": "sha256:5302332fba7e6724ab7a3c32bd523b10322c20011c6e42ae4e769a49f3efabdd", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:2ee37df066a9ff80439b08c092809f3661e2f9a8ad02134e839627fd23a20c1f", "enabled": 1 } }, "pdns": { "100": { "checksum": "sha256:a1a10cd52eb9dd15bc1ccfed440f6b3d235edc7405a3932f81805d8d94000245", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:4280c40629dd111fd1c89ff867ac72d1e7ddde49dc3d286637e6a86b868e2303", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:6461acd0385c0b1a32bf646fc9e09da0c7ca513954ed8fe2a03f4ee7f6a64fcf", "enabled": 1 } }, "pingd": { "100": { "checksum": "sha256:f7536a518a046b793ea3f74a67d677b878baac44b28268c5ccecbf10715d89ab", "enabled": 1 } }, "piranha": { "100": { "checksum": "sha256:11436fb7942d28e3eca22bc078ee5475f632d8447008a6414f337d4bbc3515dc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:c70e17d1a4d347b38fdfbb2a5dab292e3e0c538ea52fb6cfdef2714e130da0b1", "enabled": 1 } }, "pkcs11proxyd": { "100": { "checksum": "sha256:c9582c89cac1546fa1e5bf9802c5a322e52e2529256f9e5922d5813e40be3646", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:ec40fbe6355370fe69a8ff343744654b06d4134c1518c64269be1f3a49083968", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:7aa52d533e28a3ebf76d879c24bb4e0a58574033d5af6d4d22b716d1156c3f90", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:b32a5cc38b8edcc76b94862cee0c822a5b4d095329f53ab6f7cb014c76346e8c", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:686d9f7652cb2b3d7ce6af2aa620c14a6cbbbdb8d26b3630cfbf6bc34d9e3e6c", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:6098bd8a4f449c01dc7e0f4509663994259fe8848f2f21d1319bf7105bbacc4e", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:f561aef22cda98a94a74bedda09645e50066a77a23d3bdcbb1143b0c62ffe7b2", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:9de99e881e9e2e7e0b78629eec721840da4aa18f78ff5a06e46b7a596c28a09a", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:3101c4c1d54f3e175dc3fcff001c6937a9ffec7781f4095ea38fea88df7e8067", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:a734cc086d7d73ef2ffe7543f82dc50b57619e78e60664cb67a9513790f3335a", "enabled": 1 } }, "postgrey": { "100": 
{ "checksum": "sha256:ef4d03336b66c1184f352f9b3fe8004d870bbf003673d4393bde24ea14b056b8", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:83e6712ba7343dc1346e94c51b75b05839f78bd24f9324d984b7aa9631bd0377", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:df050b0d180947788ab45862c4627ae640c92cf0f6a994a685e4cb5fe46bef76", "enabled": 1 } }, "prelude": { "100": { "checksum": "sha256:88c5fa3da64c127ed6e688f9eba5e50a8f6f98ea3243d29b8b0bc0375ef95420", "enabled": 1 } }, "privoxy": { "100": { "checksum": "sha256:e4a84567c63c892d4cdda3a9a4b15ad5188c093da679a354f00c43b6376a844d", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:98170eed35b67b9097514bcb044a18cc3f757af5f91b5d870ea707d6048cde75", "enabled": 1 } }, "prosody": { "100": { "checksum": "sha256:07e999e033252b28ae41697ddc23b42dbcf4bdc143c9eb1c55475aabc9fc9caf", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:7fc3410de486bf89c4d35989937f424b435c9c4f5398f47f9c840b146197c6ac", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:129978bcb62fdcaed728fb288b321c204575246eb535354e02bfd83089cb0ded", "enabled": 1 } }, "publicfile": { "100": { "checksum": "sha256:9cc75080e25fb5602ab266f1c0d0f16843bdfc561e7af6dec32d669e31bebe98", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:a41fc5d1275d548510a2be0180741f952f0f696f443eaabf03c1abf3f80f499e", "enabled": 1 } }, "puppet": { "100": { "checksum": "sha256:81559a7d5e16e228382840986ae0e414d4a78163a9b51b5d9c05a58e07574e8d", "enabled": 1 } }, "pwauth": { "100": { "checksum": "sha256:8590f80ce91ddd4862ce2beab9ec64deb66d99c5583ff5ee3cbff2e503caaa37", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:917a35c0ec48acfb5166c937e97269acac39541acebad9c1c410bfdbcb483da1", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:cfdb156d23ae6c99b3dbac171ab1626202bf1ae7671fae9f6d6f7241116638dd", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:eb4881c554de7882b4e5590a8efb35a758fc1b3d61bc1502632d6f4e571cb331", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:27d1fb8e99c6d1c75fc8efa8aeaf4303d0dcd8d03cb2992d968a3186d648f4b9", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:f0b2b81a6670b7640d49d49c364635f39272330f08bcdaa23c681bf2ac64e10f", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:791a60cff31fca43e01aa4bfe3a57c5938015db44fd1f64064778dbbcdb6e2e2", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:1cea7f5b37f7a0e722ecbccaa09d95db2b175ec125d62e3898a99081c51c6f96", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:a94b0b917312a73eda50ea641dee49eb00f49df286133fcdb13267fd49ce5d1f", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:159d40315f3f5086a31e6f0a6a90d342783d6f0c97c5feeb9c92808c7345adcf", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:a61f7efd50387ebfd35b675b22a8cba86c6216c0bbd901aab5e8674b5c442777", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:276a24e14ef12f5fadaeab2883d501cb096e01a9ce1be2178a5c50ebfa6b3fcb", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:61561d5f14d9a6597d6e312f5429947baab045d01a729f7cc34406e859fa0015", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:f40066828d25674c525148f890d9cc84ddbb203f5a4aaad616ef2cd3a497fdc3", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:742f881c1a4838ecfc1a55a7f3b78a72267644e3a64e3ec45a191599b5bd8532", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:5dd2b902123ef00065db6ec8d173f37baa26dbe43566bd5f06594ef1243fd5fd", "enabled": 1 } }, "rhcs": { "100": { 
"checksum": "sha256:67f232676ac23535867e2494f04989dbd6b9b6d4bbc67df67dc2edb4d31a8be8", "enabled": 1 } }, "rhev": { "100": { "checksum": "sha256:ee2f26beaa5c6a5d25e03ef9ab30302d6b29b283283683421fab52e29e47fe3d", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:39c550e1c8b149dc6f308b0f9ef238315208453ee064bb1558eff9137531840f", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:16bff56244925c7696fa2da5a4c986132488c352149cc88181bf6b4143fc80ba", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:e999510837aabb3ce118ad61225a846f687588e9a321ffe675b56511191bc323", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:9000bd99784bc22ffda4493b4985e8c5a2e65e87aeaa1cb96ba82d367a27a8be", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:c72c61297cf864a1abda8226de08039c8ae0212808d3f7fd8725b53b955d59f6", "enabled": 1 } }, "rkhunter": { "100": { "checksum": "sha256:d48bd9c5789f4adc396773664402ddeab432caa99597267ccdf24220948e5b3c", "enabled": 1 } }, "rkt": { "100": { "checksum": "sha256:a9414e82cadd2876471465737bd8322eb833e296869ebcefcd9e722ff717d350", "enabled": 1 } }, "rlogin": { "100": { "checksum": "sha256:a4b2e25abc4099a0a54821518b7c824a2ddb7544fb0b5ddde9a0a9be159ac1b2", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:5c867af2674586cc1c41aa3203e3704a0d1400d344a8e257bc61e9eebb86ad03", "enabled": 1 } }, "rolekit": { "100": { "checksum": "sha256:73382d4b8a12fa161dbb5ba36c94e7f0b1f82b1abdf0a4f07ca6c981e08f271b", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:1a2503ebaa997c6b6efd5d2343ea731f73b2f0312f2e8d5578dad2c8a84a94fa", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:e423284f5ed36e7b6c52f581b444a981d5d1c8af6c8dabe8c6cb6c71d3f49fb2", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:53831134210db04fe6e6b0f05e20b8b7307ae8c11e774faec9e1b3aa2b02b5dc", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:acbd671bd661f9f2f25d4798f1646a51075f297c8b086ea9bd3133a00e356432", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:c6110313310591ee2a08b504b04ebd1b98f370b6633172f06ee7c0c7db0a963d", "enabled": 1 } }, "rshd": { "100": { "checksum": "sha256:1340ab5daac926cc1354452869ab5aa78d27ceb110543624d2ffaf93773c394b", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:9dabc52612d567e728786c007f5017c7032c02be3a9201521a530fc91ca789f8", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:33dffe2764dc45bbc59b406a67187c39864412bac07ee089bda30ef09cb70faa", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:9d55dfe843e44e8a93c02ea28b14856edfdb1f820bb647992daa6af11e2dbd37", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:ea77b9f26c8fc61b7fc281099b2f16e75c5b196660fff55a95f96e97935a7a1b", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:4468bfdd23924a96b4cf8c6fa1a3fa606fdd8ac69b7cb17c16a6e39a95908921", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:c97b92abaf053976c89a670d82bf06bc5c7d561ccf03e3ff1ac84be6e01cfc5c", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:18d1a69de368fa621e8ef3234b8ddb40261ced880bb732328a310db5a62a7a0a", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:711df017c1f168e33245144d67289225439bbed701fb1146cb83e9cd63ce1f7a", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:093d9d9793142bb9a8c4375f5f368ca1a4d9beb0cd05329518f91bb9ea51bd06", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:536ce94509d38b40200debf17fbddc16ec9004463fdb3fc42890dde9b3eb56f1", "enabled": 1 } }, "sbd": { "100": { "checksum": 
"sha256:57ecac942ea46af55728362527d70a3e135c3b4711688ddf62596b9a768d9fb0", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:2ab2f52e6bac063f176e007b39cd8a4e43012ea075d82af20fbb3403891b6493", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:7df09c8fa09e105ecf51fee797975603a2df8d15c3a0bf00fdb1d565fe4a6b91", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:9cf04d33aa9dec0b559c892fb20df89fbe1883544d4ac2d6bf6fc319f0a16663", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:e7f9a696e0958d6bdbd6696e67a9b4af62430456d0f278e290db0ea1ee9750b7", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c72355dc70789deb94777acd0b47c2c3ae628e8d90bffb0e0e320941e5ddf3b7", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:98f68238d6ca96277390c160adeed4e3e382d5ded5a88a3909cfebe986b849be", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:10ca96a581ef4b0fa1789160fd71fb340d8b1d13906b42fab6e9119033d4f942", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:3a172b4972f9271250b4d228541c78b0243fd0544ac983db0f590e09674f700d", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:f78edfcb470cd9929f45b6db29ae4924a286ab30a03f80b7bdf3699bccb98314", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:ba2043d9665e2fd3a9e2d103671bfe647060b93d9c02eed2dca3066a0ecfb81d", "enabled": 1 } }, "sge": { "100": { "checksum": "sha256:cf843c98ff4113ded675f79df694549b4f848aecb1295f0a510101e301fbd348", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:c7c49d28e52aba4d168e684b9160a225fbecab373bfbb6963bbe89c93ecb867b", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:be1825562f583305597e5ceb1298ebb60e42c4f270b4a7e3751cf9d9be1b1fac", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:14748519962688e62b7bc7e7c03ad91c1f815c5d33c63f2d60e03340f55609a8", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:9f26cf1e9fa128e98c758a6325525f8547950a2440b6582202228c3c5c2c80d9", "enabled": 1 } }, "smokeping": { "100": { "checksum": "sha256:ae8cbd09d519a42bc01063c4c16f58e96cb3673acb557dcd2d09af444d742db1", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:8aa5f2749eeaef5ae871dc903dad87611e369c92e9b3fc28b4944f75db785a18", "enabled": 1 } }, "smsd": { "100": { "checksum": "sha256:d36a762c836a0e4305773e352fe0f46657784b5d9bf749f02df9c6d15f68d101", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:62bba8f6a236bae902815188cedbb5f3090acf0829247e6808787f8c913d9981", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:68b5e9d408704e44ebf29ba76ae18afdcf6d8aef12794e8e9026997376ce12f8", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:eef39dec8d416650af3f9eeeb518b06dd9a9e09144aa579b6bd6422ba0037d70", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:c19dc2ed34c3d274f8e01647dc2d869ca06d4a9a3009f57c1845fac4d33ed358", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:a46a9508591afb1407fd14441c9c26cd495a3789e3c6792a2eba38a6642e4b97", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:8255ad891466762e31763d6f4791a32aa1eea1147a812020724eab8eb07c1916", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:ce5ba130d5d0ae5fafe8f823b824856590f990ad7c08aa0a5930f5060c252021", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:4170a7354e69ed60e0268389f74042e02a2511a4451ca20b97a63213b8881e1e", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:a4b4b395d2185abfd68edce0f813103ccbedd5d9748f9a41d83cc63dd1465109", "enabled": 1 } }, "sslh": { 
"100": { "checksum": "sha256:5b0cc219f31e88f2fa78bc31d9c6fe6c7af29b4832509635672ca9edc79409c6", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:29cd0921e9effe356c856c3319488adf66c794cbb7d1610e5fca2b730b852939", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:943b25df416f2181aab46b3492aad9336f60a1b5b46187494f43ab516aae9c6a", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:788f2eb60a3d902060a6c5a08b086e2a1e96d213f86b206736da7e37eb21e51d", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:72c10f773d67b4209c39b4bea22e95c66d105f6f13e30f89bcd568eab6c889e3", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:736a46f682ff77d7c2cf54d5c264eb7b149793c12701b96e9be12bb3e6722796", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:0cc5796bfe362c3b28c73f62377c029a5f2321078b6d5f90bad42764415cd038", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:d96538a9cbb09fc38ba701cda88b2a0d199ab7826826d0043e4f07b05418bf84", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:a80606afbcc994e6fdc418cd83182f901d3e5b4b7b36fe262c71a25f43f10af1", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:19dfb362a8f445099eac9281522f0b13794cb9a0893a7acf0b54c15d193ef70e", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:f0e7b74086d47000f8335de5bade5a5a19a5e83bf581f885db92548546b7ea94", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:4614737ea0603530691e6158eb1bd07efa1992cb7ef52c201df3a637d3184cdf", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:f6a5a3b49885a9f780c5a9078cc968673809eaf89ecbe170fbb8a1ed4f521ea2", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:1fadc57b1e46515cbc038e96ae47ab74dd365a910f4d81ec9fb3044c4691260b", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:a5f0e5c340eaf127a166cc50be8170bfce80ccee0c14f32e4cc264089350da1a", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:fd538dbdeba0b4a1c244ba76b8dfef47f61da5a56f24f39fc24c137a9b3b303a", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bc0f37cdcdd0c9014e89e8be6758f7d9c97c67a4e42652459d6107314f059632", "enabled": 1 } }, "tcpd": { "100": { "checksum": "sha256:c78dcf2b9abf8d5ccf9f32b2debf6181a935a7078fe4a527991ab11d2999c4a9", "enabled": 1 } }, "tcsd": { "100": { "checksum": "sha256:e92fb82a2e509e3595d46dd464dac1029ce3a731f117fa67712d119d2878f195", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:fea41add022251126312da78373cb7fd05df1e9fd27547f1b4fc604a774827a1", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:06d4733c0fc7358d738d4dbf53968c9d9017a72b01456be46633364f00a4207d", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:8ba2497a28f4c2a31177811fc0a091a3bb9814f9e02cfc8d84c004718f661e5f", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:6ec8d4d38e58efa04572ac713c9148e7182e7d49713ed89955fabdd512b8eea4", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:c464da2b8e789d74ea2b2914217a194a3c07081b9f383acd2fee9ab77bc525b5", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:2ce98252c7ff59539bb38204ee65898ba6cc701c3dc87417c11e2e7124f448a3", "enabled": 1 } }, "timedatex": { "100": { "checksum": "sha256:df36b9f44f28df1b14b4d6bff01de42c414b947a8e6f1e6efdaa7023250709aa", "enabled": 1 } }, "tlp": { "100": { "checksum": "sha256:7b1d2643c7470dc5b80dee41d18482bb6fd6de55371aba888708a28fe0bb0172", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:2a54cea48dfbeb1c9dad0e167f70aa17970c4f2c76c560330c467051fe3b574b", "enabled": 1 } }, "tomcat": { "100": { 
"checksum": "sha256:de3ed9b8d62d29e80e29a051419a648c154c12f6bb188814ca79120ff1dc263b", "enabled": 1 } }, "tor": { "100": { "checksum": "sha256:16c95ae098af2b964a7a94b5bb6cd1c84d5c7f1254d6411209e4d5cfe87677bc", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:b90ac3a04d3f04c7284f75802ffd69d6c1c3d5c0e6d08c3d0f2d9270b99dd487", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:8f8a1f1b2fea7a9fb8c3853e02c830f5204f691e9223cbdfbc320ec6914725dc", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:24410f1221660b8443af29cb55e42180e268fce722ceed2c99aa202e7dd3cc21", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:dba41aee81015b99378cff2273a56effd1202c0c937c05c63a913243b0641cdc", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:29e7469ef2704943f23c5040531fee8657cfed8440ef44b6268d21e6a9afe309", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:54482715f4fb5bca5c68ff67b9d145d12ad3df1438db97bcadcc32a2fb0f6191", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:13e69d4cbec7926c0ac6fb796749b4286462add3051f1e94554f23e637b81277", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:cb370bbe8bc0d7bca49a4fd1fad652017f4f8587c7c9d3277155fba32987550e", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:bbb2700ca73d867432851e12276a932b1553b034b1cc635f5c6681d6b62dcd3a", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:57a37a5c07af0f7ad80f4f01173e6cd6b604659e2d1b5605c2719dff8bbaf2fb", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:683c0598bdd00543cb696f7ed8cce6b55c658e566141538fc01b3f852af5f697", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:852eb8259277c64b80c91bd1dcbbe85f629e7218ab2f51d39324dcd78a4a278e", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:066e429e71ebcf11014f4ff6d7647c9d6d88ff191c64eeb9793021d16f4cde97", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:74b817fb60fd3ed5f074ef8ff399342ddc49fb2c250b08015dc975edd48f4dfd", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:fa589ab303d10fadd28a3e8d27cc9bc2e55a9b28f28c3f4c7e05968cb00a7cdd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:c5e4e24e89775d797a8988e2d5f72ec7a7dd8387289ede61af7a3ce2173cf167", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:6a3659d3706bc3af4b60e5de7efa9532dcc0c0a6f0c7735ed1300ec2120f9d01", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:f85ad7d20dd77416ab246ee0837b016a648176ec9956f40ff2ac6b3c2924edc5", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:18dab548c81b02f1b0f3efd6e25dd529bb0565e974156d55e42e274d3ccdf704", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:ee8af0b085b727e060ac3c82f1e38c89545505c9b26e849eda22e571064c46e7", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:0f7c8c575b060e863fe17e7ee8c67cc5cc3ea31da734a5428dc62c15f3b15bf4", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:df433826471b1c65a3686b57b4b07872a695d900731feb88cd6dfb76ddcbc5d9", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:4a9362fc5876897cae7062564d54d7f8ae12413c65c4c7fc6709f6407cc27160", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:fb9dda20b16232ac253b148063c9b267356b6f2831650f4c00fa01a6d0a8024a", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:d0ce73ebc7d2f494b669257a9a68106245371b455566654c7062694bcbad35df", "enabled": 1 } }, "vnstatd": { "100": { "checksum": "sha256:1df1aaf42d9c96922226b4828c38b6d315f7a9d3cda60fe54d99be5d618e140d", "enabled": 1 } }, "vpn": { 
"100": { "checksum": "sha256:9ea8931bf1c97618b2e99afb8c60a13d51a84db878bffa4082f6973e23b13eb1", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:43663b66ef8275c639a8076d92fc7da6821e0523c120e2c854839f9dc9d1db66", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:65b78e9b48a6cfe62f6c67c443d3bc667a58d206c09df00870949b6ae7ff8c30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:65560477bd0ae271799a76f75c5a3d46ef0c29f6922aa38e727c95b7e1095a99", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:4d4d609b3be3c2dc659694cfd2076e0c0c0d6446d16a3fb054a9e5f951b29410", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:867139a0cc2cb236ee54575ce6a8568cdbefd6785e8b7f64e09a3041da46b095", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:419d697ac987518dee6095070e2894c4112b50256e59d2b4f6acac585fb087f8", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:ce85b40df4d548aa55eb54bc546943366b654a3af7f602817f1fc499c0c8039e", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:f5d46e297e4e8e0a3f76c1fc8ae96db3ebf5b99ab538a54c171e489ac94ae1f0", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:aeb8895098531d1607e389703c783a3c1e8a8c1ad962397debe65214ff86e29e", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:85f1f1ed778597ec568ab7b9069779c088219d1da283a09382439c6803e7863e", "enabled": 1 } }, "zabbix": { "100": { "checksum": "sha256:476521323be1b84d7ba2539aa208d857678746a76e7e079577d3f46d251637ac", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:7536116b2852a578cbc5d32f7752b6dd3bb1202817db05306e1a16553c1d43b6", "enabled": 1 } }, "zebra": { "100": { "checksum": "sha256:3d18bbdc44c396c7715cce348f9248712132a1c53341d3b5760016d245f86e75", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:44cf07d7e6b15709d131b8b406032d0e6395a84e1e20bc67f9320a1e97c4dfcc", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:1177170edbd47b6fe17fa022a247d9b75b1fb3a5a49721bcff3c7da4f480c702", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:115 Wednesday 02 April 2025 12:15:17 -0400 (0:00:03.269) 0:02:41.016 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:128 Wednesday 02 April 2025 12:15:17 -0400 (0:00:00.052) 0:02:41.069 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:136 Wednesday 02 April 2025 12:15:18 -0400 (0:00:00.113) 0:02:41.183 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:15:18 -0400 (0:00:00.064) 0:02:41.247 ******* ok: [managed-node2] => { "ansible_facts": { 
"__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:15:18 -0400 (0:00:00.042) 0:02:41.290 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:15:18 -0400 (0:00:00.038) 0:02:41.328 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:15:18 -0400 (0:00:00.038) 0:02:41.367 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:15:18 -0400 (0:00:00.042) 0:02:41.410 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Wednesday 02 April 2025 12:15:18 -0400 (0:00:00.199) 0:02:41.610 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "state": "absent" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: nopull\nspec:\n containers:\n - name: nopull\n image: quay.io/libpod/testimage:20210610\n" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Wednesday 02 April 2025 12:15:18 -0400 (0:00:00.088) 0:02:41.698 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "nopull" }, "spec": { "containers": [ { "image": "quay.io/libpod/testimage:20210610", "name": "nopull" } ] } }, "__podman_kube_file": "", "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 Wednesday 02 April 2025 12:15:18 -0400 (0:00:00.090) 0:02:41.789 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_name": "nopull", "__podman_rootless": false }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:38 Wednesday 02 April 2025 12:15:18 -0400 (0:00:00.164) 0:02:41.954 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:15:18 -0400 (0:00:00.134) 0:02:42.088 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:15:19 -0400 (0:00:00.079) 0:02:42.167 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:15:19 -0400 (0:00:00.080) 0:02:42.247 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:15:19 -0400 (0:00:00.096) 0:02:42.344 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610430.4085276, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:15:19 -0400 (0:00:00.419) 0:02:42.764 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:15:19 -0400 (0:00:00.070) 
0:02:42.834 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:15:19 -0400 (0:00:00.070) 0:02:42.905 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:15:19 -0400 (0:00:00.070) 0:02:42.975 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:15:19 -0400 (0:00:00.070) 0:02:43.045 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:15:20 -0400 (0:00:00.155) 0:02:43.201 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:15:20 -0400 (0:00:00.069) 0:02:43.271 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:15:20 -0400 (0:00:00.069) 0:02:43.341 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:43 Wednesday 02 April 2025 12:15:20 -0400 (0:00:00.068) 0:02:43.409 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_kube", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:52 Wednesday 02 April 2025 12:15:20 -0400 (0:00:00.065) 0:02:43.475 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": 
true, "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:60 Wednesday 02 April 2025 12:15:20 -0400 (0:00:00.107) 0:02:43.582 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/etc/containers/ansible-kubernetes.d" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Wednesday 02 April 2025 12:15:20 -0400 (0:00:00.078) 0:02:43.661 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/nopull.yml" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:68 Wednesday 02 April 2025 12:15:20 -0400 (0:00:00.081) 0:02:43.742 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:75 Wednesday 02 April 2025 12:15:20 -0400 (0:00:00.073) 0:02:43.815 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "systemd-escape", "--template", "podman-kube@.service", "/etc/containers/ansible-kubernetes.d/nopull.yml" ], "delta": "0:00:00.005149", "end": "2025-04-02 12:15:20.976189", "rc": 0, "start": "2025-04-02 12:15:20.971040" } STDOUT: podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:83 Wednesday 02 April 2025 12:15:21 -0400 (0:00:00.368) 0:02:44.183 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:2 Wednesday 02 April 2025 12:15:21 -0400 (0:00:00.217) 0:02:44.401 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:10 Wednesday 02 April 2025 12:15:21 -0400 (0:00:00.067) 0:02:44.468 ******* ok: [managed-node2] => { "changed": false, "enabled": false, "failed_when_result": false, "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-nopull.yml.service", "state": "stopped", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket -.mount network-online.target basic.target system-podman\\x2dkube.slice sysinit.target", 
"AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "man:podman-kube-play(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-nopull.yml.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/nopull.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/nopull.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-nopull.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": 
"infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-nopull.yml.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount system-podman\\x2dkube.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : Check if kube file exists] ************ task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:28 Wednesday 02 April 2025 12:15:21 -0400 (0:00:00.565) 0:02:45.034 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610458.0405746, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "d5dc917e3cae36de03aa971a17ac473f86fdf934", "ctime": 1743610457.203572, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 88080647, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610456.9475713, "nlink": 1, "path": "/etc/containers/ansible-kubernetes.d/nopull.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 217, "uid": 0, "version": "3809347408", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Remove pod/containers] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:38 Wednesday 02 April 2025 12:15:22 -0400 (0:00:00.364) 0:02:45.398 ******* changed: [managed-node2] => { "actions": [ "/usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/nopull.yml" ], "changed": true, "failed_when_result": false } STDOUT: Pods stopped: 2ece7bcadf2eddade61e07eca32f44abb4ac4beb0eae18d4bfd24840a8730932 Pods removed: 2ece7bcadf2eddade61e07eca32f44abb4ac4beb0eae18d4bfd24840a8730932 Secrets removed: Volumes removed: TASK [fedora.linux_system_roles.podman : Remove kubernetes yaml file] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:56 Wednesday 02 April 2025 12:15:22 -0400 (0:00:00.561) 0:02:45.960 ******* changed: [managed-node2] => { "changed": true, "path": "/etc/containers/ansible-kubernetes.d/nopull.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:61 Wednesday 02 April 2025 12:15:23 -0400 (0:00:00.396) 0:02:46.357 ******* changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "-f" ], "delta": "0:00:00.031165", "end": "2025-04-02 12:15:23.582592", "rc": 0, "start": "2025-04-02 12:15:23.551427" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:70 Wednesday 02 April 2025 12:15:23 -0400 (0:00:00.460) 0:02:46.817 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:15:23 -0400 (0:00:00.076) 0:02:46.893 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
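
Taken together, the three changed tasks above are the role's whole teardown for a kube spec with state absent; a manual replay, as a sketch, using the same commands the task results report:

    podman kube play --down /etc/containers/ansible-kubernetes.d/nopull.yml  # stop and remove the pod
    rm -f /etc/containers/ansible-kubernetes.d/nopull.yml                    # delete the spec file itself
    podman image prune -f                                                    # drop images nothing references
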
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:15:23 -0400 (0:00:00.047) 0:02:46.940 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:15:23 -0400 (0:00:00.047) 0:02:46.988 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:87 Wednesday 02 April 2025 12:15:23 -0400 (0:00:00.114) 0:02:47.102 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Wednesday 02 April 2025 12:15:24 -0400 (0:00:00.040) 0:02:47.143 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Wednesday 02 April 2025 12:15:24 -0400 (0:00:00.037) 0:02:47.181 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Wednesday 02 April 2025 12:15:24 -0400 (0:00:00.037) 0:02:47.218 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Wednesday 02 April 2025 12:15:24 -0400 (0:00:00.042) 0:02:47.260 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:15:24 -0400 (0:00:00.086) 0:02:47.347 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:15:24 -0400 (0:00:00.120) 0:02:47.467 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | 
difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 12:15:24 -0400 (0:00:00.084) 0:02:47.551 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:15:24 -0400 (0:00:00.075) 0:02:47.626 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:15:24 -0400 (0:00:00.167) 0:02:47.794 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Wednesday 02 April 2025 12:15:24 -0400 (0:00:00.076) 0:02:47.870 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Wednesday 02 April 2025 12:15:24 -0400 (0:00:00.077) 0:02:47.947 ******* ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 
12:15:24 -0400 (0:00:00.155) 0:02:48.102 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:15:26 -0400 (0:00:01.459) 0:02:49.562 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:15:26 -0400 (0:00:00.042) 0:02:49.605 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:15:26 -0400 (0:00:00.050) 0:02:49.656 ******* skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:15:26 -0400 (0:00:00.041) 0:02:49.697 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:15:26 -0400 (0:00:00.050) 0:02:49.748 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:15:26 -0400 (0:00:00.042) 0:02:49.790 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.025600", "end": "2025-04-02 12:15:26.964789", "rc": 0, "start": "2025-04-02 12:15:26.939189" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:15:27 -0400 (0:00:00.452) 0:02:50.243 ******* ok: [managed-node2] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:15:27 -0400 (0:00:00.045) 0:02:50.288 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK 
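
The 4.2 gate above is skipped because podman_version is version("4.2", "<") evaluates false for 4.9.4-dev. A standalone sketch of an equivalent check, approximating the Jinja2 version test with sort -V:

    ver=$(podman --version | awk '{print $3}')   # "4.9.4-dev" in this run
    if ! printf '%s\n' 4.2 "$ver" | sort -C -V; then
        echo "podman $ver is older than the required 4.2" >&2
        exit 1
    fi
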
[fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:15:27 -0400 (0:00:00.045) 0:02:50.333 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "(podman_quadlet_specs | length > 0) or (podman_secrets | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Wednesday 02 April 2025 12:15:27 -0400 (0:00:00.043) 0:02:50.377 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:15:27 -0400 (0:00:00.058) 0:02:50.436 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Wednesday 02 April 2025 12:15:27 -0400 (0:00:00.078) 0:02:50.514 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:15:27 -0400 (0:00:00.069) 0:02:50.584 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:15:27 -0400 (0:00:00.091) 0:02:50.676 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:15:27 -0400 (0:00:00.119) 0:02:50.795 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:15:27 -0400 (0:00:00.048) 0:02:50.843 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:15:27 -0400 (0:00:00.063) 0:02:50.907 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610430.4085276, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.375) 0:02:51.282 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.044) 0:02:51.326 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.043) 0:02:51.370 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.043) 0:02:51.414 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.043) 0:02:51.457 ******* skipping: [managed-node2] 
=> { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.047) 0:02:51.505 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.049) 0:02:51.554 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.044) 0:02:51.599 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.111) 0:02:51.711 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.063) 0:02:51.774 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.087) 0:02:51.862 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.067) 0:02:51.930 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.046) 0:02:51.976 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.094) 0:02:52.071 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:15:28 -0400 (0:00:00.045) 0:02:52.117 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:15:29 -0400 (0:00:00.042) 0:02:52.159 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:15:29 -0400 (0:00:00.139) 0:02:52.299 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:15:29 -0400 (0:00:00.041) 0:02:52.341 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:15:29 -0400 (0:00:00.043) 0:02:52.384 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:15:29 -0400 (0:00:00.082) 0:02:52.467 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:15:29 -0400 (0:00:00.045) 0:02:52.513 
******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:15:29 -0400 (0:00:00.059) 0:02:52.572 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:15:29 -0400 (0:00:00.070) 0:02:52.643 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 12:15:29 -0400 (0:00:00.067) 0:02:52.710 ******* TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Wednesday 02 April 2025 12:15:29 -0400 (0:00:00.318) 0:02:53.029 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Wednesday 02 April 2025 12:15:30 -0400 (0:00:00.130) 0:02:53.159 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Wednesday 02 April 2025 12:15:30 -0400 (0:00:00.083) 0:02:53.243 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Wednesday 02 April 2025 12:15:30 -0400 (0:00:00.074) 0:02:53.318 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Wednesday 02 April 2025 12:15:30 -0400 (0:00:00.076) 0:02:53.394 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Wednesday 02 April 2025 12:15:30 -0400 (0:00:00.074) 0:02:53.469 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Wednesday 02 April 2025 12:15:30 -0400 (0:00:00.072) 0:02:53.542 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Wednesday 02 April 2025 12:15:33 -0400 (0:00:02.837) 0:02:56.379 ******* skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Wednesday 02 April 2025 12:15:33 -0400 (0:00:00.047) 0:02:56.427 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Wednesday 02 April 2025 12:15:33 -0400 (0:00:00.069) 0:02:56.497 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Wednesday 02 April 2025 12:15:33 -0400 (0:00:00.149) 0:02:56.646 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Wednesday 02 April 2025 12:15:33 -0400 (0:00:00.060) 0:02:56.707 ******* skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Wednesday 02 April 2025 12:15:33 -0400 (0:00:00.057) 0:02:56.764 ******* ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:13:57 EDT", "ActiveEnterTimestampMonotonic": "326139129", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target sysinit.target polkit.service dbus.socket dbus.service system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:13:56 EDT", "AssertTimestampMonotonic": "325830531", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ConditionTimestampMonotonic": "325830530", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service iptables.service ip6tables.service shutdown.target ipset.service nftables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12711", "ExecMainStartTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ExecMainStartTimestampMonotonic": "325832688", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Wed 2025-04-02 12:13:56 EDT] ; stop_time=[n/a] ; pid=12711 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", 
"IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:13:56 EDT", "InactiveExitTimestampMonotonic": "325832722", "InvocationID": "41b8036e16214fc68c244a42727639e9", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12711", "MemoryAccounting": "yes", "MemoryCurrent": "40091648", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:13:57 EDT", "StateChangeTimestampMonotonic": "326139129", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", 
"SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Wed 2025-04-02 12:13:57 EDT", "WatchdogTimestampMonotonic": "326139126", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Wednesday 02 April 2025 12:15:34 -0400 (0:00:00.508) 0:02:57.272 ******* ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:13:57 EDT", "ActiveEnterTimestampMonotonic": "326139129", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target sysinit.target polkit.service dbus.socket dbus.service system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:13:56 EDT", "AssertTimestampMonotonic": "325830531", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ConditionTimestampMonotonic": "325830530", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service iptables.service ip6tables.service shutdown.target ipset.service nftables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12711", "ExecMainStartTimestamp": 
"Wed 2025-04-02 12:13:56 EDT", "ExecMainStartTimestampMonotonic": "325832688", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Wed 2025-04-02 12:13:56 EDT] ; stop_time=[n/a] ; pid=12711 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:13:56 EDT", "InactiveExitTimestampMonotonic": "325832722", "InvocationID": "41b8036e16214fc68c244a42727639e9", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12711", "MemoryAccounting": "yes", "MemoryCurrent": "40091648", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", 
"RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:13:57 EDT", "StateChangeTimestampMonotonic": "326139129", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Wed 2025-04-02 12:13:57 EDT", "WatchdogTimestampMonotonic": "326139126", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Wednesday 02 April 2025 12:15:34 -0400 (0:00:00.553) 0:02:57.826 ******* ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/libexec/platform-python", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Wednesday 02 April 2025 12:15:34 -0400 (0:00:00.087) 0:02:57.914 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Wednesday 02 April 2025 12:15:34 -0400 (0:00:00.086) 0:02:58.000 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Wednesday 02 April 2025 12:15:34 -0400 (0:00:00.065) 0:02:58.066 ******* ok: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "15001-15003/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Wednesday 02 April 2025 12:15:35 -0400 (0:00:00.596) 0:02:58.662 ******* 
skipping: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "port": "15001-15003/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Wednesday 02 April 2025 12:15:35 -0400 (0:00:00.096) 0:02:58.758 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Wednesday 02 April 2025 12:15:35 -0400 (0:00:00.082) 0:02:58.841 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Wednesday 02 April 2025 12:15:35 -0400 (0:00:00.071) 0:02:58.913 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Wednesday 02 April 2025 12:15:35 -0400 (0:00:00.178) 0:02:59.091 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Wednesday 02 April 2025 12:15:36 -0400 (0:00:00.067) 0:02:59.159 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Wednesday 02 April 2025 12:15:36 -0400 (0:00:00.069) 0:02:59.228 ******* skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:15:36 -0400 (0:00:00.111) 0:02:59.340 ******* redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Wednesday 02 April 
2025 12:15:36 -0400 (0:00:00.245) 0:02:59.586 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Wednesday 02 April 2025 12:15:36 -0400 (0:00:00.128) 0:02:59.714 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Wednesday 02 April 2025 12:15:36 -0400 (0:00:00.083) 0:02:59.798 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Wednesday 02 April 2025 12:15:36 -0400 (0:00:00.221) 0:03:00.019 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10 Wednesday 02 April 2025 12:15:36 -0400 (0:00:00.075) 0:03:00.094 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17 Wednesday 02 April 2025 12:15:37 -0400 (0:00:00.075) 0:03:00.170 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22 Wednesday 02 April 2025 12:15:37 -0400 (0:00:00.075) 0:03:00.246 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Wednesday 02 April 2025 12:15:37 -0400 (0:00:00.069) 0:03:00.315 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_python_version is version('3', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Wednesday 02 April 2025 
12:15:37 -0400 (0:00:00.048) 0:03:00.364 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46 Wednesday 02 April 2025 12:15:40 -0400 (0:00:02.881) 0:03:03.246 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_os_family == \"Suse\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 Wednesday 02 April 2025 12:15:40 -0400 (0:00:00.074) 0:03:03.320 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:72 Wednesday 02 April 2025 12:15:43 -0400 (0:00:02.904) 0:03:06.225 ******* skipping: [managed-node2] => { "false_condition": "__selinux_is_transactional | d(false)" } TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:77 Wednesday 02 April 2025 12:15:43 -0400 (0:00:00.079) 0:03:06.304 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:82 Wednesday 02 April 2025 12:15:43 -0400 (0:00:00.161) 0:03:06.466 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Refresh facts] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89 Wednesday 02 April 2025 12:15:43 -0400 (0:00:00.072) 0:03:06.538 ******* ok: [managed-node2] TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:5 Wednesday 02 April 2025 12:15:44 -0400 (0:00:00.818) 0:03:07.357 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_selinux.status == \"enabled\" and (selinux_state or selinux_policy)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:13 Wednesday 02 April 2025 12:15:44 -0400 (0:00:00.050) 0:03:07.408 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_selinux.status == \"disabled\" and selinux_state", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] ********* task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:21 Wednesday 02 April 2025 12:15:44 -0400 (0:00:00.045) 0:03:07.453 ******* ok: [managed-node2] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:25 Wednesday 02 April 2025 12:15:44 -0400 (0:00:00.056) 0:03:07.509 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:30 Wednesday 02 April 2025 12:15:44 -0400 (0:00:00.048) 0:03:07.558 ******* skipping: [managed-node2] => { "false_condition": "ansible_selinux.status == \"disabled\"" } TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:35 Wednesday 02 April 2025 12:15:44 -0400 (0:00:00.044) 0:03:07.603 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:43 Wednesday 02 April 2025 12:15:44 -0400 (0:00:00.041) 0:03:07.644 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:48 Wednesday 02 April 2025 12:15:44 -0400 (0:00:00.041) 0:03:07.685 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:53 Wednesday 02 April 2025 12:15:44 -0400 (0:00:00.040) 0:03:07.725 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:58 Wednesday 02 April 2025 12:15:44 -0400 (0:00:00.118) 0:03:07.843 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:63 Wednesday 02 April 2025 12:15:44 -0400 (0:00:00.059) 0:03:07.903 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set SELinux file 
contexts] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:74 Wednesday 02 April 2025 12:15:44 -0400 (0:00:00.062) 0:03:07.965 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87 Wednesday 02 April 2025 12:15:44 -0400 (0:00:00.071) 0:03:08.037 ******* ok: [managed-node2] => (item={'ports': '15001-15003', 'setype': 'http_port_t'}) => { "__selinux_item": { "ports": "15001-15003", "setype": "http_port_t" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "15001-15003" ], "proto": "tcp", "setype": "http_port_t", "state": "present" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:99 Wednesday 02 April 2025 12:15:45 -0400 (0:00:01.033) 0:03:09.071 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 Wednesday 02 April 2025 12:15:45 -0400 (0:00:00.060) 0:03:09.131 ******* ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:13dad22da122be9f7d5df4dbedae6a515323542fdc1a7e466d7a1a3d36d29731", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:8bd25829d921be0b5adf92ddaca7ab94cedca1d57796749cfa63571b6550e3da", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:2699d826efd46176017695c768804c505a54b277b05f1feb9c43a613bab4e6aa", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:99920dd4e0855870f7e6f9666928d13fe18ddccca9d38b92ea70a6ce3c8c7539", "enabled": 1 } }, "aiccu": { "100": { "checksum": "sha256:a7aedc8354b4335412871adfd2ab5b0c6da1ea63c8dd797718e4214a5d511bb5", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:8adb5c3a5ed74695e975eecbf290640b179eb6345a7740745ecfe3164efe209f", "enabled": 1 } }, "ajaxterm": { "100": { "checksum": "sha256:d3a03c2837d5dde7145e27902ff8578e00734ab34e8ea1a45aee58b83e9ad6d1", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:202f94345fba8f4bc942dc9b75bbb6eea3b4cb02411cf6ed79858d72aa883c89", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:f9a99d97370017307349a154ce479969395bbbfe434e4829573269f770efdd0d", "enabled": 1 } }, "amtu": { "100": { "checksum": "sha256:bc9934a2ae61fa117614f201479966d788484f3a7382de4ebad99790a465e2b7", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:b8aabc624243533d483c3dd5574a490a43e7ec0f2f7940798c12b4089bbd0642", "enabled": 1 } }, "antivirus": { "100": { "checksum": "sha256:1de6460ccaea5a5749eba17489b9765035c8202eb9492485ff39157564001a2c", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:1a0c38364558bebdae3efaa1fcf8be232184dcddcaab345bba1c40bf239dd0ed", "enabled": 1 } }, "apcupsd": { "100": { "checksum": "sha256:175308edb201092c22791f419d32da3f661e7ccfb9c5d5855ad753405c10023b", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:a1410f65d6bf017caedaffaa59016877686099fb7df3c4d801136de79a61795e", "enabled": 1 } }, "application": { "100": { "checksum": 
"sha256:a8e9d90aa1188068ca66be55c4d8abf9982666171bbdd8d4da1f2a254c34a080", "enabled": 1 } }, "arpwatch": { "100": { "checksum": "sha256:2cb8afd237d6bc5693e5d54be1a455b6ed632fbbe76cea406163f9c48d00e79f", "enabled": 1 } }, "asterisk": { "100": { "checksum": "sha256:0b66b387174001e926cf1454c3516bb32d96610a0f598065fe6d7a917ca897fe", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:dcd9e7f5e71fb9f7aace30b5755efcbf85fe88f884d4253cc9abcad1c44e5f4d", "enabled": 1 } }, "authconfig": { "100": { "checksum": "sha256:bdb8072e463c84cb01e6933093428be2b6ee5299d82e26730b12dd2b66d89355", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:a89b04c7a40bb373de2bbb0a2210cca454e7d4a805321fbe65462ae5551db656", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:41ec4e0c5c46118cb4dfa8c8b1834f330dce4ffdea3d534a8d5007a63b3e5262", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:7628cb8340258102798a6e36902d0210e2051ffb9fb4f7a1e4c62a612edfe6fa", "enabled": 1 } }, "awstats": { "100": { "checksum": "sha256:9b92e64a3331076ad443862aa2ba98a2c4d9b00638bf19bb9726f572dee5eff4", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:32cedcc57f6a973ac5adc16d8df343fc1ca4b3716f7cdcdae0d2490a6e5765ac", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:d99ed290beecf2b10a557a21b06b63cabc28dab4050f2e7197d2cb9e30519fd3", "enabled": 1 } }, "bcfg2": { "100": { "checksum": "sha256:ea510637d47b7fabc3f617f8a6f3ca3172bf9215c2d6b64ad19cd5d8819c8b6b", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:39520749f8aba46f975a87187975d8dcd014ad67d22515951f51fa3fd1b0478f", "enabled": 1 } }, "bitlbee": { "100": { "checksum": "sha256:bf04e481614825a35c26a547b19098ff1c8acd0d915c5b4f938b9fa595459d00", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:ca870c95742bf987a2e739286cbcb998b58c091a422251fdd8de57228b28fd96", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:7e4b2b3df3962273436b561c806c816fe4b1e5d6781efa33a7109b05f796edd7", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:da457ef2ce595c3bf9f70697029ea90e96472ae562f685a7f919a7778a778d09", "enabled": 1 } }, "boinc": { "100": { "checksum": "sha256:d74bd3b6b3850c30b5bbf77822ab82b43f36600e4f76cd68674ef361328afb05", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:4ccf41e247c5a7066042a0ebaae492805a1d640f777e8e771701f340a76bce30", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:46e55021d6c4cede091a992ab33521bb1aba4ca1d44879d778973b279204933c", "enabled": 1 } }, "brctl": { "100": { "checksum": "sha256:f9645adde2441e43369a255c6a194f01c6f5800347ad710ce3e147df884b98aa", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:603734d4772f482f282eb217c03647f705d66de27fc927c64e02787369b0f78a", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:326d2a188603c908cdae3c9dcdae6bda37b98ec4cc23f3b31878e2bbd0cd33b2", "enabled": 1 } }, "bumblebee": { "100": { "checksum": "sha256:e8ca8d5318a68243441fdb993fbab6d566f7462fd5557b55733f8ddbfcc4b276", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:86fe9c1aa8b2d7a6bdd9bd8d0c7a41a7ae0e4e14e32eaea6cb920367c2f495d7", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:1069377693a5d730d57e4ddd6f73ce20b67b595aae90a16459e852d238163b48", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:880b626c3d04c5669d64ee617ee36a18566e91adeaac67b9527b0a795543575e", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:b9256764ca5e34142e8cffea57fafc2fa66f78dc8c05761f97fa9becd1d77311", "enabled": 1 } }, "ccs": { "100": { 
"checksum": "sha256:ad293ee5e252966d14fa6bf09240f143460df4b928672a33a398a5793777c4e4", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:dda8d62c3bf2503ff9762bd031c35a76cac8059d08592fe23e4d3fe11b0ac8cc", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:b431dd84f2c6b971bc573674fa6c4ee2fedf910b0123ba5d9acb5011c208fd72", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:965ec65dfc98cbabce2350bd52fa7ce92c2f4ab4704348f1555f2a3d9edfd1b8", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:77f0299f67e43927eacb553d1002beeebc3098b4bee64d8dc3dadb8fd23fbb5c", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:c78b908838f1d64ee9ebb0a51b7fa438527716936471a573e1b4b7c393bd6b8d", "enabled": 1 } }, "cgdcbxd": { "100": { "checksum": "sha256:5d3633e0b77db69721e4f64167d7e5f7779c3e5fa76e095d25f8467f2a0bdfec", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:9368c6c54bd5ec6f20e4c3b47c86e60af07346c4e86e525b6bd7288b54b7e774", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d31ce9d2fe78cafcd5e3c8decf22ae1e9ea6f74026ca65b6320afe9a33cd609a", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:7d9624729861397cf7720c2324c65489a3d30485e6a884ab1ff9a8ca22efa678", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:fc169721c78f5b0857ed8312e59ba4c134b685c4c322dae242b92e815e35e6fb", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:02c20398b9eff51ed431b7ad739a6015d2451b4bf6e3e5da380606d85a77852c", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4f90655d2243cfc32ea7436a953cccb8a34af895f83361235a3a5cda40dbc75f", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:ba78a422a10b65591c48cb038f8a55614944163f3140275852d293fb0c548bfa", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:481f5fbc7810a5a81851edbe5a6b124141257f5fbbb83d8830ae0a34924ed3d9", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:8f8fb986f15b8b7c5c250d250fdbbb2f78874e13394105c9c486488a16e94c91", "enabled": 1 } }, "cobbler": { "100": { "checksum": "sha256:e0e264b9cc83962dbbb27c152a72f01c6a355467c4e845b52e65c8b88d8d75d6", "enabled": 1 } }, "cockpit": { "100": { "checksum": "sha256:cb7fccd94903a6e256a586d758085f6f59c0f8b1c5b4cb99536915526d2224ec", "enabled": 1 } }, "collectd": { "100": { "checksum": "sha256:7f08e2e248d33162dc9b237c37ed3a3dba0511bbcc71d87482e95093fb8c6456", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:86e58c9f12c519a2c3b090b64a276722374054ea900c775b2f8ab4ef2867dcf0", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:1d57ffaad6b96e3ca8ac82c23b52d58d81e1f69f5d54a648a16da8ffa8070e53", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:dbc3f2f0c12f9aeed14056fd7e7c46a4ecab3569198f891643172cd032f3fc00", "enabled": 1 } }, "conman": { "100": { "checksum": "sha256:1270caf15af248a487cd5ce728daae2699ffd9139823c805ec49213ab1c835cb", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:56fd7d7a550dbc4188b93afd0fde8c706623b3a5d26db265ee016967ba4ddfee", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:5bd7a7acc191766583d933b04321e64657138959bf40a4d2986b013b942c4ba8", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:301be7dafa07cdc68b4e5ade7e1a07017fab3efd85986bdfab7faa9466a95836", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:12b2e3e7314bda4e76d3883901e6470927e85343f742fb44b174ce968f1ad8b5", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:40ae5f173004741838002644e5bff73cf16f2f3a1928c45fa17674f9a0df5148", "enabled": 1 } }, 
"cpucontrol": { "100": { "checksum": "sha256:1485a6d64d00619898d2789d27391f2a57a7fb1f0e8c73daf59baca8641564a3", "enabled": 1 } }, "cpufreqselector": { "100": { "checksum": "sha256:687564eb09acf3e7f1475fe2a133941c36999bd037aa8a794feea2d9f2c26385", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:c16e376ff6c51da1911e68a8a7d42f5730eda45febfd0875c78cac4b9cf6e78c", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:6be0252b3c6bcbfb4c51dfd3ae1ae262f5de153234917ac4d342b18ae0292060", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:06dd65a4361bf8076c14b322dd30003295c0b9d75bf1ae610961b13a1f9431da", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:3d5e5bbf131d98d95f7f1431893eb137bd833dbfd8469f9c386d72bb4e8f9b9a", "enabled": 1 } }, "cvs": { "100": { "checksum": "sha256:bbc8d76cc8609849d5b078c5b2ac7364470a06d77d67b97d5f58429d7b679e33", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:b1a41211ae3cf69b819df517eccd0fda2088c27685dad68de64531b9794ec518", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:60defb1f6feeb1d607734c4912e52e03bf5b0c27cb6f31a37fa7e05f3497b323", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:1034e2442c975dd2ccf84791b1a826d02032f13762d57c3485e51e2b9a7dc03f", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:40306590ef444152ae18b65040d85442c14853a9cc4c31b0224c4d19517d66ea", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:24559eff82b251f9814ae88c36a7cbacda1ed419a80145aef545306e88cb0da8", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:50ea4eb05a06315449092c939e2307436ac6461e47ca69f0d42cc4e321e86280", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:06e414b0a83b49968f62018cecde48dcfe68b2e9d699915367b3e04461188a0d", "enabled": 1 } }, "ddclient": { "100": { "checksum": "sha256:73ca2525a14e3161524f6e8fc0d016430a536002f1cb3833db1334670b458436", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:1bd00b13b9bda18274a771d66f7cba8fe62e5e95ea8f51415da6b1fa1336df1b", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:03b01b781881cc60438bc357bd60596970b8ac019b415969bca8a08358fcbfd1", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:2ad95a78468f7f4ea9a8c044c79c0a4ca9924b41432390ea2863a85c806c9a00", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:c30c819f142d3c719d0ec5741af5a65161770ff140097fe63f7559d55b897500", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:50efdc68200d27ce1a5db99a780aa7b0e84988669961d436d348c7bb8310d181", "enabled": 1 } }, "dirsrv-admin": { "100": { "checksum": "sha256:8d9234157484f6ae8ba22039b44fa19f4de8137be9321e5da393d72d85d89487", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:8b834312a2cb99ab89862f839a1315e78794dd92758785f84c9559285dfbe679", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:2c7fb8c6c52f385b819713f0444a96cfd4e65b7dcb3ca79b932cc12ad9ce903d", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:44f66c5d4f635600ee9d0ba3fdea3896218f1420b5ead89e0f22d71a447f9e97", "enabled": 1 } }, "dnssec": { "100": { "checksum": "sha256:49427a9e92b87db77706e2b81ece254c99d3cd6ba020211e2afae65fab7ad066", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:cc8c3a2ee0233a7f1fdf38837b72ce5fd15efef782a36ab4b9aa2ec339b46fa6", "enabled": 1 } }, "drbd": { "100": { "checksum": "sha256:b66be23c1ded4e548e5369b744c7c2a4dfd7065582517525221177ca67657525", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5dd7221ba40e9b912367289fed8ca116c14da4fb8bd7f28f421c4008855bb9fc", "enabled": 1 } }, 
"entropyd": { "100": { "checksum": "sha256:0f68aeeb1da72efb8c9b1bb7db0a4180b6938672b16f33d1abcd65f5481d85a9", "enabled": 1 } }, "exim": { "100": { "checksum": "sha256:f4c4473ee49394e0e4629023772464a046c476f92b4a727acdf9f6c92711b952", "enabled": 1 } }, "fail2ban": { "100": { "checksum": "sha256:2383cb88b81bc5d87be9f3201a42da526532c4ea8e6d3b3f5023005c0ddf6f17", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:913e66ac5f5ce364e5ea556acfbf77845c25a4beb5ee64599613aa00127c1492", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:63f00993bae4285eff5e993d208ea786785c4331e6947b3a48a97d31145b2e98", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:16c506d472b007f7d36850810ca0fcfd9482d30ce9c0ba790174b78294fd1d74", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:bbf58446f30b93de19e5a19087ee012f8e347fef5e7e8012e64b31a0ec21ab09", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:b61ff17eee03141c9c7bd79d63331ecea733cba4b5b43b87d5141a40cdccdd69", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:c5540b8385c84075dd657e390d77ae886aa9d74b65444b9aa1d858f375819a8c", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:c1ffb7734a0359a7390830d9c6477ab61c45fc026368bfd5e2246523a6439464", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:9af2291d75a2d643f53ff7a98bcabf22effb617329178efea45372d714825de1", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:28bf77389f3e41743b30727a891609172a891466e92c28a919f43e628cc23a4d", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:140caf542903419ee2471fd99ab06aa45899c400402c2580b395b182f24bd225", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:7e8456fdf7807b30e1c257e568ba10305696cf5abdebc70988c288079884d46b", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:1dd6a45b73c7ce77a87af1e87354ada5aa5b2841aaaa045a6b4ae3c4d09f0f8b", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:950d8be99d5349a3d893ba601c518e6b2af0d56c5b55514a45dbd8a3c61c9ecc", "enabled": 1 } }, "gdomap": { "100": { "checksum": "sha256:5040cb99d007fe9368bd37a9a6bf82f891c220ef652443896a0f2f6ca6f818e1", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:f0155b43152b6b4b850d1c4fb7daf16fd77992313b8be314ddb4901314bf913d", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:a60d07665b0ebd25fd54a9d82dad5eb7acbc11a2842dba56d7b9524d26ce14ce", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:5eaccf209092db49c9a48d84e1387c1de76cb153c774c0bd615c001afab28664", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:b522382b64f36cf387cd892b45e916c861bd0a09697bc983eb55b53b0efd3081", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:2c51d19fca6ee40e137245ecb425edc77666d75c42ba583bf74cf13f10ace055", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:420b9cefa6bdb542f6da10de7b36704a91509cf64cd2497e5693a858cfca5e41", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:f821aa6ca5837a2d2de8180e74c267f68da951960c989fb13ebde5833c93738e", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:bf30c4945be0065672fb47f70ad251b1079ada339f61f2679293cb0226d0d57a", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:5373b2332959d6c41c32160018274ab61e3f1abd0f0a5cc2302c45b141a39a9b", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:7528c47be91a81ac19f2f54458309baeb0a232d83a1ccb2bd89fbc8cefb1ddc8", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:91f43e4d5ae283f0aa13c49efea93293dbdecd2b2f4f75db89371eda65b7523e", "enabled": 1 } }, "hddtemp": { "100": { 
"checksum": "sha256:f170e1da6acae4fd7108d22c8cf262916e034f0d3edbdebf3265a922a5355373", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:8b15f72328885c08bfda38082a62feeaa2c6692223a4d2bd1a572820d454a742", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:e9fc1c4032c0346f751e1ef8ad1b3fe3425401b70a6c4354d4485472288e0bc5", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:f70b198e5a5157722b69dc89109c4074a475e1085356cc610cc9b700567c154d", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:370e9eea2b927a2715018b667e9a56ad09af301a90811cd9b041da79f5384b38", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:b54ce6f4960a02d35e19d60bf8a07f7866514893e3193a5f4822c8580a46caa4", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:663b35f3874583ae074924bc068a8dc4c7c144adb60007da6103d1e3505ee37a", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:dedaddef1d7447d25a1e7ff01e60e4545606e556c6770bd3fa94d9331de7a5d7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:ae408578a7160f2feae10269365558c43d9570b392642a92cc20f8ad47c58cce", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:7ff95566a4f2bdb8ca3ec67acdade39e35fdabc57c2f00b989bab3f699f997f8", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:9ad99284192a443aa582e73b46667388b7a219dafae8dfce71a58a82bbae2f6c", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:0e41289d8dce065dcd41fd6cc1e1282efd4a58e7f9e3a2f1abc32f520fbbcc1e", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:32501ab66def044fbc340cb5c656d5743c738bbd6fca5626c36c687419cd8d32", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:d15656cd91a4e4e178a13f7cf910cfc552cc30db881a11ec88833f947edb4561", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:d34fe186922c0e5726ca361343ec3846833ec3e4ab9b019b3d7bac1337383a16", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:d36c66c2c79d338c61c90d4136433e1e3a73435e920eb36d70682dfd5e147e59", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:5a674017cc648e3262757464e5413503154cc1f593da545ce2c4f946991012bc", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:d72428ccbff5521367e00699c142bba64b2bbd44fed35deb29f9530cc0448378", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:15650b2f39ccdfbcb1e4e867a35fce3c2768097e611e0c8ad9cb79ae6c66dd58", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:ccb27142f793095c79f531aae924baaeee5914c84228a09c09b9eca839f3524e", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:90b42f610fa328cdfb98bd0450bd052566f203e51e4a913dd6faded6da7fbe2c", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:5ad49d140265305dc72781a6826d1de4614a33f83bd512acdc2263038ad41206", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:d910afd1bfe836543ded50974dc24ae7bd5fd2609d6a9b2403316dffcd39832d", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:d9a67ce1976ed2e79826d25f33dcb0b0bbde6c090600b605bbaaae45856d12f6", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:9ddb71271d0dbe5cede6179c0ca263e297dc6b65197bde2f7b14ce71f8dde369", "enabled": 1 } }, "kdbus": { "100": { "checksum": "sha256:5969c78be4a03cc91e426bc19b13c5188b5bf8ac11f5e2c21c098c3d68a7e3e3", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:fdde3852d1decda649133c6345680f9353b86a6da2a98a83a8be101c9c25f103", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:66c67280c70a9b897b0f952067438e0eee05f9f48913508b38d745ef88747f32", "enabled": 1 } }, "keepalived": { "100": { "checksum": 
"sha256:c1177567c7bf67bb2d0de17760cecf56e0bb34f50d6fe060dec64ae97a76ecdb", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:826fbe83705494e009b242b88857c425eacba49aadae506ffa2012c80e60f7ae", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:f199811d9ddc8db83864a09c543567fcb2f117b3241967b092bff7c9fdbfbfb6", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:b0a7227a870ea987035e0cd524ad956a68287d0a67dd7135de41c6d5977ff4c2", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:488fb5fd17cf1f630f3e48a853da05f86c06fc58219dc2ae59251865734bf800", "enabled": 1 } }, "kmscon": { "100": { "checksum": "sha256:d64019b11b6a37f6cdc5579d56eb1e19b6a7231501e1cfe2a838d26a2eac6033", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:00070d71dfe2632491305387ffb264127dca4387425015e4cb013d6bce5f95c3", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:891f082452240ad2e726bad777ea787d0f0f8695cc2a75f7439a2badda030d24", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:2df6f3dbad4a513ee1c113e496e8d2f5a19f56015f4a21e7478f2f5b53f36359", "enabled": 1 } }, "l2tp": { "100": { "checksum": "sha256:8e4cb0b0e0d1293d669de0b0e50f68d6d6fbe8e8d830a236a1c0e676f2326fb2", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:d0177bb5873d0e6f9595020a8f39ba06b19e4636ea610175a3afef4aec2719cb", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:6d5f128f2d4fd9137a7c70d0d024703547796a71f70017b3550a31d3450e0435", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:e7eebd050230b358b43435d37ce308c3ba15e2516f4045abf7d26f03ebfbc11c", "enabled": 1 } }, "linuxptp": { "100": { "checksum": "sha256:4132cd51913a3044e453ed0b972db2ef511fdc7b2a1b592d1070177651066ab9", "enabled": 1 } }, "lircd": { "100": { "checksum": "sha256:cc81b79d2834e58bef7928f525c1a1eee5547e81d195444b3bc2741e396ae46b", "enabled": 1 } }, "livecd": { "100": { "checksum": "sha256:805c7bc4ded621b44ecf333d558328e115bba652fcbc91f436cefc948497688e", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:358c4b262655cffbf20f7484aedb22f094509f44d52a1fa3efe3edeafd99317e", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:26f9e78406ecdc968ed670b32db1d10805e66875631558f092f08a6e1f2170dc", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:e07d92775ed25e7a3627bf977452844c67acf473b33075475f433f8be76dd755", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:1f946da2054cc1693209749df12ff01ab6456247d6225733aebb3a7d70a46e20", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:70546c4b3d01f15bc7a69747dbb12fc6bcef5d899f6301f62c0c612c7069082a", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:656067c78ff1246a1a758a213d44307f91cb79336fe74a47015af425e58266fc", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:76cc40f1943fe21959793499bffaf35d0fe53ffc3f6c5a8b31eb96e738a286c2", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:cf4450b03e28762040c29f2a28af238cd4905d1c6bd4c73d656b266c7b9a8a6c", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:9358dc35659b9570d3e8119a088b2693d7de505ea25996dc139517a857888857", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:1247dc4bccfbc9ee42292db4415b21ae00bdef3dc2faeb267f045413da4a1b1b", "enabled": 1 } }, "lttng-tools": { "100": { "checksum": "sha256:79e4a2224ede13cd5f2c0e6e7c61e83efabaf1d05b86f6f7a710599bfc48edaf", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:f56137657dd61a1a8a8844d5d1db01fc03330d17e05457d03f64756b344c32ef", "enabled": 1 } }, "mailman": { "100": { "checksum": 
"sha256:e47811cf3bd8204eaa02c4aab92f3d426f0a3ef97161e1579845d1e03df1fc1d", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:8d447072ab5005ead27f1cb4d96dcbedf09a11182f660c6f59c6d56fd81235d8", "enabled": 1 } }, "man2html": { "100": { "checksum": "sha256:224584babd9e83c242d54fd8c5cd03379b0556005268aac22b15734b913f12e6", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:ae44b8ec7a90ebbc45fdafe89663197b36e47120ad90eb22b475939055ea6924", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:c5d98ec368b145c74b4bf0ea8da3980b17af0c2d00654c5a6973241625f97b12", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:43f1c6f7cfdeaa26891824167cf637a8670785c2674b45d85ce4a7ac77190a36", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:f0f9c7367e9bd196aa463916bd5aab02f6966dad9564a0f2fd070bb2e8410aeb", "enabled": 1 } }, "milter": { "100": { "checksum": "sha256:db190bacd2b84a29971cd1940cd15d606abbfded5c9b956894717afd91fc7a0d", "enabled": 1 } }, "minidlna": { "100": { "checksum": "sha256:0d6ac660d641c1cf707a814ed08e19b9e21547a3eaa7134cab84dbc5fee6b5b2", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:dd2ab85bcba6d204f9dbc7304e8a4940e5d1733d4b9cf4fcb0f4072982c585c3", "enabled": 1 } }, "mip6d": { "100": { "checksum": "sha256:406edf2c78ba0e692d5a78f3c5ca8d641d00131b143332adeaad9f325959683a", "enabled": 1 } }, "mirrormanager": { "100": { "checksum": "sha256:7084de59beaaaf4f630357ec53beff8d0a0ee532ac180fe58e23bfe98f1fdaee", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:7e7e87e302bf847a4c59d69e5af60729e61bada0cc5d6ec17a25a6514476cb48", "enabled": 1 } }, "mock": { "100": { "checksum": "sha256:ae352eccf2f2c9ee8f0d9635517d9ae3c9bba83c617deca8f989e2aae8dd35fa", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:84a60147d2b0121ff6ede6199583cdb5619480d015b2a675c6a0569f91c12d66", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:67c3914aeb25e38fc6bd0793fddc41122dba1547d54e91a78065545fea3b9c87", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:6030afcea9f8d46f25dd7785737edd25eb0f1e50b76eafe4d9103196b722d47e", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:6ba3a594d01a11bc32e7cb554f7386314b5089eb4416fb776edb552a7d53c41d", "enabled": 1 } }, "mongodb": { "100": { "checksum": "sha256:1b2d30558bec7fc08d1d388ae2bb0becd2233c99c9fb173fd00809786ce5eed9", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:346e172be35df168eb0e4fbc8e176b0fda87de9bc5787f7a5ab7667cfe1e3c3b", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:f66c53d993dcd47ea1ff3d797f8fd69fb8161a4ff8a59f54f66a2de9462a55a7", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:7696dbb77c54531cf2574c7ede9f085cf64611dcf7a612530dce2de19f7a8b9f", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:0f67c18c9101b53f57ef857a74d6044701e1d2c347f829a03c0579c545fdbef3", "enabled": 1 } }, "mplayer": { "100": { "checksum": "sha256:f82c0a72506f1011e47ba98e51d5edf906f58fc190d797f5d1a0b8e5cc7d0762", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:afcd9267261b334900420461279b8555fdb4bd783af880fa4606d8afc65e0712", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b0f9753424c504a288f55d495105f6d475d69287b718190ae5192cf7d6ddfde6", "enabled": 1 } }, "munin": { "100": { "checksum": "sha256:29f87ec15fa19e975c83288d55e56bab64855a24c4d8826fe4138eda9a46cc97", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:b028af8f4e726feb8c26037f7c6d6f97383977bd5ee6141ab4e8e1d096d6481f", "enabled": 1 } }, "mythtv": { "100": { 
"checksum": "sha256:e025b2dbf50901632da0ee2aa658105a322275eb120d782cbbf25f2895231154", "enabled": 1 } }, "naemon": { "100": { "checksum": "sha256:a19b3b0540dc52d9506ca7e5d804c2fe9115b3ea28bfd9273030e841e12eb277", "enabled": 1 } }, "nagios": { "100": { "checksum": "sha256:39ca80027ac8585f368bcd57f555ba87bf409f7b7d6c4292c09fd06cc1691c80", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:ef73850f29b4ff4ff904d506d545bf366fd1e7c2ba82a7a7c9a4513e3eee45d9", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:2c9356101a9ddbec94afdd12ca669ba93a1d422c302f9e17b78b18670617d2a1", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:9a32ce04c1dd8e120588c15b3057f838bedce8f14c91576b667295d47800e0ad", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:5e0a20ae09b00fac69ee30a0d55ff73fa692d8350c9c0b0343af61e4f0dd654f", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:9c67b21155929e43e4efd3fc81a85fddc9f1030b47ee4a275789014c1311b972", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:85cac2885d75522eb07189efcc3feeb7775fc6daf5cf3f1a28a1fd2109fe148c", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:b5b133d60b98068eb9480c54285050ae9b49d2fb309eac8994cc91c865ee02d4", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:59919a89d30a5d4b60d6971fa636fb62605d59d214ec614adc279f6cbe2c2b27", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:578bc975477539c659f3608b1445a0c7a9bc7c3f2dcf65b3e55f3a3af89ea564", "enabled": 1 } }, "nsd": { "100": { "checksum": "sha256:d5b03cdc6c8bbc222b8e3d30680b1a7d2d1a49837e7d509aafcf6b2a3a32195b", "enabled": 1 } }, "nslcd": { "100": { "checksum": "sha256:18b003071f4c36307616f7d5de8cff6d4e376af31cb96ce1a5ad6ae3011dfd09", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:f942c7fbe636b9d60327ef9dade1120340c16a2992a6b50db5fbaecd44ffd63d", "enabled": 1 } }, "ntp": { "100": { "checksum": "sha256:686664a71e74b0edd643ab9d556b1aab092fa707935da5ea928a66f54a3c84e0", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:dabc5ce6244d0b0939e9a07bd6bc232e8b666529a0b7b29527e586db8224862c", "enabled": 1 } }, "nut": { "100": { "checksum": "sha256:653e708dec531e483992b25944a689ec9369478d039a5ec62c98294ab73ce8c4", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:4ae55fe839abaaf0ea52b79a5c8f6a906575b83cca29532c2dd52337fb3d5790", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:7b2c87e864b6008f734e1effa48cee1399f41843b9d80d3fd95fbd19e058598f", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:9de0b544b2373ea0f1b7217f9179898479dbff0da36ea9857783de57d06585cf", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:761bf911674d23053eceabbbda8da16c73af5f300929a33a64513dc6e3b2d0af", "enabled": 1 } }, "openct": { "100": { "checksum": "sha256:5674f8e8c975570649e3065460786cb4521a86370bffef5a9de18c69813fe68e", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:bdef6dbb24ae22548634759ac823a8c3e21fde6368cfdfd742480f7027e63ddd", "enabled": 1 } }, "openfortivpn": { "100": { "checksum": "sha256:1a1bff55993510cb6481383b299e1f1a6349ec76e4947bfc8c5b1347e4d30bf4", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:ad3f3f3ba4442930560b291c022e674e6a50e4a37fe027926299b2f6cdec14bd", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:329e4b9d1df5012ace94cbe9cba7dfa7ee7d9f242090072c71aaacbeea78986a", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:31cbbb069354f984e4af75b387778fae1ff4dc6c3e60533357d005ffa960b51c", "enabled": 1 } }, "opensm": { "100": { "checksum": 
"sha256:c0e1bf0a8eb50e0b41fa69bf5b65e2a7c324e4bc7255933a5d2bac3b9ae6f4de", "enabled": 1 } }, "openvpn": { "100": { "checksum": "sha256:a4d12ae8ad77d65d0fcabb20aa4a83886e782d732123f686f88a7d7472384104", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:a54f8a8ea5abb8a33734ecef9d9ad1c0dd090a6e0c5187e80de52f522d2d5e39", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:d6b7bb8f7749265bdaf938abecb2f8f78c6e9e8dc06c1c26b48da227af5a8654", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:67e31eec391bac337ebacb78c096589af4b7e8be6aa05c34cf187ba922a2abde", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:6635ff0231bfc3d88c771553d495941ee0f98871edfe6c86205b087186b3a72f", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:5b4531e9231d399ebec8e6b6870a812c6a64b2daffde35fa57a009b24a01809f", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:912a1c442559d6ab48453d87e2b997bdee3017a54a0b60aeaf7d4603fde0f34b", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:456b3520c26e5f2a913437318715712ae00f64932a27ab1bb8b8b42e0524fa05", "enabled": 1 } }, "pcp": { "100": { "checksum": "sha256:5302332fba7e6724ab7a3c32bd523b10322c20011c6e42ae4e769a49f3efabdd", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:2ee37df066a9ff80439b08c092809f3661e2f9a8ad02134e839627fd23a20c1f", "enabled": 1 } }, "pdns": { "100": { "checksum": "sha256:a1a10cd52eb9dd15bc1ccfed440f6b3d235edc7405a3932f81805d8d94000245", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:4280c40629dd111fd1c89ff867ac72d1e7ddde49dc3d286637e6a86b868e2303", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:6461acd0385c0b1a32bf646fc9e09da0c7ca513954ed8fe2a03f4ee7f6a64fcf", "enabled": 1 } }, "pingd": { "100": { "checksum": "sha256:f7536a518a046b793ea3f74a67d677b878baac44b28268c5ccecbf10715d89ab", "enabled": 1 } }, "piranha": { "100": { "checksum": "sha256:11436fb7942d28e3eca22bc078ee5475f632d8447008a6414f337d4bbc3515dc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:c70e17d1a4d347b38fdfbb2a5dab292e3e0c538ea52fb6cfdef2714e130da0b1", "enabled": 1 } }, "pkcs11proxyd": { "100": { "checksum": "sha256:c9582c89cac1546fa1e5bf9802c5a322e52e2529256f9e5922d5813e40be3646", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:ec40fbe6355370fe69a8ff343744654b06d4134c1518c64269be1f3a49083968", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:7aa52d533e28a3ebf76d879c24bb4e0a58574033d5af6d4d22b716d1156c3f90", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:b32a5cc38b8edcc76b94862cee0c822a5b4d095329f53ab6f7cb014c76346e8c", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:686d9f7652cb2b3d7ce6af2aa620c14a6cbbbdb8d26b3630cfbf6bc34d9e3e6c", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:6098bd8a4f449c01dc7e0f4509663994259fe8848f2f21d1319bf7105bbacc4e", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:f561aef22cda98a94a74bedda09645e50066a77a23d3bdcbb1143b0c62ffe7b2", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:9de99e881e9e2e7e0b78629eec721840da4aa18f78ff5a06e46b7a596c28a09a", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:3101c4c1d54f3e175dc3fcff001c6937a9ffec7781f4095ea38fea88df7e8067", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:a734cc086d7d73ef2ffe7543f82dc50b57619e78e60664cb67a9513790f3335a", "enabled": 1 } }, "postgrey": { "100": 
{ "checksum": "sha256:ef4d03336b66c1184f352f9b3fe8004d870bbf003673d4393bde24ea14b056b8", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:83e6712ba7343dc1346e94c51b75b05839f78bd24f9324d984b7aa9631bd0377", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:df050b0d180947788ab45862c4627ae640c92cf0f6a994a685e4cb5fe46bef76", "enabled": 1 } }, "prelude": { "100": { "checksum": "sha256:88c5fa3da64c127ed6e688f9eba5e50a8f6f98ea3243d29b8b0bc0375ef95420", "enabled": 1 } }, "privoxy": { "100": { "checksum": "sha256:e4a84567c63c892d4cdda3a9a4b15ad5188c093da679a354f00c43b6376a844d", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:98170eed35b67b9097514bcb044a18cc3f757af5f91b5d870ea707d6048cde75", "enabled": 1 } }, "prosody": { "100": { "checksum": "sha256:07e999e033252b28ae41697ddc23b42dbcf4bdc143c9eb1c55475aabc9fc9caf", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:7fc3410de486bf89c4d35989937f424b435c9c4f5398f47f9c840b146197c6ac", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:129978bcb62fdcaed728fb288b321c204575246eb535354e02bfd83089cb0ded", "enabled": 1 } }, "publicfile": { "100": { "checksum": "sha256:9cc75080e25fb5602ab266f1c0d0f16843bdfc561e7af6dec32d669e31bebe98", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:a41fc5d1275d548510a2be0180741f952f0f696f443eaabf03c1abf3f80f499e", "enabled": 1 } }, "puppet": { "100": { "checksum": "sha256:81559a7d5e16e228382840986ae0e414d4a78163a9b51b5d9c05a58e07574e8d", "enabled": 1 } }, "pwauth": { "100": { "checksum": "sha256:8590f80ce91ddd4862ce2beab9ec64deb66d99c5583ff5ee3cbff2e503caaa37", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:917a35c0ec48acfb5166c937e97269acac39541acebad9c1c410bfdbcb483da1", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:cfdb156d23ae6c99b3dbac171ab1626202bf1ae7671fae9f6d6f7241116638dd", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:eb4881c554de7882b4e5590a8efb35a758fc1b3d61bc1502632d6f4e571cb331", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:27d1fb8e99c6d1c75fc8efa8aeaf4303d0dcd8d03cb2992d968a3186d648f4b9", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:f0b2b81a6670b7640d49d49c364635f39272330f08bcdaa23c681bf2ac64e10f", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:791a60cff31fca43e01aa4bfe3a57c5938015db44fd1f64064778dbbcdb6e2e2", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:1cea7f5b37f7a0e722ecbccaa09d95db2b175ec125d62e3898a99081c51c6f96", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:a94b0b917312a73eda50ea641dee49eb00f49df286133fcdb13267fd49ce5d1f", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:159d40315f3f5086a31e6f0a6a90d342783d6f0c97c5feeb9c92808c7345adcf", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:a61f7efd50387ebfd35b675b22a8cba86c6216c0bbd901aab5e8674b5c442777", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:276a24e14ef12f5fadaeab2883d501cb096e01a9ce1be2178a5c50ebfa6b3fcb", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:61561d5f14d9a6597d6e312f5429947baab045d01a729f7cc34406e859fa0015", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:f40066828d25674c525148f890d9cc84ddbb203f5a4aaad616ef2cd3a497fdc3", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:742f881c1a4838ecfc1a55a7f3b78a72267644e3a64e3ec45a191599b5bd8532", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:5dd2b902123ef00065db6ec8d173f37baa26dbe43566bd5f06594ef1243fd5fd", "enabled": 1 } }, "rhcs": { "100": { 
"checksum": "sha256:67f232676ac23535867e2494f04989dbd6b9b6d4bbc67df67dc2edb4d31a8be8", "enabled": 1 } }, "rhev": { "100": { "checksum": "sha256:ee2f26beaa5c6a5d25e03ef9ab30302d6b29b283283683421fab52e29e47fe3d", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:39c550e1c8b149dc6f308b0f9ef238315208453ee064bb1558eff9137531840f", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:16bff56244925c7696fa2da5a4c986132488c352149cc88181bf6b4143fc80ba", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:e999510837aabb3ce118ad61225a846f687588e9a321ffe675b56511191bc323", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:9000bd99784bc22ffda4493b4985e8c5a2e65e87aeaa1cb96ba82d367a27a8be", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:c72c61297cf864a1abda8226de08039c8ae0212808d3f7fd8725b53b955d59f6", "enabled": 1 } }, "rkhunter": { "100": { "checksum": "sha256:d48bd9c5789f4adc396773664402ddeab432caa99597267ccdf24220948e5b3c", "enabled": 1 } }, "rkt": { "100": { "checksum": "sha256:a9414e82cadd2876471465737bd8322eb833e296869ebcefcd9e722ff717d350", "enabled": 1 } }, "rlogin": { "100": { "checksum": "sha256:a4b2e25abc4099a0a54821518b7c824a2ddb7544fb0b5ddde9a0a9be159ac1b2", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:5c867af2674586cc1c41aa3203e3704a0d1400d344a8e257bc61e9eebb86ad03", "enabled": 1 } }, "rolekit": { "100": { "checksum": "sha256:73382d4b8a12fa161dbb5ba36c94e7f0b1f82b1abdf0a4f07ca6c981e08f271b", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:1a2503ebaa997c6b6efd5d2343ea731f73b2f0312f2e8d5578dad2c8a84a94fa", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:e423284f5ed36e7b6c52f581b444a981d5d1c8af6c8dabe8c6cb6c71d3f49fb2", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:53831134210db04fe6e6b0f05e20b8b7307ae8c11e774faec9e1b3aa2b02b5dc", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:acbd671bd661f9f2f25d4798f1646a51075f297c8b086ea9bd3133a00e356432", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:c6110313310591ee2a08b504b04ebd1b98f370b6633172f06ee7c0c7db0a963d", "enabled": 1 } }, "rshd": { "100": { "checksum": "sha256:1340ab5daac926cc1354452869ab5aa78d27ceb110543624d2ffaf93773c394b", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:9dabc52612d567e728786c007f5017c7032c02be3a9201521a530fc91ca789f8", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:33dffe2764dc45bbc59b406a67187c39864412bac07ee089bda30ef09cb70faa", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:9d55dfe843e44e8a93c02ea28b14856edfdb1f820bb647992daa6af11e2dbd37", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:ea77b9f26c8fc61b7fc281099b2f16e75c5b196660fff55a95f96e97935a7a1b", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:4468bfdd23924a96b4cf8c6fa1a3fa606fdd8ac69b7cb17c16a6e39a95908921", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:c97b92abaf053976c89a670d82bf06bc5c7d561ccf03e3ff1ac84be6e01cfc5c", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:18d1a69de368fa621e8ef3234b8ddb40261ced880bb732328a310db5a62a7a0a", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:711df017c1f168e33245144d67289225439bbed701fb1146cb83e9cd63ce1f7a", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:093d9d9793142bb9a8c4375f5f368ca1a4d9beb0cd05329518f91bb9ea51bd06", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:536ce94509d38b40200debf17fbddc16ec9004463fdb3fc42890dde9b3eb56f1", "enabled": 1 } }, "sbd": { "100": { "checksum": 
"sha256:57ecac942ea46af55728362527d70a3e135c3b4711688ddf62596b9a768d9fb0", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:2ab2f52e6bac063f176e007b39cd8a4e43012ea075d82af20fbb3403891b6493", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:7df09c8fa09e105ecf51fee797975603a2df8d15c3a0bf00fdb1d565fe4a6b91", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:9cf04d33aa9dec0b559c892fb20df89fbe1883544d4ac2d6bf6fc319f0a16663", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:e7f9a696e0958d6bdbd6696e67a9b4af62430456d0f278e290db0ea1ee9750b7", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c72355dc70789deb94777acd0b47c2c3ae628e8d90bffb0e0e320941e5ddf3b7", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:98f68238d6ca96277390c160adeed4e3e382d5ded5a88a3909cfebe986b849be", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:10ca96a581ef4b0fa1789160fd71fb340d8b1d13906b42fab6e9119033d4f942", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:3a172b4972f9271250b4d228541c78b0243fd0544ac983db0f590e09674f700d", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:f78edfcb470cd9929f45b6db29ae4924a286ab30a03f80b7bdf3699bccb98314", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:ba2043d9665e2fd3a9e2d103671bfe647060b93d9c02eed2dca3066a0ecfb81d", "enabled": 1 } }, "sge": { "100": { "checksum": "sha256:cf843c98ff4113ded675f79df694549b4f848aecb1295f0a510101e301fbd348", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:c7c49d28e52aba4d168e684b9160a225fbecab373bfbb6963bbe89c93ecb867b", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:be1825562f583305597e5ceb1298ebb60e42c4f270b4a7e3751cf9d9be1b1fac", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:14748519962688e62b7bc7e7c03ad91c1f815c5d33c63f2d60e03340f55609a8", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:9f26cf1e9fa128e98c758a6325525f8547950a2440b6582202228c3c5c2c80d9", "enabled": 1 } }, "smokeping": { "100": { "checksum": "sha256:ae8cbd09d519a42bc01063c4c16f58e96cb3673acb557dcd2d09af444d742db1", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:8aa5f2749eeaef5ae871dc903dad87611e369c92e9b3fc28b4944f75db785a18", "enabled": 1 } }, "smsd": { "100": { "checksum": "sha256:d36a762c836a0e4305773e352fe0f46657784b5d9bf749f02df9c6d15f68d101", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:62bba8f6a236bae902815188cedbb5f3090acf0829247e6808787f8c913d9981", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:68b5e9d408704e44ebf29ba76ae18afdcf6d8aef12794e8e9026997376ce12f8", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:eef39dec8d416650af3f9eeeb518b06dd9a9e09144aa579b6bd6422ba0037d70", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:c19dc2ed34c3d274f8e01647dc2d869ca06d4a9a3009f57c1845fac4d33ed358", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:a46a9508591afb1407fd14441c9c26cd495a3789e3c6792a2eba38a6642e4b97", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:8255ad891466762e31763d6f4791a32aa1eea1147a812020724eab8eb07c1916", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:ce5ba130d5d0ae5fafe8f823b824856590f990ad7c08aa0a5930f5060c252021", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:4170a7354e69ed60e0268389f74042e02a2511a4451ca20b97a63213b8881e1e", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:a4b4b395d2185abfd68edce0f813103ccbedd5d9748f9a41d83cc63dd1465109", "enabled": 1 } }, "sslh": { 
"100": { "checksum": "sha256:5b0cc219f31e88f2fa78bc31d9c6fe6c7af29b4832509635672ca9edc79409c6", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:29cd0921e9effe356c856c3319488adf66c794cbb7d1610e5fca2b730b852939", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:943b25df416f2181aab46b3492aad9336f60a1b5b46187494f43ab516aae9c6a", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:788f2eb60a3d902060a6c5a08b086e2a1e96d213f86b206736da7e37eb21e51d", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:72c10f773d67b4209c39b4bea22e95c66d105f6f13e30f89bcd568eab6c889e3", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:736a46f682ff77d7c2cf54d5c264eb7b149793c12701b96e9be12bb3e6722796", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:0cc5796bfe362c3b28c73f62377c029a5f2321078b6d5f90bad42764415cd038", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:d96538a9cbb09fc38ba701cda88b2a0d199ab7826826d0043e4f07b05418bf84", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:a80606afbcc994e6fdc418cd83182f901d3e5b4b7b36fe262c71a25f43f10af1", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:19dfb362a8f445099eac9281522f0b13794cb9a0893a7acf0b54c15d193ef70e", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:f0e7b74086d47000f8335de5bade5a5a19a5e83bf581f885db92548546b7ea94", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:4614737ea0603530691e6158eb1bd07efa1992cb7ef52c201df3a637d3184cdf", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:f6a5a3b49885a9f780c5a9078cc968673809eaf89ecbe170fbb8a1ed4f521ea2", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:1fadc57b1e46515cbc038e96ae47ab74dd365a910f4d81ec9fb3044c4691260b", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:a5f0e5c340eaf127a166cc50be8170bfce80ccee0c14f32e4cc264089350da1a", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:fd538dbdeba0b4a1c244ba76b8dfef47f61da5a56f24f39fc24c137a9b3b303a", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bc0f37cdcdd0c9014e89e8be6758f7d9c97c67a4e42652459d6107314f059632", "enabled": 1 } }, "tcpd": { "100": { "checksum": "sha256:c78dcf2b9abf8d5ccf9f32b2debf6181a935a7078fe4a527991ab11d2999c4a9", "enabled": 1 } }, "tcsd": { "100": { "checksum": "sha256:e92fb82a2e509e3595d46dd464dac1029ce3a731f117fa67712d119d2878f195", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:fea41add022251126312da78373cb7fd05df1e9fd27547f1b4fc604a774827a1", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:06d4733c0fc7358d738d4dbf53968c9d9017a72b01456be46633364f00a4207d", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:8ba2497a28f4c2a31177811fc0a091a3bb9814f9e02cfc8d84c004718f661e5f", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:6ec8d4d38e58efa04572ac713c9148e7182e7d49713ed89955fabdd512b8eea4", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:c464da2b8e789d74ea2b2914217a194a3c07081b9f383acd2fee9ab77bc525b5", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:2ce98252c7ff59539bb38204ee65898ba6cc701c3dc87417c11e2e7124f448a3", "enabled": 1 } }, "timedatex": { "100": { "checksum": "sha256:df36b9f44f28df1b14b4d6bff01de42c414b947a8e6f1e6efdaa7023250709aa", "enabled": 1 } }, "tlp": { "100": { "checksum": "sha256:7b1d2643c7470dc5b80dee41d18482bb6fd6de55371aba888708a28fe0bb0172", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:2a54cea48dfbeb1c9dad0e167f70aa17970c4f2c76c560330c467051fe3b574b", "enabled": 1 } }, "tomcat": { "100": { 
"checksum": "sha256:de3ed9b8d62d29e80e29a051419a648c154c12f6bb188814ca79120ff1dc263b", "enabled": 1 } }, "tor": { "100": { "checksum": "sha256:16c95ae098af2b964a7a94b5bb6cd1c84d5c7f1254d6411209e4d5cfe87677bc", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:b90ac3a04d3f04c7284f75802ffd69d6c1c3d5c0e6d08c3d0f2d9270b99dd487", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:8f8a1f1b2fea7a9fb8c3853e02c830f5204f691e9223cbdfbc320ec6914725dc", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:24410f1221660b8443af29cb55e42180e268fce722ceed2c99aa202e7dd3cc21", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:dba41aee81015b99378cff2273a56effd1202c0c937c05c63a913243b0641cdc", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:29e7469ef2704943f23c5040531fee8657cfed8440ef44b6268d21e6a9afe309", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:54482715f4fb5bca5c68ff67b9d145d12ad3df1438db97bcadcc32a2fb0f6191", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:13e69d4cbec7926c0ac6fb796749b4286462add3051f1e94554f23e637b81277", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:cb370bbe8bc0d7bca49a4fd1fad652017f4f8587c7c9d3277155fba32987550e", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:bbb2700ca73d867432851e12276a932b1553b034b1cc635f5c6681d6b62dcd3a", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:57a37a5c07af0f7ad80f4f01173e6cd6b604659e2d1b5605c2719dff8bbaf2fb", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:683c0598bdd00543cb696f7ed8cce6b55c658e566141538fc01b3f852af5f697", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:852eb8259277c64b80c91bd1dcbbe85f629e7218ab2f51d39324dcd78a4a278e", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:066e429e71ebcf11014f4ff6d7647c9d6d88ff191c64eeb9793021d16f4cde97", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:74b817fb60fd3ed5f074ef8ff399342ddc49fb2c250b08015dc975edd48f4dfd", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:fa589ab303d10fadd28a3e8d27cc9bc2e55a9b28f28c3f4c7e05968cb00a7cdd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:c5e4e24e89775d797a8988e2d5f72ec7a7dd8387289ede61af7a3ce2173cf167", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:6a3659d3706bc3af4b60e5de7efa9532dcc0c0a6f0c7735ed1300ec2120f9d01", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:f85ad7d20dd77416ab246ee0837b016a648176ec9956f40ff2ac6b3c2924edc5", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:18dab548c81b02f1b0f3efd6e25dd529bb0565e974156d55e42e274d3ccdf704", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:ee8af0b085b727e060ac3c82f1e38c89545505c9b26e849eda22e571064c46e7", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:0f7c8c575b060e863fe17e7ee8c67cc5cc3ea31da734a5428dc62c15f3b15bf4", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:df433826471b1c65a3686b57b4b07872a695d900731feb88cd6dfb76ddcbc5d9", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:4a9362fc5876897cae7062564d54d7f8ae12413c65c4c7fc6709f6407cc27160", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:fb9dda20b16232ac253b148063c9b267356b6f2831650f4c00fa01a6d0a8024a", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:d0ce73ebc7d2f494b669257a9a68106245371b455566654c7062694bcbad35df", "enabled": 1 } }, "vnstatd": { "100": { "checksum": "sha256:1df1aaf42d9c96922226b4828c38b6d315f7a9d3cda60fe54d99be5d618e140d", "enabled": 1 } }, "vpn": { 
"100": { "checksum": "sha256:9ea8931bf1c97618b2e99afb8c60a13d51a84db878bffa4082f6973e23b13eb1", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:43663b66ef8275c639a8076d92fc7da6821e0523c120e2c854839f9dc9d1db66", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:65b78e9b48a6cfe62f6c67c443d3bc667a58d206c09df00870949b6ae7ff8c30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:65560477bd0ae271799a76f75c5a3d46ef0c29f6922aa38e727c95b7e1095a99", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:4d4d609b3be3c2dc659694cfd2076e0c0c0d6446d16a3fb054a9e5f951b29410", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:867139a0cc2cb236ee54575ce6a8568cdbefd6785e8b7f64e09a3041da46b095", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:419d697ac987518dee6095070e2894c4112b50256e59d2b4f6acac585fb087f8", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:ce85b40df4d548aa55eb54bc546943366b654a3af7f602817f1fc499c0c8039e", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:f5d46e297e4e8e0a3f76c1fc8ae96db3ebf5b99ab538a54c171e489ac94ae1f0", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:aeb8895098531d1607e389703c783a3c1e8a8c1ad962397debe65214ff86e29e", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:85f1f1ed778597ec568ab7b9069779c088219d1da283a09382439c6803e7863e", "enabled": 1 } }, "zabbix": { "100": { "checksum": "sha256:476521323be1b84d7ba2539aa208d857678746a76e7e079577d3f46d251637ac", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:7536116b2852a578cbc5d32f7752b6dd3bb1202817db05306e1a16553c1d43b6", "enabled": 1 } }, "zebra": { "100": { "checksum": "sha256:3d18bbdc44c396c7715cce348f9248712132a1c53341d3b5760016d245f86e75", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:44cf07d7e6b15709d131b8b406032d0e6395a84e1e20bc67f9320a1e97c4dfcc", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:1177170edbd47b6fe17fa022a247d9b75b1fb3a5a49721bcff3c7da4f480c702", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:115 Wednesday 02 April 2025 12:15:49 -0400 (0:00:03.261) 0:03:12.392 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:128 Wednesday 02 April 2025 12:15:49 -0400 (0:00:00.071) 0:03:12.464 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:136 Wednesday 02 April 2025 12:15:49 -0400 (0:00:00.048) 0:03:12.512 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:15:49 -0400 (0:00:00.079) 0:03:12.592 ******* ok: [managed-node2] => { "ansible_facts": { 
"__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:15:49 -0400 (0:00:00.120) 0:03:12.712 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:15:49 -0400 (0:00:00.038) 0:03:12.751 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:15:49 -0400 (0:00:00.038) 0:03:12.790 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:15:49 -0400 (0:00:00.038) 0:03:12.828 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Wednesday 02 April 2025 12:15:49 -0400 (0:00:00.146) 0:03:12.975 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "state": "absent" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: bogus\nspec:\n containers:\n - name: bogus\n image: quay.io/libpod/testimage:20210610\n" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Wednesday 02 April 2025 12:15:49 -0400 (0:00:00.066) 0:03:13.041 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "bogus" }, "spec": { "containers": [ { "image": "quay.io/libpod/testimage:20210610", "name": "bogus" } ] } }, "__podman_kube_file": "", "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 Wednesday 02 April 2025 12:15:49 -0400 (0:00:00.070) 0:03:13.112 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_name": "bogus", "__podman_rootless": false }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:38 Wednesday 02 April 2025 12:15:50 -0400 (0:00:00.044) 0:03:13.156 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:15:50 -0400 (0:00:00.079) 0:03:13.236 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:15:50 -0400 (0:00:00.257) 0:03:13.493 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:15:50 -0400 (0:00:00.079) 0:03:13.573 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:15:50 -0400 (0:00:00.097) 0:03:13.670 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610430.4085276, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:15:50 -0400 (0:00:00.422) 0:03:14.092 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:15:51 -0400 (0:00:00.073) 
0:03:14.165 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:15:51 -0400 (0:00:00.071) 0:03:14.237 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:15:51 -0400 (0:00:00.072) 0:03:14.309 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:15:51 -0400 (0:00:00.075) 0:03:14.385 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:15:51 -0400 (0:00:00.084) 0:03:14.469 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:15:51 -0400 (0:00:00.072) 0:03:14.542 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:15:51 -0400 (0:00:00.046) 0:03:14.589 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:43 Wednesday 02 April 2025 12:15:51 -0400 (0:00:00.133) 0:03:14.723 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_kube", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:52 Wednesday 02 April 2025 12:15:51 -0400 (0:00:00.041) 0:03:14.764 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": 
true, "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:60 Wednesday 02 April 2025 12:15:51 -0400 (0:00:00.064) 0:03:14.829 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/etc/containers/ansible-kubernetes.d" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Wednesday 02 April 2025 12:15:51 -0400 (0:00:00.052) 0:03:14.881 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/bogus.yml" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:68 Wednesday 02 April 2025 12:15:51 -0400 (0:00:00.079) 0:03:14.960 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:75 Wednesday 02 April 2025 12:15:51 -0400 (0:00:00.081) 0:03:15.042 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "systemd-escape", "--template", "podman-kube@.service", "/etc/containers/ansible-kubernetes.d/bogus.yml" ], "delta": "0:00:00.005398", "end": "2025-04-02 12:15:52.229654", "rc": 0, "start": "2025-04-02 12:15:52.224256" } STDOUT: podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:83 Wednesday 02 April 2025 12:15:52 -0400 (0:00:00.436) 0:03:15.479 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:2 Wednesday 02 April 2025 12:15:52 -0400 (0:00:00.149) 0:03:15.628 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:10 Wednesday 02 April 2025 12:15:52 -0400 (0:00:00.063) 0:03:15.691 ******* ok: [managed-node2] => { "changed": false, "enabled": false, "failed_when_result": false, "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-bogus.yml.service", "state": "stopped", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount system-podman\\x2dkube.slice network-online.target sysinit.target systemd-journald.socket basic.target", 
"AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "man:podman-kube-play(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-bogus.yml.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/bogus.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/bogus.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-bogus.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": 
"infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-bogus.yml.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system-podman\\x2dkube.slice -.mount sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : Check if kube file exists] ************ task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:28 Wednesday 02 April 2025 12:15:53 -0400 (0:00:00.537) 0:03:16.229 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610491.1326714, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f8266a972ed3be7e204d2a67883fe3a22b8dbf18", "ctime": 1743610490.64767, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 46137547, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610490.380669, "nlink": 1, "path": "/etc/containers/ansible-kubernetes.d/bogus.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 237, "uid": 0, "version": "2171296840", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Remove pod/containers] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:38 Wednesday 02 April 2025 12:15:53 -0400 (0:00:00.451) 0:03:16.680 ******* changed: [managed-node2] => { "actions": [ "/usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/bogus.yml" ], "changed": true, "failed_when_result": false } STDOUT: Pods stopped: 29b9fe3cb454877533169680e06b10334d735efd7dd3a9e0c03db08bdbb51191 Pods removed: 29b9fe3cb454877533169680e06b10334d735efd7dd3a9e0c03db08bdbb51191 Secrets removed: Volumes removed: TASK [fedora.linux_system_roles.podman : Remove kubernetes yaml file] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:56 Wednesday 02 April 2025 12:15:54 -0400 (0:00:00.530) 0:03:17.210 ******* changed: [managed-node2] => { "changed": true, "path": "/etc/containers/ansible-kubernetes.d/bogus.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:61 Wednesday 02 April 2025 12:15:54 -0400 (0:00:00.379) 0:03:17.590 ******* changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "-f" ], "delta": "0:00:00.033440", "end": "2025-04-02 12:15:54.783049", "rc": 0, "start": "2025-04-02 12:15:54.749609" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_kube_spec.yml:70 Wednesday 02 April 2025 12:15:54 -0400 (0:00:00.400) 0:03:17.991 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:15:54 -0400 (0:00:00.071) 0:03:18.062 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:15:54 -0400 (0:00:00.040) 0:03:18.103 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.040) 0:03:18.143 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:87 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.041) 0:03:18.185 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.039) 0:03:18.224 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.037) 0:03:18.261 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.116) 0:03:18.377 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.064) 0:03:18.441 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Run role] **************************************************************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:291 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.077) 0:03:18.519 ******* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.120) 0:03:18.640 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.072) 0:03:18.713 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.050) 0:03:18.763 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.051) 0:03:18.815 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.166) 0:03:18.982 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.073) 0:03:19.055 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Wednesday 02 April 2025 12:15:55 -0400 (0:00:00.075) 0:03:19.131 ******* ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], 
"ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 12:15:56 -0400 (0:00:00.150) 0:03:19.281 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:15:57 -0400 (0:00:01.540) 0:03:20.822 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:15:57 -0400 (0:00:00.071) 0:03:20.893 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:15:57 -0400 (0:00:00.082) 0:03:20.976 ******* skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:15:57 -0400 (0:00:00.066) 0:03:21.042 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:15:57 -0400 (0:00:00.069) 0:03:21.112 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:15:58 -0400 (0:00:00.055) 0:03:21.168 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.026552", "end": "2025-04-02 12:15:58.349043", "rc": 0, "start": "2025-04-02 12:15:58.322491" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:15:58 -0400 (0:00:00.496) 0:03:21.664 ******* ok: [managed-node2] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:15:58 -0400 (0:00:00.073) 0:03:21.737 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:15:58 -0400 (0:00:00.061) 0:03:21.799 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "(podman_quadlet_specs | length > 0) or (podman_secrets | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Wednesday 02 April 2025 12:15:58 -0400 (0:00:00.047) 0:03:21.846 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:15:58 -0400 (0:00:00.051) 0:03:21.897 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Wednesday 02 April 2025 12:15:58 -0400 (0:00:00.052) 0:03:21.950 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:15:58 -0400 (0:00:00.064) 0:03:22.014 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:15:58 -0400 (0:00:00.112) 0:03:22.127 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:15:59 -0400 (0:00:00.132) 0:03:22.259 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:15:59 -0400 (0:00:00.049) 0:03:22.308 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:15:59 -0400 (0:00:00.062) 0:03:22.371 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610430.4085276, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:15:59 -0400 (0:00:00.410) 0:03:22.781 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:15:59 -0400 (0:00:00.085) 0:03:22.867 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:15:59 -0400 (0:00:00.071) 0:03:22.939 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:15:59 -0400 (0:00:00.074) 0:03:23.014 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:15:59 -0400 (0:00:00.069) 0:03:23.083 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:15:59 -0400 (0:00:00.053) 0:03:23.137 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:16:00 -0400 (0:00:00.049) 0:03:23.186 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:16:00 -0400 (0:00:00.048) 0:03:23.235 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:16:00 -0400 (0:00:00.120) 0:03:23.356 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:16:00 -0400 (0:00:00.057) 0:03:23.413 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:16:00 -0400 (0:00:00.122) 0:03:23.536 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:16:00 -0400 (0:00:00.072) 0:03:23.608 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:16:00 -0400 (0:00:00.067) 0:03:23.675 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:16:00 -0400 (0:00:00.127) 0:03:23.803 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:16:00 -0400 (0:00:00.089) 0:03:23.893 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:16:00 -0400 (0:00:00.069) 0:03:23.962 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:16:01 -0400 (0:00:00.227) 0:03:24.189 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:16:01 -0400 (0:00:00.073) 0:03:24.263 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:16:01 -0400 (0:00:00.069) 0:03:24.332 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:16:01 -0400 (0:00:00.120) 
0:03:24.453 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:16:01 -0400 (0:00:00.049) 0:03:24.503 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:16:01 -0400 (0:00:00.050) 0:03:24.554 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:16:01 -0400 (0:00:00.043) 0:03:24.597 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 12:16:01 -0400 (0:00:00.042) 0:03:24.639 ******* TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Wednesday 02 April 2025 12:16:01 -0400 (0:00:00.223) 0:03:24.863 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Wednesday 02 April 2025 12:16:01 -0400 (0:00:00.088) 0:03:24.952 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Wednesday 02 April 2025 12:16:01 -0400 (0:00:00.051) 0:03:25.004 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Wednesday 02 April 2025 12:16:01 -0400 (0:00:00.046) 0:03:25.050 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Wednesday 02 April 2025 12:16:01 -0400 (0:00:00.047) 0:03:25.098 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Wednesday 02 April 2025 12:16:02 -0400 (0:00:00.059) 0:03:25.157 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Wednesday 02 April 2025 12:16:02 -0400 (0:00:00.054) 0:03:25.211 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Wednesday 02 April 2025 12:16:04 -0400 (0:00:02.850) 0:03:28.062 ******* skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Wednesday 02 April 2025 12:16:04 -0400 (0:00:00.041) 0:03:28.104 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Wednesday 02 April 2025 12:16:05 -0400 (0:00:00.042) 0:03:28.146 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Wednesday 02 April 2025 12:16:05 -0400 (0:00:00.113) 0:03:28.260 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Wednesday 02 April 2025 12:16:05 -0400 (0:00:00.041) 0:03:28.301 ******* skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } 
skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Wednesday 02 April 2025 12:16:05 -0400 (0:00:00.048) 0:03:28.350 ******* ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:13:57 EDT", "ActiveEnterTimestampMonotonic": "326139129", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target sysinit.target polkit.service dbus.socket dbus.service system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:13:56 EDT", "AssertTimestampMonotonic": "325830531", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ConditionTimestampMonotonic": "325830530", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service iptables.service ip6tables.service shutdown.target ipset.service nftables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12711", "ExecMainStartTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ExecMainStartTimestampMonotonic": "325832688", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Wed 2025-04-02 12:13:56 EDT] 
; stop_time=[n/a] ; pid=12711 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:13:56 EDT", "InactiveExitTimestampMonotonic": "325832722", "InvocationID": "41b8036e16214fc68c244a42727639e9", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12711", "MemoryAccounting": "yes", "MemoryCurrent": "40091648", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", 
"StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:13:57 EDT", "StateChangeTimestampMonotonic": "326139129", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Wed 2025-04-02 12:13:57 EDT", "WatchdogTimestampMonotonic": "326139126", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Wednesday 02 April 2025 12:16:05 -0400 (0:00:00.540) 0:03:28.890 ******* ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:13:57 EDT", "ActiveEnterTimestampMonotonic": "326139129", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target sysinit.target polkit.service dbus.socket dbus.service system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:13:56 EDT", "AssertTimestampMonotonic": "325830531", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ConditionTimestampMonotonic": "325830530", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service iptables.service ip6tables.service shutdown.target ipset.service nftables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", 
"Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12711", "ExecMainStartTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ExecMainStartTimestampMonotonic": "325832688", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Wed 2025-04-02 12:13:56 EDT] ; stop_time=[n/a] ; pid=12711 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:13:56 EDT", "InactiveExitTimestampMonotonic": "325832722", "InvocationID": "41b8036e16214fc68c244a42727639e9", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12711", "MemoryAccounting": "yes", "MemoryCurrent": "40091648", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", 
"ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:13:57 EDT", "StateChangeTimestampMonotonic": "326139129", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Wed 2025-04-02 12:13:57 EDT", "WatchdogTimestampMonotonic": "326139126", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Wednesday 02 April 2025 12:16:06 -0400 (0:00:00.559) 0:03:29.450 ******* ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/libexec/platform-python", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Wednesday 02 April 2025 12:16:06 -0400 (0:00:00.055) 0:03:29.505 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Wednesday 02 April 2025 12:16:06 -0400 (0:00:00.045) 0:03:29.551 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Wednesday 02 April 2025 12:16:06 -0400 (0:00:00.058) 0:03:29.609 ******* ok: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "__firewall_changed": 
false, "ansible_loop_var": "item", "changed": false, "item": { "port": "15001-15003/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Wednesday 02 April 2025 12:16:07 -0400 (0:00:00.646) 0:03:30.255 ******* skipping: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "port": "15001-15003/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Wednesday 02 April 2025 12:16:07 -0400 (0:00:00.068) 0:03:30.323 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Wednesday 02 April 2025 12:16:07 -0400 (0:00:00.058) 0:03:30.381 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Wednesday 02 April 2025 12:16:07 -0400 (0:00:00.043) 0:03:30.425 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Wednesday 02 April 2025 12:16:07 -0400 (0:00:00.116) 0:03:30.542 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Wednesday 02 April 2025 12:16:07 -0400 (0:00:00.044) 0:03:30.586 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Wednesday 02 April 2025 12:16:07 -0400 (0:00:00.067) 0:03:30.654 ******* skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:16:07 -0400 (0:00:00.111) 0:03:30.766 ******* redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux 
redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Wednesday 02 April 2025 12:16:07 -0400 (0:00:00.237) 0:03:31.003 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Wednesday 02 April 2025 12:16:07 -0400 (0:00:00.124) 0:03:31.128 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Wednesday 02 April 2025 12:16:08 -0400 (0:00:00.084) 0:03:31.212 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Wednesday 02 April 2025 12:16:08 -0400 (0:00:00.229) 0:03:31.442 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10 Wednesday 02 April 2025 12:16:08 -0400 (0:00:00.075) 0:03:31.518 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17 Wednesday 02 April 2025 12:16:08 -0400 (0:00:00.071) 0:03:31.590 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22 Wednesday 02 April 2025 12:16:08 -0400 (0:00:00.074) 0:03:31.664 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Wednesday 02 April 2025 12:16:08 -0400 (0:00:00.075) 0:03:31.740 ******* skipping: 
[managed-node2] => { "changed": false, "false_condition": "ansible_python_version is version('3', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Wednesday 02 April 2025 12:16:08 -0400 (0:00:00.072) 0:03:31.812 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46 Wednesday 02 April 2025 12:16:11 -0400 (0:00:02.884) 0:03:34.696 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_os_family == \"Suse\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 Wednesday 02 April 2025 12:16:11 -0400 (0:00:00.046) 0:03:34.743 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:72 Wednesday 02 April 2025 12:16:14 -0400 (0:00:02.864) 0:03:37.608 ******* skipping: [managed-node2] => { "false_condition": "__selinux_is_transactional | d(false)" } TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:77 Wednesday 02 April 2025 12:16:14 -0400 (0:00:00.047) 0:03:37.656 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:82 Wednesday 02 April 2025 12:16:14 -0400 (0:00:00.131) 0:03:37.787 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Refresh facts] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89 Wednesday 02 April 2025 12:16:14 -0400 (0:00:00.043) 0:03:37.831 ******* ok: [managed-node2] TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:5 Wednesday 02 April 2025 12:16:15 -0400 (0:00:00.766) 0:03:38.597 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_selinux.status == \"enabled\" and (selinux_state or selinux_policy)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] *** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:13 Wednesday 02 April 2025 12:16:15 -0400 (0:00:00.062) 0:03:38.659 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_selinux.status == \"disabled\" and selinux_state", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:21 Wednesday 02 April 2025 12:16:15 -0400 (0:00:00.069) 0:03:38.729 ******* ok: [managed-node2] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:25 Wednesday 02 April 2025 12:16:15 -0400 (0:00:00.083) 0:03:38.812 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:30 Wednesday 02 April 2025 12:16:15 -0400 (0:00:00.069) 0:03:38.881 ******* skipping: [managed-node2] => { "false_condition": "ansible_selinux.status == \"disabled\"" } TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:35 Wednesday 02 April 2025 12:16:15 -0400 (0:00:00.071) 0:03:38.953 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:43 Wednesday 02 April 2025 12:16:15 -0400 (0:00:00.067) 0:03:39.020 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:48 Wednesday 02 April 2025 12:16:15 -0400 (0:00:00.067) 0:03:39.087 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:53 Wednesday 02 April 2025 12:16:16 -0400 (0:00:00.067) 0:03:39.154 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:58 Wednesday 02 April 2025 12:16:16 -0400 (0:00:00.078) 0:03:39.232 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:63 Wednesday 02 April 2025 12:16:16 -0400 (0:00:00.174) 0:03:39.406 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:74 Wednesday 02 April 2025 12:16:16 -0400 (0:00:00.065) 0:03:39.472 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87 Wednesday 02 April 2025 12:16:16 -0400 (0:00:00.071) 0:03:39.544 ******* ok: [managed-node2] => (item={'ports': '15001-15003', 'setype': 'http_port_t'}) => { "__selinux_item": { "ports": "15001-15003", "setype": "http_port_t" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "15001-15003" ], "proto": "tcp", "setype": "http_port_t", "state": "present" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:99 Wednesday 02 April 2025 12:16:17 -0400 (0:00:01.000) 0:03:40.544 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 Wednesday 02 April 2025 12:16:17 -0400 (0:00:00.039) 0:03:40.584 ******* ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:13dad22da122be9f7d5df4dbedae6a515323542fdc1a7e466d7a1a3d36d29731", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:8bd25829d921be0b5adf92ddaca7ab94cedca1d57796749cfa63571b6550e3da", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:2699d826efd46176017695c768804c505a54b277b05f1feb9c43a613bab4e6aa", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:99920dd4e0855870f7e6f9666928d13fe18ddccca9d38b92ea70a6ce3c8c7539", "enabled": 1 } }, "aiccu": { "100": { "checksum": "sha256:a7aedc8354b4335412871adfd2ab5b0c6da1ea63c8dd797718e4214a5d511bb5", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:8adb5c3a5ed74695e975eecbf290640b179eb6345a7740745ecfe3164efe209f", "enabled": 1 } }, "ajaxterm": { "100": { "checksum": "sha256:d3a03c2837d5dde7145e27902ff8578e00734ab34e8ea1a45aee58b83e9ad6d1", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:202f94345fba8f4bc942dc9b75bbb6eea3b4cb02411cf6ed79858d72aa883c89", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:f9a99d97370017307349a154ce479969395bbbfe434e4829573269f770efdd0d", "enabled": 1 } }, "amtu": { "100": { "checksum": "sha256:bc9934a2ae61fa117614f201479966d788484f3a7382de4ebad99790a465e2b7", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:b8aabc624243533d483c3dd5574a490a43e7ec0f2f7940798c12b4089bbd0642", "enabled": 1 } }, "antivirus": { "100": { "checksum": "sha256:1de6460ccaea5a5749eba17489b9765035c8202eb9492485ff39157564001a2c", "enabled": 1 } }, 
"apache": { "100": { "checksum": "sha256:1a0c38364558bebdae3efaa1fcf8be232184dcddcaab345bba1c40bf239dd0ed", "enabled": 1 } }, "apcupsd": { "100": { "checksum": "sha256:175308edb201092c22791f419d32da3f661e7ccfb9c5d5855ad753405c10023b", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:a1410f65d6bf017caedaffaa59016877686099fb7df3c4d801136de79a61795e", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:a8e9d90aa1188068ca66be55c4d8abf9982666171bbdd8d4da1f2a254c34a080", "enabled": 1 } }, "arpwatch": { "100": { "checksum": "sha256:2cb8afd237d6bc5693e5d54be1a455b6ed632fbbe76cea406163f9c48d00e79f", "enabled": 1 } }, "asterisk": { "100": { "checksum": "sha256:0b66b387174001e926cf1454c3516bb32d96610a0f598065fe6d7a917ca897fe", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:dcd9e7f5e71fb9f7aace30b5755efcbf85fe88f884d4253cc9abcad1c44e5f4d", "enabled": 1 } }, "authconfig": { "100": { "checksum": "sha256:bdb8072e463c84cb01e6933093428be2b6ee5299d82e26730b12dd2b66d89355", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:a89b04c7a40bb373de2bbb0a2210cca454e7d4a805321fbe65462ae5551db656", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:41ec4e0c5c46118cb4dfa8c8b1834f330dce4ffdea3d534a8d5007a63b3e5262", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:7628cb8340258102798a6e36902d0210e2051ffb9fb4f7a1e4c62a612edfe6fa", "enabled": 1 } }, "awstats": { "100": { "checksum": "sha256:9b92e64a3331076ad443862aa2ba98a2c4d9b00638bf19bb9726f572dee5eff4", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:32cedcc57f6a973ac5adc16d8df343fc1ca4b3716f7cdcdae0d2490a6e5765ac", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:d99ed290beecf2b10a557a21b06b63cabc28dab4050f2e7197d2cb9e30519fd3", "enabled": 1 } }, "bcfg2": { "100": { "checksum": "sha256:ea510637d47b7fabc3f617f8a6f3ca3172bf9215c2d6b64ad19cd5d8819c8b6b", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:39520749f8aba46f975a87187975d8dcd014ad67d22515951f51fa3fd1b0478f", "enabled": 1 } }, "bitlbee": { "100": { "checksum": "sha256:bf04e481614825a35c26a547b19098ff1c8acd0d915c5b4f938b9fa595459d00", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:ca870c95742bf987a2e739286cbcb998b58c091a422251fdd8de57228b28fd96", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:7e4b2b3df3962273436b561c806c816fe4b1e5d6781efa33a7109b05f796edd7", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:da457ef2ce595c3bf9f70697029ea90e96472ae562f685a7f919a7778a778d09", "enabled": 1 } }, "boinc": { "100": { "checksum": "sha256:d74bd3b6b3850c30b5bbf77822ab82b43f36600e4f76cd68674ef361328afb05", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:4ccf41e247c5a7066042a0ebaae492805a1d640f777e8e771701f340a76bce30", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:46e55021d6c4cede091a992ab33521bb1aba4ca1d44879d778973b279204933c", "enabled": 1 } }, "brctl": { "100": { "checksum": "sha256:f9645adde2441e43369a255c6a194f01c6f5800347ad710ce3e147df884b98aa", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:603734d4772f482f282eb217c03647f705d66de27fc927c64e02787369b0f78a", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:326d2a188603c908cdae3c9dcdae6bda37b98ec4cc23f3b31878e2bbd0cd33b2", "enabled": 1 } }, "bumblebee": { "100": { "checksum": "sha256:e8ca8d5318a68243441fdb993fbab6d566f7462fd5557b55733f8ddbfcc4b276", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:86fe9c1aa8b2d7a6bdd9bd8d0c7a41a7ae0e4e14e32eaea6cb920367c2f495d7", "enabled": 1 
} }, "calamaris": { "100": { "checksum": "sha256:1069377693a5d730d57e4ddd6f73ce20b67b595aae90a16459e852d238163b48", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:880b626c3d04c5669d64ee617ee36a18566e91adeaac67b9527b0a795543575e", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:b9256764ca5e34142e8cffea57fafc2fa66f78dc8c05761f97fa9becd1d77311", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:ad293ee5e252966d14fa6bf09240f143460df4b928672a33a398a5793777c4e4", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:dda8d62c3bf2503ff9762bd031c35a76cac8059d08592fe23e4d3fe11b0ac8cc", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:b431dd84f2c6b971bc573674fa6c4ee2fedf910b0123ba5d9acb5011c208fd72", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:965ec65dfc98cbabce2350bd52fa7ce92c2f4ab4704348f1555f2a3d9edfd1b8", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:77f0299f67e43927eacb553d1002beeebc3098b4bee64d8dc3dadb8fd23fbb5c", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:c78b908838f1d64ee9ebb0a51b7fa438527716936471a573e1b4b7c393bd6b8d", "enabled": 1 } }, "cgdcbxd": { "100": { "checksum": "sha256:5d3633e0b77db69721e4f64167d7e5f7779c3e5fa76e095d25f8467f2a0bdfec", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:9368c6c54bd5ec6f20e4c3b47c86e60af07346c4e86e525b6bd7288b54b7e774", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d31ce9d2fe78cafcd5e3c8decf22ae1e9ea6f74026ca65b6320afe9a33cd609a", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:7d9624729861397cf7720c2324c65489a3d30485e6a884ab1ff9a8ca22efa678", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:fc169721c78f5b0857ed8312e59ba4c134b685c4c322dae242b92e815e35e6fb", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:02c20398b9eff51ed431b7ad739a6015d2451b4bf6e3e5da380606d85a77852c", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4f90655d2243cfc32ea7436a953cccb8a34af895f83361235a3a5cda40dbc75f", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:ba78a422a10b65591c48cb038f8a55614944163f3140275852d293fb0c548bfa", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:481f5fbc7810a5a81851edbe5a6b124141257f5fbbb83d8830ae0a34924ed3d9", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:8f8fb986f15b8b7c5c250d250fdbbb2f78874e13394105c9c486488a16e94c91", "enabled": 1 } }, "cobbler": { "100": { "checksum": "sha256:e0e264b9cc83962dbbb27c152a72f01c6a355467c4e845b52e65c8b88d8d75d6", "enabled": 1 } }, "cockpit": { "100": { "checksum": "sha256:cb7fccd94903a6e256a586d758085f6f59c0f8b1c5b4cb99536915526d2224ec", "enabled": 1 } }, "collectd": { "100": { "checksum": "sha256:7f08e2e248d33162dc9b237c37ed3a3dba0511bbcc71d87482e95093fb8c6456", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:86e58c9f12c519a2c3b090b64a276722374054ea900c775b2f8ab4ef2867dcf0", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:1d57ffaad6b96e3ca8ac82c23b52d58d81e1f69f5d54a648a16da8ffa8070e53", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:dbc3f2f0c12f9aeed14056fd7e7c46a4ecab3569198f891643172cd032f3fc00", "enabled": 1 } }, "conman": { "100": { "checksum": "sha256:1270caf15af248a487cd5ce728daae2699ffd9139823c805ec49213ab1c835cb", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:56fd7d7a550dbc4188b93afd0fde8c706623b3a5d26db265ee016967ba4ddfee", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:5bd7a7acc191766583d933b04321e64657138959bf40a4d2986b013b942c4ba8", 
"enabled": 1 } }, "container": { "200": { "checksum": "sha256:301be7dafa07cdc68b4e5ade7e1a07017fab3efd85986bdfab7faa9466a95836", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:12b2e3e7314bda4e76d3883901e6470927e85343f742fb44b174ce968f1ad8b5", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:40ae5f173004741838002644e5bff73cf16f2f3a1928c45fa17674f9a0df5148", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:1485a6d64d00619898d2789d27391f2a57a7fb1f0e8c73daf59baca8641564a3", "enabled": 1 } }, "cpufreqselector": { "100": { "checksum": "sha256:687564eb09acf3e7f1475fe2a133941c36999bd037aa8a794feea2d9f2c26385", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:c16e376ff6c51da1911e68a8a7d42f5730eda45febfd0875c78cac4b9cf6e78c", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:6be0252b3c6bcbfb4c51dfd3ae1ae262f5de153234917ac4d342b18ae0292060", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:06dd65a4361bf8076c14b322dd30003295c0b9d75bf1ae610961b13a1f9431da", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:3d5e5bbf131d98d95f7f1431893eb137bd833dbfd8469f9c386d72bb4e8f9b9a", "enabled": 1 } }, "cvs": { "100": { "checksum": "sha256:bbc8d76cc8609849d5b078c5b2ac7364470a06d77d67b97d5f58429d7b679e33", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:b1a41211ae3cf69b819df517eccd0fda2088c27685dad68de64531b9794ec518", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:60defb1f6feeb1d607734c4912e52e03bf5b0c27cb6f31a37fa7e05f3497b323", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:1034e2442c975dd2ccf84791b1a826d02032f13762d57c3485e51e2b9a7dc03f", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:40306590ef444152ae18b65040d85442c14853a9cc4c31b0224c4d19517d66ea", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:24559eff82b251f9814ae88c36a7cbacda1ed419a80145aef545306e88cb0da8", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:50ea4eb05a06315449092c939e2307436ac6461e47ca69f0d42cc4e321e86280", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:06e414b0a83b49968f62018cecde48dcfe68b2e9d699915367b3e04461188a0d", "enabled": 1 } }, "ddclient": { "100": { "checksum": "sha256:73ca2525a14e3161524f6e8fc0d016430a536002f1cb3833db1334670b458436", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:1bd00b13b9bda18274a771d66f7cba8fe62e5e95ea8f51415da6b1fa1336df1b", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:03b01b781881cc60438bc357bd60596970b8ac019b415969bca8a08358fcbfd1", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:2ad95a78468f7f4ea9a8c044c79c0a4ca9924b41432390ea2863a85c806c9a00", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:c30c819f142d3c719d0ec5741af5a65161770ff140097fe63f7559d55b897500", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:50efdc68200d27ce1a5db99a780aa7b0e84988669961d436d348c7bb8310d181", "enabled": 1 } }, "dirsrv-admin": { "100": { "checksum": "sha256:8d9234157484f6ae8ba22039b44fa19f4de8137be9321e5da393d72d85d89487", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:8b834312a2cb99ab89862f839a1315e78794dd92758785f84c9559285dfbe679", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:2c7fb8c6c52f385b819713f0444a96cfd4e65b7dcb3ca79b932cc12ad9ce903d", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:44f66c5d4f635600ee9d0ba3fdea3896218f1420b5ead89e0f22d71a447f9e97", "enabled": 1 } }, "dnssec": { "100": { "checksum": "sha256:49427a9e92b87db77706e2b81ece254c99d3cd6ba020211e2afae65fab7ad066", 
"enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:cc8c3a2ee0233a7f1fdf38837b72ce5fd15efef782a36ab4b9aa2ec339b46fa6", "enabled": 1 } }, "drbd": { "100": { "checksum": "sha256:b66be23c1ded4e548e5369b744c7c2a4dfd7065582517525221177ca67657525", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5dd7221ba40e9b912367289fed8ca116c14da4fb8bd7f28f421c4008855bb9fc", "enabled": 1 } }, "entropyd": { "100": { "checksum": "sha256:0f68aeeb1da72efb8c9b1bb7db0a4180b6938672b16f33d1abcd65f5481d85a9", "enabled": 1 } }, "exim": { "100": { "checksum": "sha256:f4c4473ee49394e0e4629023772464a046c476f92b4a727acdf9f6c92711b952", "enabled": 1 } }, "fail2ban": { "100": { "checksum": "sha256:2383cb88b81bc5d87be9f3201a42da526532c4ea8e6d3b3f5023005c0ddf6f17", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:913e66ac5f5ce364e5ea556acfbf77845c25a4beb5ee64599613aa00127c1492", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:63f00993bae4285eff5e993d208ea786785c4331e6947b3a48a97d31145b2e98", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:16c506d472b007f7d36850810ca0fcfd9482d30ce9c0ba790174b78294fd1d74", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:bbf58446f30b93de19e5a19087ee012f8e347fef5e7e8012e64b31a0ec21ab09", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:b61ff17eee03141c9c7bd79d63331ecea733cba4b5b43b87d5141a40cdccdd69", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:c5540b8385c84075dd657e390d77ae886aa9d74b65444b9aa1d858f375819a8c", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:c1ffb7734a0359a7390830d9c6477ab61c45fc026368bfd5e2246523a6439464", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:9af2291d75a2d643f53ff7a98bcabf22effb617329178efea45372d714825de1", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:28bf77389f3e41743b30727a891609172a891466e92c28a919f43e628cc23a4d", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:140caf542903419ee2471fd99ab06aa45899c400402c2580b395b182f24bd225", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:7e8456fdf7807b30e1c257e568ba10305696cf5abdebc70988c288079884d46b", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:1dd6a45b73c7ce77a87af1e87354ada5aa5b2841aaaa045a6b4ae3c4d09f0f8b", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:950d8be99d5349a3d893ba601c518e6b2af0d56c5b55514a45dbd8a3c61c9ecc", "enabled": 1 } }, "gdomap": { "100": { "checksum": "sha256:5040cb99d007fe9368bd37a9a6bf82f891c220ef652443896a0f2f6ca6f818e1", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:f0155b43152b6b4b850d1c4fb7daf16fd77992313b8be314ddb4901314bf913d", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:a60d07665b0ebd25fd54a9d82dad5eb7acbc11a2842dba56d7b9524d26ce14ce", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:5eaccf209092db49c9a48d84e1387c1de76cb153c774c0bd615c001afab28664", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:b522382b64f36cf387cd892b45e916c861bd0a09697bc983eb55b53b0efd3081", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:2c51d19fca6ee40e137245ecb425edc77666d75c42ba583bf74cf13f10ace055", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:420b9cefa6bdb542f6da10de7b36704a91509cf64cd2497e5693a858cfca5e41", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:f821aa6ca5837a2d2de8180e74c267f68da951960c989fb13ebde5833c93738e", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:bf30c4945be0065672fb47f70ad251b1079ada339f61f2679293cb0226d0d57a", "enabled": 1 } }, 
"gpsd": { "100": { "checksum": "sha256:5373b2332959d6c41c32160018274ab61e3f1abd0f0a5cc2302c45b141a39a9b", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:7528c47be91a81ac19f2f54458309baeb0a232d83a1ccb2bd89fbc8cefb1ddc8", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:91f43e4d5ae283f0aa13c49efea93293dbdecd2b2f4f75db89371eda65b7523e", "enabled": 1 } }, "hddtemp": { "100": { "checksum": "sha256:f170e1da6acae4fd7108d22c8cf262916e034f0d3edbdebf3265a922a5355373", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:8b15f72328885c08bfda38082a62feeaa2c6692223a4d2bd1a572820d454a742", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:e9fc1c4032c0346f751e1ef8ad1b3fe3425401b70a6c4354d4485472288e0bc5", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:f70b198e5a5157722b69dc89109c4074a475e1085356cc610cc9b700567c154d", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:370e9eea2b927a2715018b667e9a56ad09af301a90811cd9b041da79f5384b38", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:b54ce6f4960a02d35e19d60bf8a07f7866514893e3193a5f4822c8580a46caa4", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:663b35f3874583ae074924bc068a8dc4c7c144adb60007da6103d1e3505ee37a", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:dedaddef1d7447d25a1e7ff01e60e4545606e556c6770bd3fa94d9331de7a5d7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:ae408578a7160f2feae10269365558c43d9570b392642a92cc20f8ad47c58cce", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:7ff95566a4f2bdb8ca3ec67acdade39e35fdabc57c2f00b989bab3f699f997f8", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:9ad99284192a443aa582e73b46667388b7a219dafae8dfce71a58a82bbae2f6c", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:0e41289d8dce065dcd41fd6cc1e1282efd4a58e7f9e3a2f1abc32f520fbbcc1e", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:32501ab66def044fbc340cb5c656d5743c738bbd6fca5626c36c687419cd8d32", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:d15656cd91a4e4e178a13f7cf910cfc552cc30db881a11ec88833f947edb4561", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:d34fe186922c0e5726ca361343ec3846833ec3e4ab9b019b3d7bac1337383a16", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:d36c66c2c79d338c61c90d4136433e1e3a73435e920eb36d70682dfd5e147e59", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:5a674017cc648e3262757464e5413503154cc1f593da545ce2c4f946991012bc", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:d72428ccbff5521367e00699c142bba64b2bbd44fed35deb29f9530cc0448378", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:15650b2f39ccdfbcb1e4e867a35fce3c2768097e611e0c8ad9cb79ae6c66dd58", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:ccb27142f793095c79f531aae924baaeee5914c84228a09c09b9eca839f3524e", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:90b42f610fa328cdfb98bd0450bd052566f203e51e4a913dd6faded6da7fbe2c", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:5ad49d140265305dc72781a6826d1de4614a33f83bd512acdc2263038ad41206", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:d910afd1bfe836543ded50974dc24ae7bd5fd2609d6a9b2403316dffcd39832d", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:d9a67ce1976ed2e79826d25f33dcb0b0bbde6c090600b605bbaaae45856d12f6", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:9ddb71271d0dbe5cede6179c0ca263e297dc6b65197bde2f7b14ce71f8dde369", "enabled": 1 } }, "kdbus": { 
"100": { "checksum": "sha256:5969c78be4a03cc91e426bc19b13c5188b5bf8ac11f5e2c21c098c3d68a7e3e3", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:fdde3852d1decda649133c6345680f9353b86a6da2a98a83a8be101c9c25f103", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:66c67280c70a9b897b0f952067438e0eee05f9f48913508b38d745ef88747f32", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:c1177567c7bf67bb2d0de17760cecf56e0bb34f50d6fe060dec64ae97a76ecdb", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:826fbe83705494e009b242b88857c425eacba49aadae506ffa2012c80e60f7ae", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:f199811d9ddc8db83864a09c543567fcb2f117b3241967b092bff7c9fdbfbfb6", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:b0a7227a870ea987035e0cd524ad956a68287d0a67dd7135de41c6d5977ff4c2", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:488fb5fd17cf1f630f3e48a853da05f86c06fc58219dc2ae59251865734bf800", "enabled": 1 } }, "kmscon": { "100": { "checksum": "sha256:d64019b11b6a37f6cdc5579d56eb1e19b6a7231501e1cfe2a838d26a2eac6033", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:00070d71dfe2632491305387ffb264127dca4387425015e4cb013d6bce5f95c3", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:891f082452240ad2e726bad777ea787d0f0f8695cc2a75f7439a2badda030d24", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:2df6f3dbad4a513ee1c113e496e8d2f5a19f56015f4a21e7478f2f5b53f36359", "enabled": 1 } }, "l2tp": { "100": { "checksum": "sha256:8e4cb0b0e0d1293d669de0b0e50f68d6d6fbe8e8d830a236a1c0e676f2326fb2", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:d0177bb5873d0e6f9595020a8f39ba06b19e4636ea610175a3afef4aec2719cb", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:6d5f128f2d4fd9137a7c70d0d024703547796a71f70017b3550a31d3450e0435", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:e7eebd050230b358b43435d37ce308c3ba15e2516f4045abf7d26f03ebfbc11c", "enabled": 1 } }, "linuxptp": { "100": { "checksum": "sha256:4132cd51913a3044e453ed0b972db2ef511fdc7b2a1b592d1070177651066ab9", "enabled": 1 } }, "lircd": { "100": { "checksum": "sha256:cc81b79d2834e58bef7928f525c1a1eee5547e81d195444b3bc2741e396ae46b", "enabled": 1 } }, "livecd": { "100": { "checksum": "sha256:805c7bc4ded621b44ecf333d558328e115bba652fcbc91f436cefc948497688e", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:358c4b262655cffbf20f7484aedb22f094509f44d52a1fa3efe3edeafd99317e", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:26f9e78406ecdc968ed670b32db1d10805e66875631558f092f08a6e1f2170dc", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:e07d92775ed25e7a3627bf977452844c67acf473b33075475f433f8be76dd755", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:1f946da2054cc1693209749df12ff01ab6456247d6225733aebb3a7d70a46e20", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:70546c4b3d01f15bc7a69747dbb12fc6bcef5d899f6301f62c0c612c7069082a", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:656067c78ff1246a1a758a213d44307f91cb79336fe74a47015af425e58266fc", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:76cc40f1943fe21959793499bffaf35d0fe53ffc3f6c5a8b31eb96e738a286c2", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:cf4450b03e28762040c29f2a28af238cd4905d1c6bd4c73d656b266c7b9a8a6c", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:9358dc35659b9570d3e8119a088b2693d7de505ea25996dc139517a857888857", "enabled": 1 } }, "lsm": { 
"100": { "checksum": "sha256:1247dc4bccfbc9ee42292db4415b21ae00bdef3dc2faeb267f045413da4a1b1b", "enabled": 1 } }, "lttng-tools": { "100": { "checksum": "sha256:79e4a2224ede13cd5f2c0e6e7c61e83efabaf1d05b86f6f7a710599bfc48edaf", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:f56137657dd61a1a8a8844d5d1db01fc03330d17e05457d03f64756b344c32ef", "enabled": 1 } }, "mailman": { "100": { "checksum": "sha256:e47811cf3bd8204eaa02c4aab92f3d426f0a3ef97161e1579845d1e03df1fc1d", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:8d447072ab5005ead27f1cb4d96dcbedf09a11182f660c6f59c6d56fd81235d8", "enabled": 1 } }, "man2html": { "100": { "checksum": "sha256:224584babd9e83c242d54fd8c5cd03379b0556005268aac22b15734b913f12e6", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:ae44b8ec7a90ebbc45fdafe89663197b36e47120ad90eb22b475939055ea6924", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:c5d98ec368b145c74b4bf0ea8da3980b17af0c2d00654c5a6973241625f97b12", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:43f1c6f7cfdeaa26891824167cf637a8670785c2674b45d85ce4a7ac77190a36", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:f0f9c7367e9bd196aa463916bd5aab02f6966dad9564a0f2fd070bb2e8410aeb", "enabled": 1 } }, "milter": { "100": { "checksum": "sha256:db190bacd2b84a29971cd1940cd15d606abbfded5c9b956894717afd91fc7a0d", "enabled": 1 } }, "minidlna": { "100": { "checksum": "sha256:0d6ac660d641c1cf707a814ed08e19b9e21547a3eaa7134cab84dbc5fee6b5b2", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:dd2ab85bcba6d204f9dbc7304e8a4940e5d1733d4b9cf4fcb0f4072982c585c3", "enabled": 1 } }, "mip6d": { "100": { "checksum": "sha256:406edf2c78ba0e692d5a78f3c5ca8d641d00131b143332adeaad9f325959683a", "enabled": 1 } }, "mirrormanager": { "100": { "checksum": "sha256:7084de59beaaaf4f630357ec53beff8d0a0ee532ac180fe58e23bfe98f1fdaee", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:7e7e87e302bf847a4c59d69e5af60729e61bada0cc5d6ec17a25a6514476cb48", "enabled": 1 } }, "mock": { "100": { "checksum": "sha256:ae352eccf2f2c9ee8f0d9635517d9ae3c9bba83c617deca8f989e2aae8dd35fa", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:84a60147d2b0121ff6ede6199583cdb5619480d015b2a675c6a0569f91c12d66", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:67c3914aeb25e38fc6bd0793fddc41122dba1547d54e91a78065545fea3b9c87", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:6030afcea9f8d46f25dd7785737edd25eb0f1e50b76eafe4d9103196b722d47e", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:6ba3a594d01a11bc32e7cb554f7386314b5089eb4416fb776edb552a7d53c41d", "enabled": 1 } }, "mongodb": { "100": { "checksum": "sha256:1b2d30558bec7fc08d1d388ae2bb0becd2233c99c9fb173fd00809786ce5eed9", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:346e172be35df168eb0e4fbc8e176b0fda87de9bc5787f7a5ab7667cfe1e3c3b", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:f66c53d993dcd47ea1ff3d797f8fd69fb8161a4ff8a59f54f66a2de9462a55a7", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:7696dbb77c54531cf2574c7ede9f085cf64611dcf7a612530dce2de19f7a8b9f", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:0f67c18c9101b53f57ef857a74d6044701e1d2c347f829a03c0579c545fdbef3", "enabled": 1 } }, "mplayer": { "100": { "checksum": "sha256:f82c0a72506f1011e47ba98e51d5edf906f58fc190d797f5d1a0b8e5cc7d0762", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:afcd9267261b334900420461279b8555fdb4bd783af880fa4606d8afc65e0712", "enabled": 1 } }, 
"mta": { "100": { "checksum": "sha256:b0f9753424c504a288f55d495105f6d475d69287b718190ae5192cf7d6ddfde6", "enabled": 1 } }, "munin": { "100": { "checksum": "sha256:29f87ec15fa19e975c83288d55e56bab64855a24c4d8826fe4138eda9a46cc97", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:b028af8f4e726feb8c26037f7c6d6f97383977bd5ee6141ab4e8e1d096d6481f", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:e025b2dbf50901632da0ee2aa658105a322275eb120d782cbbf25f2895231154", "enabled": 1 } }, "naemon": { "100": { "checksum": "sha256:a19b3b0540dc52d9506ca7e5d804c2fe9115b3ea28bfd9273030e841e12eb277", "enabled": 1 } }, "nagios": { "100": { "checksum": "sha256:39ca80027ac8585f368bcd57f555ba87bf409f7b7d6c4292c09fd06cc1691c80", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:ef73850f29b4ff4ff904d506d545bf366fd1e7c2ba82a7a7c9a4513e3eee45d9", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:2c9356101a9ddbec94afdd12ca669ba93a1d422c302f9e17b78b18670617d2a1", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:9a32ce04c1dd8e120588c15b3057f838bedce8f14c91576b667295d47800e0ad", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:5e0a20ae09b00fac69ee30a0d55ff73fa692d8350c9c0b0343af61e4f0dd654f", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:9c67b21155929e43e4efd3fc81a85fddc9f1030b47ee4a275789014c1311b972", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:85cac2885d75522eb07189efcc3feeb7775fc6daf5cf3f1a28a1fd2109fe148c", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:b5b133d60b98068eb9480c54285050ae9b49d2fb309eac8994cc91c865ee02d4", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:59919a89d30a5d4b60d6971fa636fb62605d59d214ec614adc279f6cbe2c2b27", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:578bc975477539c659f3608b1445a0c7a9bc7c3f2dcf65b3e55f3a3af89ea564", "enabled": 1 } }, "nsd": { "100": { "checksum": "sha256:d5b03cdc6c8bbc222b8e3d30680b1a7d2d1a49837e7d509aafcf6b2a3a32195b", "enabled": 1 } }, "nslcd": { "100": { "checksum": "sha256:18b003071f4c36307616f7d5de8cff6d4e376af31cb96ce1a5ad6ae3011dfd09", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:f942c7fbe636b9d60327ef9dade1120340c16a2992a6b50db5fbaecd44ffd63d", "enabled": 1 } }, "ntp": { "100": { "checksum": "sha256:686664a71e74b0edd643ab9d556b1aab092fa707935da5ea928a66f54a3c84e0", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:dabc5ce6244d0b0939e9a07bd6bc232e8b666529a0b7b29527e586db8224862c", "enabled": 1 } }, "nut": { "100": { "checksum": "sha256:653e708dec531e483992b25944a689ec9369478d039a5ec62c98294ab73ce8c4", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:4ae55fe839abaaf0ea52b79a5c8f6a906575b83cca29532c2dd52337fb3d5790", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:7b2c87e864b6008f734e1effa48cee1399f41843b9d80d3fd95fbd19e058598f", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:9de0b544b2373ea0f1b7217f9179898479dbff0da36ea9857783de57d06585cf", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:761bf911674d23053eceabbbda8da16c73af5f300929a33a64513dc6e3b2d0af", "enabled": 1 } }, "openct": { "100": { "checksum": "sha256:5674f8e8c975570649e3065460786cb4521a86370bffef5a9de18c69813fe68e", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:bdef6dbb24ae22548634759ac823a8c3e21fde6368cfdfd742480f7027e63ddd", "enabled": 1 } }, "openfortivpn": { "100": { "checksum": "sha256:1a1bff55993510cb6481383b299e1f1a6349ec76e4947bfc8c5b1347e4d30bf4", "enabled": 1 } }, "openhpid": { "100": { 
"checksum": "sha256:ad3f3f3ba4442930560b291c022e674e6a50e4a37fe027926299b2f6cdec14bd", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:329e4b9d1df5012ace94cbe9cba7dfa7ee7d9f242090072c71aaacbeea78986a", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:31cbbb069354f984e4af75b387778fae1ff4dc6c3e60533357d005ffa960b51c", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:c0e1bf0a8eb50e0b41fa69bf5b65e2a7c324e4bc7255933a5d2bac3b9ae6f4de", "enabled": 1 } }, "openvpn": { "100": { "checksum": "sha256:a4d12ae8ad77d65d0fcabb20aa4a83886e782d732123f686f88a7d7472384104", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:a54f8a8ea5abb8a33734ecef9d9ad1c0dd090a6e0c5187e80de52f522d2d5e39", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:d6b7bb8f7749265bdaf938abecb2f8f78c6e9e8dc06c1c26b48da227af5a8654", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:67e31eec391bac337ebacb78c096589af4b7e8be6aa05c34cf187ba922a2abde", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:6635ff0231bfc3d88c771553d495941ee0f98871edfe6c86205b087186b3a72f", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:5b4531e9231d399ebec8e6b6870a812c6a64b2daffde35fa57a009b24a01809f", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:912a1c442559d6ab48453d87e2b997bdee3017a54a0b60aeaf7d4603fde0f34b", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:456b3520c26e5f2a913437318715712ae00f64932a27ab1bb8b8b42e0524fa05", "enabled": 1 } }, "pcp": { "100": { "checksum": "sha256:5302332fba7e6724ab7a3c32bd523b10322c20011c6e42ae4e769a49f3efabdd", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:2ee37df066a9ff80439b08c092809f3661e2f9a8ad02134e839627fd23a20c1f", "enabled": 1 } }, "pdns": { "100": { "checksum": "sha256:a1a10cd52eb9dd15bc1ccfed440f6b3d235edc7405a3932f81805d8d94000245", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:4280c40629dd111fd1c89ff867ac72d1e7ddde49dc3d286637e6a86b868e2303", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:6461acd0385c0b1a32bf646fc9e09da0c7ca513954ed8fe2a03f4ee7f6a64fcf", "enabled": 1 } }, "pingd": { "100": { "checksum": "sha256:f7536a518a046b793ea3f74a67d677b878baac44b28268c5ccecbf10715d89ab", "enabled": 1 } }, "piranha": { "100": { "checksum": "sha256:11436fb7942d28e3eca22bc078ee5475f632d8447008a6414f337d4bbc3515dc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:c70e17d1a4d347b38fdfbb2a5dab292e3e0c538ea52fb6cfdef2714e130da0b1", "enabled": 1 } }, "pkcs11proxyd": { "100": { "checksum": "sha256:c9582c89cac1546fa1e5bf9802c5a322e52e2529256f9e5922d5813e40be3646", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:ec40fbe6355370fe69a8ff343744654b06d4134c1518c64269be1f3a49083968", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:7aa52d533e28a3ebf76d879c24bb4e0a58574033d5af6d4d22b716d1156c3f90", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:b32a5cc38b8edcc76b94862cee0c822a5b4d095329f53ab6f7cb014c76346e8c", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:686d9f7652cb2b3d7ce6af2aa620c14a6cbbbdb8d26b3630cfbf6bc34d9e3e6c", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:6098bd8a4f449c01dc7e0f4509663994259fe8848f2f21d1319bf7105bbacc4e", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:f561aef22cda98a94a74bedda09645e50066a77a23d3bdcbb1143b0c62ffe7b2", "enabled": 1 } }, 
"portreserve": { "100": { "checksum": "sha256:9de99e881e9e2e7e0b78629eec721840da4aa18f78ff5a06e46b7a596c28a09a", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:3101c4c1d54f3e175dc3fcff001c6937a9ffec7781f4095ea38fea88df7e8067", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:a734cc086d7d73ef2ffe7543f82dc50b57619e78e60664cb67a9513790f3335a", "enabled": 1 } }, "postgrey": { "100": { "checksum": "sha256:ef4d03336b66c1184f352f9b3fe8004d870bbf003673d4393bde24ea14b056b8", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:83e6712ba7343dc1346e94c51b75b05839f78bd24f9324d984b7aa9631bd0377", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:df050b0d180947788ab45862c4627ae640c92cf0f6a994a685e4cb5fe46bef76", "enabled": 1 } }, "prelude": { "100": { "checksum": "sha256:88c5fa3da64c127ed6e688f9eba5e50a8f6f98ea3243d29b8b0bc0375ef95420", "enabled": 1 } }, "privoxy": { "100": { "checksum": "sha256:e4a84567c63c892d4cdda3a9a4b15ad5188c093da679a354f00c43b6376a844d", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:98170eed35b67b9097514bcb044a18cc3f757af5f91b5d870ea707d6048cde75", "enabled": 1 } }, "prosody": { "100": { "checksum": "sha256:07e999e033252b28ae41697ddc23b42dbcf4bdc143c9eb1c55475aabc9fc9caf", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:7fc3410de486bf89c4d35989937f424b435c9c4f5398f47f9c840b146197c6ac", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:129978bcb62fdcaed728fb288b321c204575246eb535354e02bfd83089cb0ded", "enabled": 1 } }, "publicfile": { "100": { "checksum": "sha256:9cc75080e25fb5602ab266f1c0d0f16843bdfc561e7af6dec32d669e31bebe98", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:a41fc5d1275d548510a2be0180741f952f0f696f443eaabf03c1abf3f80f499e", "enabled": 1 } }, "puppet": { "100": { "checksum": "sha256:81559a7d5e16e228382840986ae0e414d4a78163a9b51b5d9c05a58e07574e8d", "enabled": 1 } }, "pwauth": { "100": { "checksum": "sha256:8590f80ce91ddd4862ce2beab9ec64deb66d99c5583ff5ee3cbff2e503caaa37", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:917a35c0ec48acfb5166c937e97269acac39541acebad9c1c410bfdbcb483da1", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:cfdb156d23ae6c99b3dbac171ab1626202bf1ae7671fae9f6d6f7241116638dd", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:eb4881c554de7882b4e5590a8efb35a758fc1b3d61bc1502632d6f4e571cb331", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:27d1fb8e99c6d1c75fc8efa8aeaf4303d0dcd8d03cb2992d968a3186d648f4b9", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:f0b2b81a6670b7640d49d49c364635f39272330f08bcdaa23c681bf2ac64e10f", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:791a60cff31fca43e01aa4bfe3a57c5938015db44fd1f64064778dbbcdb6e2e2", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:1cea7f5b37f7a0e722ecbccaa09d95db2b175ec125d62e3898a99081c51c6f96", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:a94b0b917312a73eda50ea641dee49eb00f49df286133fcdb13267fd49ce5d1f", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:159d40315f3f5086a31e6f0a6a90d342783d6f0c97c5feeb9c92808c7345adcf", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:a61f7efd50387ebfd35b675b22a8cba86c6216c0bbd901aab5e8674b5c442777", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:276a24e14ef12f5fadaeab2883d501cb096e01a9ce1be2178a5c50ebfa6b3fcb", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:61561d5f14d9a6597d6e312f5429947baab045d01a729f7cc34406e859fa0015", "enabled": 1 } }, 
"redis": { "100": { "checksum": "sha256:f40066828d25674c525148f890d9cc84ddbb203f5a4aaad616ef2cd3a497fdc3", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:742f881c1a4838ecfc1a55a7f3b78a72267644e3a64e3ec45a191599b5bd8532", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:5dd2b902123ef00065db6ec8d173f37baa26dbe43566bd5f06594ef1243fd5fd", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:67f232676ac23535867e2494f04989dbd6b9b6d4bbc67df67dc2edb4d31a8be8", "enabled": 1 } }, "rhev": { "100": { "checksum": "sha256:ee2f26beaa5c6a5d25e03ef9ab30302d6b29b283283683421fab52e29e47fe3d", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:39c550e1c8b149dc6f308b0f9ef238315208453ee064bb1558eff9137531840f", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:16bff56244925c7696fa2da5a4c986132488c352149cc88181bf6b4143fc80ba", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:e999510837aabb3ce118ad61225a846f687588e9a321ffe675b56511191bc323", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:9000bd99784bc22ffda4493b4985e8c5a2e65e87aeaa1cb96ba82d367a27a8be", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:c72c61297cf864a1abda8226de08039c8ae0212808d3f7fd8725b53b955d59f6", "enabled": 1 } }, "rkhunter": { "100": { "checksum": "sha256:d48bd9c5789f4adc396773664402ddeab432caa99597267ccdf24220948e5b3c", "enabled": 1 } }, "rkt": { "100": { "checksum": "sha256:a9414e82cadd2876471465737bd8322eb833e296869ebcefcd9e722ff717d350", "enabled": 1 } }, "rlogin": { "100": { "checksum": "sha256:a4b2e25abc4099a0a54821518b7c824a2ddb7544fb0b5ddde9a0a9be159ac1b2", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:5c867af2674586cc1c41aa3203e3704a0d1400d344a8e257bc61e9eebb86ad03", "enabled": 1 } }, "rolekit": { "100": { "checksum": "sha256:73382d4b8a12fa161dbb5ba36c94e7f0b1f82b1abdf0a4f07ca6c981e08f271b", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:1a2503ebaa997c6b6efd5d2343ea731f73b2f0312f2e8d5578dad2c8a84a94fa", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:e423284f5ed36e7b6c52f581b444a981d5d1c8af6c8dabe8c6cb6c71d3f49fb2", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:53831134210db04fe6e6b0f05e20b8b7307ae8c11e774faec9e1b3aa2b02b5dc", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:acbd671bd661f9f2f25d4798f1646a51075f297c8b086ea9bd3133a00e356432", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:c6110313310591ee2a08b504b04ebd1b98f370b6633172f06ee7c0c7db0a963d", "enabled": 1 } }, "rshd": { "100": { "checksum": "sha256:1340ab5daac926cc1354452869ab5aa78d27ceb110543624d2ffaf93773c394b", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:9dabc52612d567e728786c007f5017c7032c02be3a9201521a530fc91ca789f8", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:33dffe2764dc45bbc59b406a67187c39864412bac07ee089bda30ef09cb70faa", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:9d55dfe843e44e8a93c02ea28b14856edfdb1f820bb647992daa6af11e2dbd37", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:ea77b9f26c8fc61b7fc281099b2f16e75c5b196660fff55a95f96e97935a7a1b", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:4468bfdd23924a96b4cf8c6fa1a3fa606fdd8ac69b7cb17c16a6e39a95908921", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:c97b92abaf053976c89a670d82bf06bc5c7d561ccf03e3ff1ac84be6e01cfc5c", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:18d1a69de368fa621e8ef3234b8ddb40261ced880bb732328a310db5a62a7a0a", "enabled": 1 } }, "sandboxX": { "100": { "checksum": 
"sha256:711df017c1f168e33245144d67289225439bbed701fb1146cb83e9cd63ce1f7a", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:093d9d9793142bb9a8c4375f5f368ca1a4d9beb0cd05329518f91bb9ea51bd06", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:536ce94509d38b40200debf17fbddc16ec9004463fdb3fc42890dde9b3eb56f1", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:57ecac942ea46af55728362527d70a3e135c3b4711688ddf62596b9a768d9fb0", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:2ab2f52e6bac063f176e007b39cd8a4e43012ea075d82af20fbb3403891b6493", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:7df09c8fa09e105ecf51fee797975603a2df8d15c3a0bf00fdb1d565fe4a6b91", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:9cf04d33aa9dec0b559c892fb20df89fbe1883544d4ac2d6bf6fc319f0a16663", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:e7f9a696e0958d6bdbd6696e67a9b4af62430456d0f278e290db0ea1ee9750b7", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c72355dc70789deb94777acd0b47c2c3ae628e8d90bffb0e0e320941e5ddf3b7", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:98f68238d6ca96277390c160adeed4e3e382d5ded5a88a3909cfebe986b849be", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:10ca96a581ef4b0fa1789160fd71fb340d8b1d13906b42fab6e9119033d4f942", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:3a172b4972f9271250b4d228541c78b0243fd0544ac983db0f590e09674f700d", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:f78edfcb470cd9929f45b6db29ae4924a286ab30a03f80b7bdf3699bccb98314", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:ba2043d9665e2fd3a9e2d103671bfe647060b93d9c02eed2dca3066a0ecfb81d", "enabled": 1 } }, "sge": { "100": { "checksum": "sha256:cf843c98ff4113ded675f79df694549b4f848aecb1295f0a510101e301fbd348", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:c7c49d28e52aba4d168e684b9160a225fbecab373bfbb6963bbe89c93ecb867b", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:be1825562f583305597e5ceb1298ebb60e42c4f270b4a7e3751cf9d9be1b1fac", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:14748519962688e62b7bc7e7c03ad91c1f815c5d33c63f2d60e03340f55609a8", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:9f26cf1e9fa128e98c758a6325525f8547950a2440b6582202228c3c5c2c80d9", "enabled": 1 } }, "smokeping": { "100": { "checksum": "sha256:ae8cbd09d519a42bc01063c4c16f58e96cb3673acb557dcd2d09af444d742db1", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:8aa5f2749eeaef5ae871dc903dad87611e369c92e9b3fc28b4944f75db785a18", "enabled": 1 } }, "smsd": { "100": { "checksum": "sha256:d36a762c836a0e4305773e352fe0f46657784b5d9bf749f02df9c6d15f68d101", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:62bba8f6a236bae902815188cedbb5f3090acf0829247e6808787f8c913d9981", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:68b5e9d408704e44ebf29ba76ae18afdcf6d8aef12794e8e9026997376ce12f8", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:eef39dec8d416650af3f9eeeb518b06dd9a9e09144aa579b6bd6422ba0037d70", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:c19dc2ed34c3d274f8e01647dc2d869ca06d4a9a3009f57c1845fac4d33ed358", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:a46a9508591afb1407fd14441c9c26cd495a3789e3c6792a2eba38a6642e4b97", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:8255ad891466762e31763d6f4791a32aa1eea1147a812020724eab8eb07c1916", "enabled": 1 } }, "speech-dispatcher": { 
"100": { "checksum": "sha256:ce5ba130d5d0ae5fafe8f823b824856590f990ad7c08aa0a5930f5060c252021", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:4170a7354e69ed60e0268389f74042e02a2511a4451ca20b97a63213b8881e1e", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:a4b4b395d2185abfd68edce0f813103ccbedd5d9748f9a41d83cc63dd1465109", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:5b0cc219f31e88f2fa78bc31d9c6fe6c7af29b4832509635672ca9edc79409c6", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:29cd0921e9effe356c856c3319488adf66c794cbb7d1610e5fca2b730b852939", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:943b25df416f2181aab46b3492aad9336f60a1b5b46187494f43ab516aae9c6a", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:788f2eb60a3d902060a6c5a08b086e2a1e96d213f86b206736da7e37eb21e51d", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:72c10f773d67b4209c39b4bea22e95c66d105f6f13e30f89bcd568eab6c889e3", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:736a46f682ff77d7c2cf54d5c264eb7b149793c12701b96e9be12bb3e6722796", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:0cc5796bfe362c3b28c73f62377c029a5f2321078b6d5f90bad42764415cd038", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:d96538a9cbb09fc38ba701cda88b2a0d199ab7826826d0043e4f07b05418bf84", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:a80606afbcc994e6fdc418cd83182f901d3e5b4b7b36fe262c71a25f43f10af1", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:19dfb362a8f445099eac9281522f0b13794cb9a0893a7acf0b54c15d193ef70e", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:f0e7b74086d47000f8335de5bade5a5a19a5e83bf581f885db92548546b7ea94", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:4614737ea0603530691e6158eb1bd07efa1992cb7ef52c201df3a637d3184cdf", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:f6a5a3b49885a9f780c5a9078cc968673809eaf89ecbe170fbb8a1ed4f521ea2", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:1fadc57b1e46515cbc038e96ae47ab74dd365a910f4d81ec9fb3044c4691260b", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:a5f0e5c340eaf127a166cc50be8170bfce80ccee0c14f32e4cc264089350da1a", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:fd538dbdeba0b4a1c244ba76b8dfef47f61da5a56f24f39fc24c137a9b3b303a", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bc0f37cdcdd0c9014e89e8be6758f7d9c97c67a4e42652459d6107314f059632", "enabled": 1 } }, "tcpd": { "100": { "checksum": "sha256:c78dcf2b9abf8d5ccf9f32b2debf6181a935a7078fe4a527991ab11d2999c4a9", "enabled": 1 } }, "tcsd": { "100": { "checksum": "sha256:e92fb82a2e509e3595d46dd464dac1029ce3a731f117fa67712d119d2878f195", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:fea41add022251126312da78373cb7fd05df1e9fd27547f1b4fc604a774827a1", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:06d4733c0fc7358d738d4dbf53968c9d9017a72b01456be46633364f00a4207d", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:8ba2497a28f4c2a31177811fc0a091a3bb9814f9e02cfc8d84c004718f661e5f", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:6ec8d4d38e58efa04572ac713c9148e7182e7d49713ed89955fabdd512b8eea4", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:c464da2b8e789d74ea2b2914217a194a3c07081b9f383acd2fee9ab77bc525b5", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:2ce98252c7ff59539bb38204ee65898ba6cc701c3dc87417c11e2e7124f448a3", "enabled": 1 } }, "timedatex": { "100": { "checksum": 
"sha256:df36b9f44f28df1b14b4d6bff01de42c414b947a8e6f1e6efdaa7023250709aa", "enabled": 1 } }, "tlp": { "100": { "checksum": "sha256:7b1d2643c7470dc5b80dee41d18482bb6fd6de55371aba888708a28fe0bb0172", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:2a54cea48dfbeb1c9dad0e167f70aa17970c4f2c76c560330c467051fe3b574b", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:de3ed9b8d62d29e80e29a051419a648c154c12f6bb188814ca79120ff1dc263b", "enabled": 1 } }, "tor": { "100": { "checksum": "sha256:16c95ae098af2b964a7a94b5bb6cd1c84d5c7f1254d6411209e4d5cfe87677bc", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:b90ac3a04d3f04c7284f75802ffd69d6c1c3d5c0e6d08c3d0f2d9270b99dd487", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:8f8a1f1b2fea7a9fb8c3853e02c830f5204f691e9223cbdfbc320ec6914725dc", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:24410f1221660b8443af29cb55e42180e268fce722ceed2c99aa202e7dd3cc21", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:dba41aee81015b99378cff2273a56effd1202c0c937c05c63a913243b0641cdc", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:29e7469ef2704943f23c5040531fee8657cfed8440ef44b6268d21e6a9afe309", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:54482715f4fb5bca5c68ff67b9d145d12ad3df1438db97bcadcc32a2fb0f6191", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:13e69d4cbec7926c0ac6fb796749b4286462add3051f1e94554f23e637b81277", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:cb370bbe8bc0d7bca49a4fd1fad652017f4f8587c7c9d3277155fba32987550e", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:bbb2700ca73d867432851e12276a932b1553b034b1cc635f5c6681d6b62dcd3a", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:57a37a5c07af0f7ad80f4f01173e6cd6b604659e2d1b5605c2719dff8bbaf2fb", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:683c0598bdd00543cb696f7ed8cce6b55c658e566141538fc01b3f852af5f697", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:852eb8259277c64b80c91bd1dcbbe85f629e7218ab2f51d39324dcd78a4a278e", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:066e429e71ebcf11014f4ff6d7647c9d6d88ff191c64eeb9793021d16f4cde97", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:74b817fb60fd3ed5f074ef8ff399342ddc49fb2c250b08015dc975edd48f4dfd", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:fa589ab303d10fadd28a3e8d27cc9bc2e55a9b28f28c3f4c7e05968cb00a7cdd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:c5e4e24e89775d797a8988e2d5f72ec7a7dd8387289ede61af7a3ce2173cf167", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:6a3659d3706bc3af4b60e5de7efa9532dcc0c0a6f0c7735ed1300ec2120f9d01", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:f85ad7d20dd77416ab246ee0837b016a648176ec9956f40ff2ac6b3c2924edc5", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:18dab548c81b02f1b0f3efd6e25dd529bb0565e974156d55e42e274d3ccdf704", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:ee8af0b085b727e060ac3c82f1e38c89545505c9b26e849eda22e571064c46e7", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:0f7c8c575b060e863fe17e7ee8c67cc5cc3ea31da734a5428dc62c15f3b15bf4", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:df433826471b1c65a3686b57b4b07872a695d900731feb88cd6dfb76ddcbc5d9", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:4a9362fc5876897cae7062564d54d7f8ae12413c65c4c7fc6709f6407cc27160", "enabled": 1 } }, "vmtools": { "100": { 
"checksum": "sha256:fb9dda20b16232ac253b148063c9b267356b6f2831650f4c00fa01a6d0a8024a", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:d0ce73ebc7d2f494b669257a9a68106245371b455566654c7062694bcbad35df", "enabled": 1 } }, "vnstatd": { "100": { "checksum": "sha256:1df1aaf42d9c96922226b4828c38b6d315f7a9d3cda60fe54d99be5d618e140d", "enabled": 1 } }, "vpn": { "100": { "checksum": "sha256:9ea8931bf1c97618b2e99afb8c60a13d51a84db878bffa4082f6973e23b13eb1", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:43663b66ef8275c639a8076d92fc7da6821e0523c120e2c854839f9dc9d1db66", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:65b78e9b48a6cfe62f6c67c443d3bc667a58d206c09df00870949b6ae7ff8c30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:65560477bd0ae271799a76f75c5a3d46ef0c29f6922aa38e727c95b7e1095a99", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:4d4d609b3be3c2dc659694cfd2076e0c0c0d6446d16a3fb054a9e5f951b29410", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:867139a0cc2cb236ee54575ce6a8568cdbefd6785e8b7f64e09a3041da46b095", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:419d697ac987518dee6095070e2894c4112b50256e59d2b4f6acac585fb087f8", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:ce85b40df4d548aa55eb54bc546943366b654a3af7f602817f1fc499c0c8039e", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:f5d46e297e4e8e0a3f76c1fc8ae96db3ebf5b99ab538a54c171e489ac94ae1f0", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:aeb8895098531d1607e389703c783a3c1e8a8c1ad962397debe65214ff86e29e", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:85f1f1ed778597ec568ab7b9069779c088219d1da283a09382439c6803e7863e", "enabled": 1 } }, "zabbix": { "100": { "checksum": "sha256:476521323be1b84d7ba2539aa208d857678746a76e7e079577d3f46d251637ac", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:7536116b2852a578cbc5d32f7752b6dd3bb1202817db05306e1a16553c1d43b6", "enabled": 1 } }, "zebra": { "100": { "checksum": "sha256:3d18bbdc44c396c7715cce348f9248712132a1c53341d3b5760016d245f86e75", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:44cf07d7e6b15709d131b8b406032d0e6395a84e1e20bc67f9320a1e97c4dfcc", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:1177170edbd47b6fe17fa022a247d9b75b1fb3a5a49721bcff3c7da4f480c702", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:115 Wednesday 02 April 2025 12:16:20 -0400 (0:00:03.260) 0:03:43.844 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:128 Wednesday 02 April 2025 12:16:20 -0400 (0:00:00.056) 0:03:43.901 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:136 Wednesday 02 April 2025 12:16:20 -0400 (0:00:00.040) 0:03:43.941 ******* skipping: [managed-node2] => { "changed": false, 
"skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:16:20 -0400 (0:00:00.063) 0:03:44.005 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:16:21 -0400 (0:00:00.134) 0:03:44.140 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:16:21 -0400 (0:00:00.055) 0:03:44.196 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:16:21 -0400 (0:00:00.043) 0:03:44.239 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:16:21 -0400 (0:00:00.048) 0:03:44.288 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Wednesday 02 April 2025 12:16:21 -0400 (0:00:00.154) 0:03:44.443 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "debug": true, "log_level": "debug", "state": "started" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: httpd1\nspec:\n containers:\n - command:\n - /bin/busybox-extras\n - httpd\n - -f\n - -p\n - 80\n image: quay.io/libpod/testimage:20210610\n name: httpd1\n ports:\n - containerPort: 80\n hostPort: 15001\n volumeMounts:\n - mountPath: /var/www:Z\n name: www\n - mountPath: /var/httpd-create:Z\n name: create\n workingDir: /var/www\n volumes:\n - hostPath:\n path: /tmp/lsr_sr1vi4ai_podman/httpd1\n name: www\n - hostPath:\n path: /tmp/lsr_sr1vi4ai_podman/httpd1-create\n name: create\n" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container 
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Wednesday 02 April 2025 12:16:21 -0400 (0:00:00.063) 0:03:44.506 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "httpd1" }, "spec": { "containers": [ { "command": [ "/bin/busybox-extras", "httpd", "-f", "-p", 80 ], "image": "quay.io/libpod/testimage:20210610", "name": "httpd1", "ports": [ { "containerPort": 80, "hostPort": 15001 } ], "volumeMounts": [ { "mountPath": "/var/www:Z", "name": "www" }, { "mountPath": "/var/httpd-create:Z", "name": "create" } ], "workingDir": "/var/www" } ], "volumes": [ { "hostPath": { "path": "/tmp/lsr_sr1vi4ai_podman/httpd1" }, "name": "www" }, { "hostPath": { "path": "/tmp/lsr_sr1vi4ai_podman/httpd1-create" }, "name": "create" } ] } }, "__podman_kube_file": "", "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "podman_basic_user" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 Wednesday 02 April 2025 12:16:21 -0400 (0:00:00.068) 0:03:44.574 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_name": "httpd1", "__podman_rootless": true }, "changed": false }
TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:38 Wednesday 02 April 2025 12:16:21 -0400 (0:00:00.053) 0:03:44.628 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2
TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:16:21 -0400 (0:00:00.157) 0:03:44.785 ******* ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "podman_basic_user": [ "x", "3001", "3001", "", "/home/podman_basic_user", "/bin/bash" ] } }, "changed": false }
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:16:22 -0400 (0:00:00.385) 0:03:45.171 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:16:22 -0400 (0:00:00.049) 0:03:45.221 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "3001" }, "changed": false }
TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:16:22 -0400 (0:00:00.062) 0:03:45.283 ******* ok:
[managed-node2] => { "changed": false, "stat": { "atime": 1743610430.4085276, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:16:22 -0400 (0:00:00.359) 0:03:45.643 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "podman_basic_user" ], "delta": "0:00:00.003337", "end": "2025-04-02 12:16:22.796877", "rc": 0, "start": "2025-04-02 12:16:22.793540" } STDOUT: 0: podman_basic_user 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:16:22 -0400 (0:00:00.364) 0:03:46.008 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "podman_basic_user" ], "delta": "0:00:00.003052", "end": "2025-04-02 12:16:23.177931", "rc": 0, "start": "2025-04-02 12:16:23.174879" } STDOUT: 0: podman_basic_user 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:16:23 -0400 (0:00:00.379) 0:03:46.387 ******* ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "podman_basic_user": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "podman_basic_user": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:16:23 -0400 (0:00:00.058) 0:03:46.445 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:16:23 -0400 (0:00:00.044) 0:03:46.489 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:16:23 -0400 
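The two `getsubids` lookups above are how the role decides whether rootless podman can allocate user namespaces for this account: `0: podman_basic_user 100000 65536` means subordinate IDs 100000-165535 are delegated to the user. A standalone check in the same spirit might look like this (a sketch; the play and the field parsing are mine, not taken from the role):

- name: Verify subordinate ID ranges for a rootless podman user (sketch)
  hosts: managed-node2
  become: true
  tasks:
    - name: Query the subuid allocation
      ansible.builtin.command: getsubids podman_basic_user
      register: subuids
      changed_when: false

    - name: Query the subgid allocation
      ansible.builtin.command: getsubids -g podman_basic_user
      register: subgids
      changed_when: false

    # getsubids prints "INDEX: USER START RANGE", so fields 2 and 3 hold the range.
    - name: Record start/range the way the role's podman_subuid_info fact does
      ansible.builtin.set_fact:
        subuid_info:
          start: "{{ subuids.stdout.split()[2] | int }}"
          range: "{{ subuids.stdout.split()[3] | int }}"

On hosts without `getsubids` (older shadow-utils), the role instead parses /etc/subuid and /etc/subgid directly; that is what the skipped "Get subuid file" / "Get subgid file" tasks above are for.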
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:16:23 -0400 (0:00:00.042) 0:03:46.532 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:16:23 -0400 (0:00:00.043) 0:03:46.576 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:16:23 -0400 (0:00:00.047) 0:03:46.623 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:43 Wednesday 02 April 2025 12:16:23 -0400 (0:00:00.152) 0:03:46.776 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_kube", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:52 Wednesday 02 April 2025 12:16:23 -0400 (0:00:00.052) 0:03:46.828 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/podman_basic_user", "__podman_xdg_runtime_dir": "/run/user/3001" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:60 Wednesday 02 April 2025 12:16:23 -0400 (0:00:00.065) 0:03:46.894 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/home/podman_basic_user/.config/containers/ansible-kubernetes.d" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Wednesday 02 April 2025 12:16:23 -0400 (0:00:00.045) 0:03:46.939 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:68 Wednesday 02 April 2025 12:16:23 -0400 (0:00:00.050) 0:03:46.989 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
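Parts 3-5 above pin down the rootless layout that the rest of the run relies on. Collected in one place (values from this run; comments mine):

__podman_systemd_scope: user            # rootless units run under `systemd --user`, not the system instance
__podman_user_home_dir: /home/podman_basic_user
__podman_xdg_runtime_dir: /run/user/3001    # /run/user/<uid>; where rootless podman keeps runtime state
__podman_kube_path: /home/podman_basic_user/.config/containers/ansible-kubernetes.d
__podman_kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml   # <kube name>.yml

For a root deployment the role would use the system systemd scope instead, and, per the role's documentation as I recall it, a kube path under /etc/containers/ansible-kubernetes.d.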
TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:75 Wednesday 02 April 2025 12:16:23 -0400 (0:00:00.049) 0:03:47.039 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "systemd-escape", "--template", "podman-kube@.service", "/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml" ], "delta": "0:00:00.004994", "end": "2025-04-02 12:16:24.191252", "rc": 0, "start": "2025-04-02 12:16:24.186258" }
STDOUT: podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service
TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:83 Wednesday 02 April 2025 12:16:24 -0400 (0:00:00.379) 0:03:47.418 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:87 Wednesday 02 April 2025 12:16:24 -0400 (0:00:00.044) 0:03:47.462 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node2
TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2 Wednesday 02 April 2025 12:16:24 -0400 (0:00:00.090) 0:03:47.553 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2
TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:16:24 -0400 (0:00:00.156) 0:03:47.709 ******* changed: [managed-node2] => { "changed": true, "cmd": [ "loginctl", "enable-linger", "podman_basic_user" ], "delta": "0:00:00.017616", "end": "2025-04-02 12:16:24.880467", "rc": 0, "start": "2025-04-02 12:16:24.862851" }
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:16:24 -0400 (0:00:00.408) 0:03:48.118 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false }
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:16:25 -0400 (0:00:00.048) 0:03:48.167 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Get the host mount volumes] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7 Wednesday 02 April 2025 12:16:25 -0400 (0:00:00.045) 0:03:48.212 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_volumes": [ "/tmp/lsr_sr1vi4ai_podman/httpd1", "/tmp/lsr_sr1vi4ai_podman/httpd1-create" ] }, "changed": false }
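Two mechanics above deserve a note: `systemd-escape --template` maps the kube file path onto an instance of the `podman-kube@.service` template (yielding the escaped unit name in STDOUT), and `loginctl enable-linger` keeps the user's `systemd --user` instance, and therefore that unit, running with no login session open. As standalone tasks this could be sketched as follows (task names and the `creates:` guard are mine; the linger flag path is the usual logind location, not something this log shows):

- name: Unit naming and linger for rootless podman-kube services (sketch)
  hosts: managed-node2
  become: true
  tasks:
    - name: Escape the kube file path into a podman-kube@.service instance name
      ansible.builtin.command:
        argv:
          - systemd-escape
          - --template
          - podman-kube@.service
          - /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
      register: unit_name
      changed_when: false

    - name: Keep podman_basic_user's user units running between logins
      ansible.builtin.command: loginctl enable-linger podman_basic_user
      args:
        creates: /var/lib/systemd/linger/podman_basic_user  # logind records linger state here

The escaped name reported above (`podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service`) is the unit the role later starts via the user systemd instance.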
TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18 Wednesday 02 April 2025 12:16:25 -0400 (0:00:00.065) 0:03:48.277 ******* changed: [managed-node2] => (item=/tmp/lsr_sr1vi4ai_podman/httpd1) => { "ansible_loop_var": "item", "changed": true, "gid": 3001, "group": "podman_basic_user", "item": "/tmp/lsr_sr1vi4ai_podman/httpd1", "mode": "0755", "owner": "podman_basic_user", "path": "/tmp/lsr_sr1vi4ai_podman/httpd1", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 23, "state": "directory", "uid": 3001 } changed: [managed-node2] => (item=/tmp/lsr_sr1vi4ai_podman/httpd1-create) => { "ansible_loop_var": "item", "changed": true, "gid": 3001, "group": "podman_basic_user", "item": "/tmp/lsr_sr1vi4ai_podman/httpd1-create", "mode": "0755", "owner": "podman_basic_user", "path": "/tmp/lsr_sr1vi4ai_podman/httpd1-create", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 3001 }
TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29 Wednesday 02 April 2025 12:16:25 -0400 (0:00:00.769) 0:03:49.047 ******* changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true }
TASK [fedora.linux_system_roles.podman : Check the kubernetes yaml file] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:53 Wednesday 02 April 2025 12:16:28 -0400 (0:00:02.603) 0:03:51.650 ******* ok: [managed-node2] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } }
TASK [fedora.linux_system_roles.podman : Ensure the kubernetes directory is present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:61 Wednesday 02 April 2025 12:16:28 -0400 (0:00:00.382) 0:03:52.033 ******* changed: [managed-node2] => { "changed": true, "gid": 3001, "group": "podman_basic_user", "mode": "0755", "owner": "podman_basic_user", "path": "/home/podman_basic_user/.config/containers/ansible-kubernetes.d", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 6, "state": "directory", "uid": 3001 }
TASK [fedora.linux_system_roles.podman : Ensure kubernetes yaml files are present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:70 Wednesday 02 April 2025 12:16:29 -0400 (0:00:00.369) 0:03:52.403 ******* changed: [managed-node2] => { "changed": true, "checksum": "4ad2273e81c5630cfdda46a5cba365174cbe07a0", "dest": "/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml", "gid": 3001, "group": "podman_basic_user", "md5sum": "107fcdc9f29af431f9718d78b14223c1", "mode": "0644", "owner": "podman_basic_user", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 762, "src": "/root/.ansible/tmp/ansible-tmp-1743610589.315067-13234-160122669195990/source", "state": "file", "uid": 3001 }
TASK [fedora.linux_system_roles.podman : Update containers/pods] *************** task path:
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:80 Wednesday 02 April 2025 12:16:29 -0400 (0:00:00.658) 0:03:53.062 ******* changed: [managed-node2] => { "actions": [ "/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml" ], "changed": true } STDOUT: Pod: 4b39b176161d4c697539de0d2e9a0599176c2647a6d2703b4808ef9a140b8567 Container: f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b STDERR: time="2025-04-02T12:16:30-04:00" level=info msg="/bin/podman filtering at log level debug" time="2025-04-02T12:16:30-04:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-04-02T12:16:30-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-04-02T12:16:30-04:00" level=info msg="Using sqlite as database backend" time="2025-04-02T12:16:30-04:00" level=debug msg="Using graph driver overlay" time="2025-04-02T12:16:30-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-04-02T12:16:30-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-04-02T12:16:30-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-04-02T12:16:30-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-04-02T12:16:30-04:00" level=debug msg="Using transient store: false" time="2025-04-02T12:16:30-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-04-02T12:16:30-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-04-02T12:16:30-04:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-04-02T12:16:30-04:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-04-02T12:16:30-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-04-02T12:16:30-04:00" level=debug msg="Initializing event backend file" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime 
crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" time="2025-04-02T12:16:30-04:00" level=info msg="Setting parallel job count to 7" time="2025-04-02T12:16:30-04:00" level=debug msg="Successfully loaded 1 networks" time="2025-04-02T12:16:30-04:00" level=debug msg="found free device name cni-podman1" time="2025-04-02T12:16:30-04:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2025-04-02T12:16:30-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-04-02 12:16:30.319987309 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-04-02T12:16:30-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-04-02T12:16:30-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-04-02T12:16:30-04:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-04-02T12:16:30-04:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="FROM \"scratch\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-04-02T12:16:30-04:00" level=debug msg="Check for idmapped mounts support " time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2025-04-02T12:16:30-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/94d13db38c50b49ef19d77e255f7b915a1f3211e6d62febb29445663a8017508/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/94d13db38c50b49ef19d77e255f7b915a1f3211e6d62febb29445663a8017508/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/94d13db38c50b49ef19d77e255f7b915a1f3211e6d62febb29445663a8017508/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c326,c771\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Container ID: 91dce24906e928e486c63111f49274b7015b4c6e7d554ff17a8f8b068e51a5dc" time="2025-04-02T12:16:30-04:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2025-04-02T12:16:30-04:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil)}" time="2025-04-02T12:16:30-04:00" level=debug msg="added content file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd" time="2025-04-02T12:16:30-04:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2025-04-02T12:16:30-04:00" level=debug msg="COMMIT localhost/podman-pause:4.9.4-dev-1708535009" time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-04-02T12:16:30-04:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-04-02T12:16:30-04:00" level=debug msg="committing image with reference \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" is allowed by policy" time="2025-04-02T12:16:30-04:00" level=debug msg="layer list: [\"94d13db38c50b49ef19d77e255f7b915a1f3211e6d62febb29445663a8017508\"]" time="2025-04-02T12:16:30-04:00" level=debug msg="using \"/var/tmp/buildah1081529911\" to hold temporary data" time="2025-04-02T12:16:30-04:00" level=debug msg="Tar with 
options on /home/podman_basic_user/.local/share/containers/storage/overlay/94d13db38c50b49ef19d77e255f7b915a1f3211e6d62febb29445663a8017508/diff" time="2025-04-02T12:16:30-04:00" level=debug msg="layer \"94d13db38c50b49ef19d77e255f7b915a1f3211e6d62febb29445663a8017508\" size is 767488 bytes, uncompressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690, possibly-compressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690" time="2025-04-02T12:16:30-04:00" level=debug msg="OCIv1 config = {\"created\":\"2025-04-02T16:16:30.452973542Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2025-04-02T16:16:30.452504898Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-04-02T16:16:30.457979708Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-04-02T12:16:30-04:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\",\"size\":668},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\",\"size\":767488}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2025-04-02T12:16:30-04:00" level=debug msg="Docker v2s2 config = {\"created\":\"2025-04-02T16:16:30.452973542Z\",\"container\":\"91dce24906e928e486c63111f49274b7015b4c6e7d554ff17a8f8b068e51a5dc\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2025-04-02T16:16:30.452504898Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-04-02T16:16:30.457979708Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-04-02T12:16:30-04:00" level=debug msg="Docker v2s2 manifest = 
{\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1342,\"digest\":\"sha256:d13f2d808cfa70e6213b7672ef9edf673d4884b1e839e40654941f83b0fccedd\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":767488,\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"}]}" time="2025-04-02T12:16:30-04:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2025-04-02T12:16:30-04:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2025-04-02T12:16:30-04:00" level=debug msg=" Using transport \"containers-storage\" policy section " time="2025-04-02T12:16:30-04:00" level=debug msg=" Requirement 0: allowed" time="2025-04-02T12:16:30-04:00" level=debug msg="Overall: allowed" time="2025-04-02T12:16:30-04:00" level=debug msg="start reading config" time="2025-04-02T12:16:30-04:00" level=debug msg="finished reading config" time="2025-04-02T12:16:30-04:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2025-04-02T12:16:30-04:00" level=debug msg="... will first try using the original manifest unmodified" time="2025-04-02T12:16:30-04:00" level=debug msg="Checking if we can reuse blob sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2025-04-02T12:16:30-04:00" level=debug msg="reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"" time="2025-04-02T12:16:30-04:00" level=debug msg="No compression detected" time="2025-04-02T12:16:30-04:00" level=debug msg="Using original blob without modification" time="2025-04-02T12:16:30-04:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690/diff" time="2025-04-02T12:16:30-04:00" level=debug msg="finished reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"" time="2025-04-02T12:16:30-04:00" level=debug msg="No compression detected" time="2025-04-02T12:16:30-04:00" level=debug msg="Compression change for blob sha256:1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2025-04-02T12:16:30-04:00" level=debug msg="Using original blob without modification" time="2025-04-02T12:16:30-04:00" level=debug msg="setting image creation date to 2025-04-02 16:16:30.452973542 +0000 UTC" time="2025-04-02T12:16:30-04:00" level=debug msg="created new image ID \"1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\" with metadata \"{}\"" time="2025-04-02T12:16:30-04:00" level=debug msg="added name \"localhost/podman-pause:4.9.4-dev-1708535009\" to image \"1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into 
\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-04-02T12:16:30-04:00" level=debug msg="printing final image id \"1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Pod using bridge network mode" time="2025-04-02T12:16:30-04:00" level=debug msg="Got pod cgroup as /libpod_parent/4b39b176161d4c697539de0d2e9a0599176c2647a6d2703b4808ef9a140b8567" time="2025-04-02T12:16:30-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543)" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543" time="2025-04-02T12:16:30-04:00" level=debug msg="using systemd mode: false" time="2025-04-02T12:16:30-04:00" level=debug msg="setting container name 4b39b176161d-infra" time="2025-04-02T12:16:30-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Allocated lock 1 for container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25" time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Created container \"15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Container \"15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25\" has work directory 
\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Container \"15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25\" has run directory \"/run/user/3001/containers/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-04-02T12:16:30-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:30-04:00" level=debug msg="using systemd mode: false" time="2025-04-02T12:16:30-04:00" level=debug msg="adding container to pod httpd1" time="2025-04-02T12:16:30-04:00" level=debug msg="setting container name httpd1-httpd1" 
time="2025-04-02T12:16:30-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-04-02T12:16:30-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding mount /proc" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding mount /dev" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding mount /dev/pts" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding mount /dev/mqueue" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding mount /sys" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-04-02T12:16:30-04:00" level=debug msg="Allocated lock 2 for container f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b" time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Created container \"f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Container \"f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Container \"f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b\" has run directory \"/run/user/3001/containers/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Strongconnecting node 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25" time="2025-04-02T12:16:30-04:00" level=debug msg="Pushed 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 onto stack" time="2025-04-02T12:16:30-04:00" level=debug msg="Finishing node 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25. Popped 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 off stack" time="2025-04-02T12:16:30-04:00" level=debug msg="Strongconnecting node f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b" time="2025-04-02T12:16:30-04:00" level=debug msg="Pushed f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b onto stack" time="2025-04-02T12:16:30-04:00" level=debug msg="Finishing node f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b. 
Popped f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b off stack" time="2025-04-02T12:16:30-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/FZGFET7VX4DSYH6K5NJN7HBLIW,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/add239a525a71d20e89f26cdceeffdb465defe64274853a2d37a62c98061a689/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/add239a525a71d20e89f26cdceeffdb465defe64274853a2d37a62c98061a689/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c426,c636\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Mounted container \"15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/add239a525a71d20e89f26cdceeffdb465defe64274853a2d37a62c98061a689/merged\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Created root filesystem for container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 at /home/podman_basic_user/.local/share/containers/storage/overlay/add239a525a71d20e89f26cdceeffdb465defe64274853a2d37a62c98061a689/merged" time="2025-04-02T12:16:30-04:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-5a6e59dd-c7c3-d14f-d70f-ebcf9881e68b for container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25" time="2025-04-02T12:16:30-04:00" level=debug msg="creating rootless network namespace with name \"rootless-netns-d22c9f230d0691b8f418\"" time="2025-04-02T12:16:30-04:00" level=debug msg="slirp4netns command: /bin/slirp4netns --disable-host-loopback --mtu=65520 --enable-sandbox --enable-seccomp --enable-ipv6 -c -r 3 --netns-type=path /run/user/3001/netns/rootless-netns-d22c9f230d0691b8f418 tap0" time="2025-04-02T12:16:30-04:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" time="2025-04-02T12:16:30-04:00" level=debug msg="cni result for container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:52:02:0c:1f:15:9a Sandbox:} {Name:veth8a97fc83 Mac:16:84:6e:4b:c8:f3 Sandbox:} {Name:eth0 Mac:8e:8c:3c:12:ee:fb Sandbox:/run/user/3001/netns/netns-5a6e59dd-c7c3-d14f-d70f-ebcf9881e68b}] [{Version:4 Interface:0xc000cb7978 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport: time=\"2025-04-02T12:16:30-04:00\" level=info msg=\"Starting parent driver\"\n" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport: time=\"2025-04-02T12:16:30-04:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport365186404/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport365186404/.bp.sock]\"\n" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport: time=\"2025-04-02T12:16:30-04:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport: time=\"2025-04-02T12:16:30-04:00\" level=info msg=\"Waiting for initComplete\"\n" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport: time=\"2025-04-02T12:16:30-04:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\ntime=\"2025-04-02T12:16:30-04:00\" level=info msg=\"Exposing ports [{ 80 15001 1 
tcp}]\"\n" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport: time=\"2025-04-02T12:16:30-04:00\" level=info msg=Ready\n" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport is ready" time="2025-04-02T12:16:30-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-04-02T12:16:30-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-04-02T12:16:30-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/add239a525a71d20e89f26cdceeffdb465defe64274853a2d37a62c98061a689/merged\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Created OCI spec for container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata/config.json" time="2025-04-02T12:16:30-04:00" level=debug msg="Got pod cgroup as " time="2025-04-02T12:16:30-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-04-02T12:16:30-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 -u 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata -p /run/user/3001/containers/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata/pidfile -n 4b39b176161d-infra --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25]" time="2025-04-02T12:16:30-04:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: mkdir /sys/fs/cgroup/cpu/libpod_parent: permission denied" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-04-02T12:16:30-04:00" level=debug msg="Received: 25856" time="2025-04-02T12:16:30-04:00" 
level=info msg="Got Conmon PID as 25846" time="2025-04-02T12:16:30-04:00" level=debug msg="Created container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 in OCI runtime" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-04-02T12:16:30-04:00" level=debug msg="Starting container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 with command [/catatonit -P]" time="2025-04-02T12:16:30-04:00" level=debug msg="Started container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25" time="2025-04-02T12:16:30-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/TQMG2VJFZSJRG4TRP4BZLQYKKC,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a49aabc4e6b99e922d2ecc9268062c0213aa47b643a3f64082785cdb629d82bf/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a49aabc4e6b99e922d2ecc9268062c0213aa47b643a3f64082785cdb629d82bf/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c426,c636\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Mounted container \"f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/a49aabc4e6b99e922d2ecc9268062c0213aa47b643a3f64082785cdb629d82bf/merged\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Created root filesystem for container f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b at /home/podman_basic_user/.local/share/containers/storage/overlay/a49aabc4e6b99e922d2ecc9268062c0213aa47b643a3f64082785cdb629d82bf/merged" time="2025-04-02T12:16:30-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-04-02T12:16:30-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-04-02T12:16:30-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-04-02T12:16:30-04:00" level=debug msg="Created OCI spec for container f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata/config.json" time="2025-04-02T12:16:30-04:00" level=debug msg="Got pod cgroup as " time="2025-04-02T12:16:30-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-04-02T12:16:30-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b -u f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata -p /run/user/3001/containers/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata/ctr.log --log-level debug --syslog --conmon-pidfile 
/run/user/3001/containers/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b]" time="2025-04-02T12:16:30-04:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for memory: mkdir /sys/fs/cgroup/memory/conmon: permission denied" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-04-02T12:16:31-04:00" level=debug msg="Received: 25877" time="2025-04-02T12:16:31-04:00" level=info msg="Got Conmon PID as 25867" time="2025-04-02T12:16:31-04:00" level=debug msg="Created container f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b in OCI runtime" time="2025-04-02T12:16:31-04:00" level=debug msg="Starting container f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b with command [/bin/busybox-extras httpd -f -p 80]" time="2025-04-02T12:16:31-04:00" level=debug msg="Started container f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b" time="2025-04-02T12:16:31-04:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-04-02T12:16:31-04:00" level=debug msg="Shutting down engines" TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:88 Wednesday 02 April 2025 12:16:31 -0400 (0:00:01.269) 0:03:54.331 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Enable service] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:100 Wednesday 02 April 2025 12:16:31 -0400 (0:00:00.697) 0:03:55.028 ******* changed: [managed-node2] => { "changed": true, "enabled": true, "name": "podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "run-user-3001.mount podman\\x2dkube.slice basic.target -.mount network-online.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": 
"shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "man:podman-kube-play(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PODMAN_SYSTEMD_UNIT=podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/user/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": 
"infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target podman\\x2dkube.slice", "RequiresMountsFor": "/run/user/3001/containers /home/podman_basic_user", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/podman_basic_user" } } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:113 Wednesday 02 April 2025 12:16:32 -0400 (0:00:00.568) 0:03:55.597 ******* changed: [managed-node2] => { "changed": true, "name": "podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "podman\\x2dkube.slice -.mount network-online.target run-user-3001.mount basic.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "default.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "man:podman-kube-play(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PODMAN_SYSTEMD_UNIT=podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/user/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", 
"IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target podman\\x2dkube.slice", "RequiresMountsFor": "/run/user/3001/containers /home/podman_basic_user", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", 
"SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "default.target", "Wants": "network-online.target", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/podman_basic_user" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:127 Wednesday 02 April 2025 12:16:33 -0400 (0:00:01.487) 0:03:57.084 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Wednesday 02 April 2025 12:16:34 -0400 (0:00:00.074) 0:03:57.159 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "debug": true, "log_level": "debug", "state": "started" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: httpd2\nspec:\n containers:\n - command:\n - /bin/busybox-extras\n - httpd\n - -f\n - -p\n - 80\n image: quay.io/libpod/testimage:20210610\n name: httpd2\n ports:\n - containerPort: 80\n hostPort: 15002\n volumeMounts:\n - mountPath: /var/www:Z\n name: www\n - mountPath: /var/httpd-create:Z\n name: create\n workingDir: /var/www\n volumes:\n - hostPath:\n path: /tmp/lsr_sr1vi4ai_podman/httpd2\n name: www\n - hostPath:\n path: /tmp/lsr_sr1vi4ai_podman/httpd2-create\n name: create\n" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Wednesday 02 April 2025 12:16:34 -0400 (0:00:00.093) 0:03:57.253 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "httpd2" }, "spec": { "containers": [ { "command": [ "/bin/busybox-extras", "httpd", "-f", "-p", 80 ], "image": "quay.io/libpod/testimage:20210610", "name": "httpd2", "ports": [ { "containerPort": 80, "hostPort": 15002 } ], "volumeMounts": [ { "mountPath": "/var/www:Z", "name": "www" }, { "mountPath": "/var/httpd-create:Z", "name": "create" } ], "workingDir": "/var/www" } ], "volumes": [ { "hostPath": { "path": "/tmp/lsr_sr1vi4ai_podman/httpd2" }, "name": "www" }, { "hostPath": { "path": "/tmp/lsr_sr1vi4ai_podman/httpd2-create" }, "name": "create" } ] } }, "__podman_kube_file": "", "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 Wednesday 02 April 2025 12:16:34 -0400 (0:00:00.094) 0:03:57.347 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_name": "httpd2", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:38 Wednesday 02 April 2025 12:16:34 -0400 (0:00:00.074) 0:03:57.422 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:16:34 -0400 (0:00:00.137) 0:03:57.559 ******* ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "root", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:16:34 -0400 (0:00:00.409) 0:03:57.968 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:16:34 -0400 (0:00:00.047) 0:03:58.016 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:16:35 -0400 (0:00:00.248) 0:03:58.265 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610430.4085276, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:16:35 -0400 (0:00:00.377) 0:03:58.642 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:16:35 -0400 (0:00:00.063) 0:03:58.705 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:16:35 -0400 (0:00:00.056) 0:03:58.762 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:16:35 -0400 (0:00:00.050) 0:03:58.812 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:16:35 -0400 (0:00:00.054) 0:03:58.867 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:16:35 -0400 (0:00:00.048) 0:03:58.916 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:16:35 -0400 (0:00:00.043) 0:03:58.959 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:16:35 -0400 (0:00:00.044) 0:03:59.004 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:43 Wednesday 02 April 2025 12:16:35 -0400 (0:00:00.043) 0:03:59.047 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_kube", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:52 Wednesday 02 April 2025 12:16:35 -0400 (0:00:00.039) 0:03:59.086 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:60 Wednesday 02 April 2025 12:16:36 -0400 (0:00:00.304) 0:03:59.391 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/etc/containers/ansible-kubernetes.d" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Wednesday 02 April 2025 12:16:36 -0400 (0:00:00.076) 0:03:59.467 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/etc/containers/ansible-kubernetes.d/httpd2.yml" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:68 Wednesday 02 April 2025 12:16:36 -0400 (0:00:00.081) 0:03:59.549 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:75 Wednesday 02 April 2025 12:16:36 -0400 (0:00:00.081) 0:03:59.630 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "systemd-escape", "--template", "podman-kube@.service", "/etc/containers/ansible-kubernetes.d/httpd2.yml" ], "delta": "0:00:00.005360", "end": "2025-04-02 12:16:36.848238", "rc": 0, "start": "2025-04-02 12:16:36.842878" } STDOUT: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:83 Wednesday 02 April 2025 12:16:36 -0400 (0:00:00.454) 0:04:00.085 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:87 Wednesday 02 April 2025 12:16:37 -0400 (0:00:00.065) 0:04:00.151 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2 Wednesday 02 April 2025 12:16:37 -0400 (0:00:00.149) 0:04:00.300 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK 
[fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:16:37 -0400 (0:00:00.124) 0:04:00.425 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:16:37 -0400 (0:00:00.068) 0:04:00.494 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:16:37 -0400 (0:00:00.164) 0:04:00.658 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the host mount volumes] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7 Wednesday 02 April 2025 12:16:37 -0400 (0:00:00.090) 0:04:00.749 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_volumes": [ "/tmp/lsr_sr1vi4ai_podman/httpd2", "/tmp/lsr_sr1vi4ai_podman/httpd2-create" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18 Wednesday 02 April 2025 12:16:37 -0400 (0:00:00.084) 0:04:00.834 ******* ok: [managed-node2] => (item=/tmp/lsr_sr1vi4ai_podman/httpd2) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "/tmp/lsr_sr1vi4ai_podman/httpd2", "mode": "0755", "owner": "root", "path": "/tmp/lsr_sr1vi4ai_podman/httpd2", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 23, "state": "directory", "uid": 0 } changed: [managed-node2] => (item=/tmp/lsr_sr1vi4ai_podman/httpd2-create) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/lsr_sr1vi4ai_podman/httpd2-create", "mode": "0755", "owner": "root", "path": "/tmp/lsr_sr1vi4ai_podman/httpd2-create", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29 Wednesday 02 April 2025 12:16:38 -0400 (0:00:00.726) 0:04:01.560 ******* ok: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Check the kubernetes yaml file] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:53 
Wednesday 02 April 2025 12:16:39 -0400 (0:00:01.548) 0:04:03.108 ******* ok: [managed-node2] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Ensure the kubernetes directory is present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:61 Wednesday 02 April 2025 12:16:40 -0400 (0:00:00.372) 0:04:03.481 ******* ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/ansible-kubernetes.d", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure kubernetes yaml files are present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:70 Wednesday 02 April 2025 12:16:40 -0400 (0:00:00.421) 0:04:03.902 ******* changed: [managed-node2] => { "changed": true, "checksum": "a3811d4e9a8822a1bb1782651c45a7ba596de3a2", "dest": "/etc/containers/ansible-kubernetes.d/httpd2.yml", "gid": 0, "group": "root", "md5sum": "0b84ac3f6b9577f1d97aa334a31ca786", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 762, "src": "/root/.ansible/tmp/ansible-tmp-1743610600.8094587-13586-112409744055035/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Update containers/pods] *************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:80 Wednesday 02 April 2025 12:16:41 -0400 (0:00:00.675) 0:04:04.578 ******* changed: [managed-node2] => { "actions": [ "/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml" ], "changed": true } STDOUT: Pod: c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a Container: 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 STDERR: time="2025-04-02T12:16:41-04:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-04-02T12:16:41-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-04-02T12:16:41-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-04-02T12:16:41-04:00" level=info msg="Using sqlite as database backend" time="2025-04-02T12:16:41-04:00" level=debug msg="Using graph driver overlay" time="2025-04-02T12:16:41-04:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Using run root /run/containers/storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-04-02T12:16:41-04:00" level=debug msg="Using tmp dir /run/libpod" time="2025-04-02T12:16:41-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-04-02T12:16:41-04:00" level=debug msg="Using transient store: false" time="2025-04-02T12:16:41-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-04-02T12:16:41-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-04-02T12:16:41-04:00" level=debug msg="Cached value indicated that metacopy is being used" 
time="2025-04-02T12:16:41-04:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-04-02T12:16:41-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-04-02T12:16:41-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-04-02T12:16:41-04:00" level=debug msg="Initializing event backend file" time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-04-02T12:16:41-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" time="2025-04-02T12:16:41-04:00" level=info msg="Setting parallel job count to 7" time="2025-04-02T12:16:41-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-04-02 12:14:18.041574535 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-04-02T12:16:41-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-04-02T12:16:41-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:41-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660)" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Pod using bridge network mode" time="2025-04-02T12:16:41-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice for parent machine.slice and name libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a" time="2025-04-02T12:16:41-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice" time="2025-04-02T12:16:41-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice" time="2025-04-02T12:16:41-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:41-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660)" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660" time="2025-04-02T12:16:41-04:00" level=debug msg="using systemd mode: false" time="2025-04-02T12:16:41-04:00" level=debug msg="setting container name c136295f156f-infra" time="2025-04-02T12:16:41-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Allocated lock 1 for container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6" time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-04-02T12:16:41-04:00" level=debug msg="Check for idmapped mounts support " time="2025-04-02T12:16:41-04:00" level=debug msg="Created container \"f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Container \"f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6\" has work directory \"/var/lib/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Container \"f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6\" has run directory \"/run/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:41-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" 
..." time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-04-02T12:16:41-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:41-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:41-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:41-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:41-04:00" level=debug msg="using systemd mode: false" time="2025-04-02T12:16:41-04:00" level=debug msg="adding container to pod httpd2" time="2025-04-02T12:16:41-04:00" level=debug msg="setting container name httpd2-httpd2" 
time="2025-04-02T12:16:41-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-04-02T12:16:41-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-04-02T12:16:41-04:00" level=debug msg="Adding mount /proc" time="2025-04-02T12:16:41-04:00" level=debug msg="Adding mount /dev" time="2025-04-02T12:16:41-04:00" level=debug msg="Adding mount /dev/pts" time="2025-04-02T12:16:41-04:00" level=debug msg="Adding mount /dev/mqueue" time="2025-04-02T12:16:41-04:00" level=debug msg="Adding mount /sys" time="2025-04-02T12:16:41-04:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-04-02T12:16:41-04:00" level=debug msg="Allocated lock 2 for container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1" time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Created container \"07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Container \"07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1\" has work directory \"/var/lib/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Container \"07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1\" has run directory \"/run/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Strongconnecting node f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6" time="2025-04-02T12:16:41-04:00" level=debug msg="Pushed f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 onto stack" time="2025-04-02T12:16:41-04:00" level=debug msg="Finishing node f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6. Popped f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 off stack" time="2025-04-02T12:16:41-04:00" level=debug msg="Strongconnecting node 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1" time="2025-04-02T12:16:41-04:00" level=debug msg="Pushed 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 onto stack" time="2025-04-02T12:16:41-04:00" level=debug msg="Finishing node 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1. 
Popped 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 off stack" time="2025-04-02T12:16:41-04:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/GDJMUPER7RS3E2YWZ32KQ44FIX,upperdir=/var/lib/containers/storage/overlay/e034212cb125fb2fbe7343a5d3ca260ceb356023a668e00e0470c456f3db9fc8/diff,workdir=/var/lib/containers/storage/overlay/e034212cb125fb2fbe7343a5d3ca260ceb356023a668e00e0470c456f3db9fc8/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c641,c1006\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Mounted container \"f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6\" at \"/var/lib/containers/storage/overlay/e034212cb125fb2fbe7343a5d3ca260ceb356023a668e00e0470c456f3db9fc8/merged\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Created root filesystem for container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 at /var/lib/containers/storage/overlay/e034212cb125fb2fbe7343a5d3ca260ceb356023a668e00e0470c456f3db9fc8/merged" time="2025-04-02T12:16:41-04:00" level=debug msg="Made network namespace at /run/netns/netns-4077cdc6-43eb-a845-b235-1712e179794f for container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6" time="2025-04-02T12:16:42-04:00" level=debug msg="cni result for container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:12:02:c2:48:d2:1a Sandbox:} {Name:veth5b8cd7f1 Mac:22:e1:36:42:c3:b0 Sandbox:} {Name:eth0 Mac:9e:ab:aa:86:81:84 Sandbox:/run/netns/netns-4077cdc6-43eb-a845-b235-1712e179794f}] [{Version:4 Interface:0xc00086fba8 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}" time="2025-04-02T12:16:42-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-04-02T12:16:42-04:00" level=debug msg="Setting Cgroups for container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 to machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice:libpod:f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6" time="2025-04-02T12:16:42-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-04-02T12:16:42-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/e034212cb125fb2fbe7343a5d3ca260ceb356023a668e00e0470c456f3db9fc8/merged\"" time="2025-04-02T12:16:42-04:00" level=debug msg="Created OCI spec for container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 at /var/lib/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata/config.json" time="2025-04-02T12:16:42-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice for parent machine.slice and name libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a" time="2025-04-02T12:16:42-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice" time="2025-04-02T12:16:42-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice" time="2025-04-02T12:16:42-04:00" level=debug msg="/usr/bin/conmon messages will be logged 
to syslog" time="2025-04-02T12:16:42-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 -u f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata -p /run/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata/pidfile -n c136295f156f-infra --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6]" time="2025-04-02T12:16:42-04:00" level=info msg="Running conmon under slice machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice and unitName libpod-conmon-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope" time="2025-04-02T12:16:42-04:00" level=debug msg="Received: 28160" time="2025-04-02T12:16:42-04:00" level=info msg="Got Conmon PID as 28150" time="2025-04-02T12:16:42-04:00" level=debug msg="Created container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 in OCI runtime" time="2025-04-02T12:16:42-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-04-02T12:16:42-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-04-02T12:16:42-04:00" level=debug msg="Starting container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 with command [/catatonit -P]" time="2025-04-02T12:16:42-04:00" level=debug msg="Started container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6" time="2025-04-02T12:16:42-04:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/O7YQPBL2AO7YYECS2J5W2BMVTA,upperdir=/var/lib/containers/storage/overlay/4a5e3927d0aa2cbfc4839e5bf95dd796483458fe346415ead4ec47ddb95353ee/diff,workdir=/var/lib/containers/storage/overlay/4a5e3927d0aa2cbfc4839e5bf95dd796483458fe346415ead4ec47ddb95353ee/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c641,c1006\"" 
time="2025-04-02T12:16:42-04:00" level=debug msg="Mounted container \"07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1\" at \"/var/lib/containers/storage/overlay/4a5e3927d0aa2cbfc4839e5bf95dd796483458fe346415ead4ec47ddb95353ee/merged\"" time="2025-04-02T12:16:42-04:00" level=debug msg="Created root filesystem for container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 at /var/lib/containers/storage/overlay/4a5e3927d0aa2cbfc4839e5bf95dd796483458fe346415ead4ec47ddb95353ee/merged" time="2025-04-02T12:16:42-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-04-02T12:16:42-04:00" level=debug msg="Setting Cgroups for container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 to machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice:libpod:07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1" time="2025-04-02T12:16:42-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-04-02T12:16:42-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-04-02T12:16:42-04:00" level=debug msg="Created OCI spec for container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 at /var/lib/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata/config.json" time="2025-04-02T12:16:42-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice for parent machine.slice and name libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a" time="2025-04-02T12:16:42-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice" time="2025-04-02T12:16:42-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice" time="2025-04-02T12:16:42-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-04-02T12:16:42-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 -u 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata -p /run/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg 
--volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1]" time="2025-04-02T12:16:42-04:00" level=info msg="Running conmon under slice machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice and unitName libpod-conmon-07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.scope" time="2025-04-02T12:16:42-04:00" level=debug msg="Received: 28182" time="2025-04-02T12:16:42-04:00" level=info msg="Got Conmon PID as 28171" time="2025-04-02T12:16:42-04:00" level=debug msg="Created container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 in OCI runtime" time="2025-04-02T12:16:42-04:00" level=debug msg="Starting container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 with command [/bin/busybox-extras httpd -f -p 80]" time="2025-04-02T12:16:42-04:00" level=debug msg="Started container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1" time="2025-04-02T12:16:42-04:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-04-02T12:16:42-04:00" level=debug msg="Shutting down engines" TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:88 Wednesday 02 April 2025 12:16:42 -0400 (0:00:01.059) 0:04:05.637 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Enable service] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:100 Wednesday 02 April 2025 12:16:43 -0400 (0:00:00.714) 0:04:06.352 ******* changed: [managed-node2] => { "changed": true, "enabled": true, "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket network-online.target sysinit.target -.mount system-podman\\x2dkube.slice basic.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid 
cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "man:podman-kube-play(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/httpd2.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/httpd2.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", 
"MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system-podman\\x2dkube.slice sysinit.target -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:113 Wednesday 02 April 2025 12:16:43 -0400 (0:00:00.684) 0:04:07.037 ******* changed: [managed-node2] => { "changed": true, "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket sysinit.target basic.target -.mount network-online.target system-podman\\x2dkube.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", 
"CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "man:podman-kube-play(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/httpd2.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/httpd2.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", 
"LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount system-podman\\x2dkube.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:127 Wednesday 02 April 2025 12:16:45 -0400 (0:00:01.668) 0:04:08.706 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK 
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Wednesday 02 April 2025 12:16:45 -0400 (0:00:00.079) 0:04:08.785 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "state": "started" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n labels:\n app: test\n io.containers.autoupdate: registry\n name: httpd3\nspec:\n containers:\n - name: httpd3\n image: quay.io/libpod/testimage:20210610\n command:\n - /bin/busybox-extras\n - httpd\n - -f\n - -p\n - 80\n ports:\n - containerPort: 80\n hostPort: 15003\n volumeMounts:\n - mountPath: /var/www:Z\n name: www\n - mountPath: /var/httpd-create:Z\n name: create\n workingDir: /var/www\n volumes:\n - name: www\n hostPath:\n path: \"/tmp/lsr_sr1vi4ai_podman/httpd3\"\n - name: create\n hostPath:\n path: \"/tmp/lsr_sr1vi4ai_podman/httpd3-create\"" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21 Wednesday 02 April 2025 12:16:45 -0400 (0:00:00.106) 0:04:08.891 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "httpd3" }, "spec": { "containers": [ { "command": [ "/bin/busybox-extras", "httpd", "-f", "-p", 80 ], "image": "quay.io/libpod/testimage:20210610", "name": "httpd3", "ports": [ { "containerPort": 80, "hostPort": 15003 } ], "volumeMounts": [ { "mountPath": "/var/www:Z", "name": "www" }, { "mountPath": "/var/httpd-create:Z", "name": "create" } ], "workingDir": "/var/www" } ], "volumes": [ { "hostPath": { "path": "/tmp/lsr_sr1vi4ai_podman/httpd3" }, "name": "www" }, { "hostPath": { "path": "/tmp/lsr_sr1vi4ai_podman/httpd3-create" }, "name": "create" } ] } }, "__podman_kube_file": "", "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33 Wednesday 02 April 2025 12:16:45 -0400 (0:00:00.102) 0:04:08.994 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_name": "httpd3", "__podman_rootless": false }, "changed": false }
TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:38 Wednesday 02 April 2025 12:16:45 -0400 (0:00:00.073) 0:04:09.068 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2
TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:16:46 -0400 (0:00:00.131) 0:04:09.200 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" }
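The "part 0" and "part 1" facts above show the role's two-step handling of a kube spec: it first renders the Pod manifest as a YAML string (__podman_kube_str), then parses that string into a dict (__podman_kube) so later tasks can read fields such as metadata.name. A minimal sketch of that parse step under illustrative variable names (not the role's actual task code, which lives in handle_kube_spec.yml):

- name: Parse the rendered manifest string into a dict (sketch)
  ansible.builtin.set_fact:
    my_kube: "{{ my_kube_str | from_yaml }}"

- name: Pull the pod name out of the parsed manifest (sketch)
  ansible.builtin.set_fact:
    my_kube_name: "{{ my_kube.metadata.name | d('') }}"

The pod name recovered this way ("httpd3" here) is what later feeds systemd-escape to build the podman-kube@...httpd3.yml.service unit name.
TASK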
[fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:16:46 -0400 (0:00:00.079) 0:04:09.279 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:16:46 -0400 (0:00:00.079) 0:04:09.359 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:16:46 -0400 (0:00:00.101) 0:04:09.461 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610430.4085276, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:16:46 -0400 (0:00:00.443) 0:04:09.904 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:16:46 -0400 (0:00:00.191) 0:04:10.096 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:16:47 -0400 (0:00:00.071) 0:04:10.168 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:16:47 -0400 (0:00:00.073) 0:04:10.241 ******* skipping: 
[managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:16:47 -0400 (0:00:00.077) 0:04:10.318 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:16:47 -0400 (0:00:00.048) 0:04:10.366 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:16:47 -0400 (0:00:00.050) 0:04:10.416 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:16:47 -0400 (0:00:00.051) 0:04:10.468 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:43 Wednesday 02 April 2025 12:16:47 -0400 (0:00:00.045) 0:04:10.514 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_kube", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:52 Wednesday 02 April 2025 12:16:47 -0400 (0:00:00.040) 0:04:10.555 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:60 Wednesday 02 April 2025 12:16:47 -0400 (0:00:00.062) 0:04:10.618 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/etc/containers/ansible-kubernetes.d" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64 Wednesday 02 April 2025 12:16:47 -0400 (0:00:00.045) 0:04:10.663 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": 
"/etc/containers/ansible-kubernetes.d/httpd3.yml" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:68 Wednesday 02 April 2025 12:16:47 -0400 (0:00:00.153) 0:04:10.816 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:75 Wednesday 02 April 2025 12:16:47 -0400 (0:00:00.081) 0:04:10.898 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "systemd-escape", "--template", "podman-kube@.service", "/etc/containers/ansible-kubernetes.d/httpd3.yml" ], "delta": "0:00:00.005552", "end": "2025-04-02 12:16:48.089734", "rc": 0, "start": "2025-04-02 12:16:48.084182" } STDOUT: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:83 Wednesday 02 April 2025 12:16:48 -0400 (0:00:00.429) 0:04:11.327 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update containers and services] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:87 Wednesday 02 April 2025 12:16:48 -0400 (0:00:00.065) 0:04:11.392 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2 Wednesday 02 April 2025 12:16:48 -0400 (0:00:00.152) 0:04:11.545 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:16:48 -0400 (0:00:00.092) 0:04:11.638 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:16:48 -0400 (0:00:00.049) 0:04:11.688 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:16:48 -0400 (0:00:00.047) 0:04:11.735 ******* skipping: [managed-node2] => { "changed": 
false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the host mount volumes] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7 Wednesday 02 April 2025 12:16:48 -0400 (0:00:00.042) 0:04:11.778 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_volumes": [ "/tmp/lsr_sr1vi4ai_podman/httpd3", "/tmp/lsr_sr1vi4ai_podman/httpd3-create" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18 Wednesday 02 April 2025 12:16:48 -0400 (0:00:00.242) 0:04:12.020 ******* ok: [managed-node2] => (item=/tmp/lsr_sr1vi4ai_podman/httpd3) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "/tmp/lsr_sr1vi4ai_podman/httpd3", "mode": "0755", "owner": "root", "path": "/tmp/lsr_sr1vi4ai_podman/httpd3", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 23, "state": "directory", "uid": 0 } changed: [managed-node2] => (item=/tmp/lsr_sr1vi4ai_podman/httpd3-create) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/lsr_sr1vi4ai_podman/httpd3-create", "mode": "0755", "owner": "root", "path": "/tmp/lsr_sr1vi4ai_podman/httpd3-create", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29 Wednesday 02 April 2025 12:16:49 -0400 (0:00:00.752) 0:04:12.773 ******* ok: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Check the kubernetes yaml file] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:53 Wednesday 02 April 2025 12:16:50 -0400 (0:00:01.212) 0:04:13.986 ******* ok: [managed-node2] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Ensure the kubernetes directory is present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:61 Wednesday 02 April 2025 12:16:51 -0400 (0:00:00.430) 0:04:14.416 ******* ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/ansible-kubernetes.d", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 24, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure kubernetes yaml files are present] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:70 Wednesday 02 April 2025 12:16:51 -0400 (0:00:00.444) 0:04:14.861 ******* changed: [managed-node2] => { "changed": true, "checksum": "594c0616b7a43ab4e4801162b9fc7add01f01ce1", "dest": "/etc/containers/ansible-kubernetes.d/httpd3.yml", 
"gid": 0, "group": "root", "md5sum": "a3d0a6e5b2f01a3f195194e1ede79ac0", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 762, "src": "/root/.ansible/tmp/ansible-tmp-1743610611.7771573-13950-168904350400664/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Update containers/pods] *************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:80 Wednesday 02 April 2025 12:16:52 -0400 (0:00:00.761) 0:04:15.622 ******* changed: [managed-node2] => { "actions": [ "/usr/bin/podman play kube --start=true /etc/containers/ansible-kubernetes.d/httpd3.yml" ], "changed": true } STDOUT: Pod: c4c98f775b52953a116e15ae01c6ef94ce8e7195636e4f4d862d67e97e3eaaa8 Container: a4dfa039017d9aa56d2353778e99414d6a646ddbc1098f51ecf08096c7f34b89 TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:88 Wednesday 02 April 2025 12:16:53 -0400 (0:00:01.034) 0:04:16.657 ******* ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Enable service] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:100 Wednesday 02 April 2025 12:16:54 -0400 (0:00:00.702) 0:04:17.359 ******* changed: [managed-node2] => { "changed": true, "enabled": true, "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network-online.target system-podman\\x2dkube.slice systemd-journald.socket basic.target -.mount sysinit.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": 
"man:podman-kube-play(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/httpd3.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/httpd3.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", 
"ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount sysinit.target system-podman\\x2dkube.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:113 Wednesday 02 April 2025 12:16:54 -0400 (0:00:00.744) 0:04:18.104 ******* changed: [managed-node2] => { "changed": true, "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network-online.target sysinit.target systemd-journald.socket basic.target system-podman\\x2dkube.slice -.mount", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control 
cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "A template for running K8s workloads via podman-kube-play", "DevicePolicy": "auto", "Documentation": "man:podman-kube-play(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true /etc/containers/ansible-kubernetes.d/httpd3.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/ansible-kubernetes.d/httpd3.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/podman-kube@.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": 
"n/a", "Names": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount sysinit.target system-podman\\x2dkube.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system-podman\\x2dkube.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 10s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:127 Wednesday 02 April 2025 12:16:56 -0400 (0:00:01.618) 0:04:19.723 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Wednesday 02 April 2025 12:16:56 -0400 (0:00:00.081) 0:04:19.804 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Wednesday 02 April 2025 12:16:56 -0400 (0:00:00.178) 0:04:19.983 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Wednesday 02 April 2025 12:16:56 -0400 (0:00:00.065) 0:04:20.049 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Wednesday 02 April 2025 12:16:56 -0400 (0:00:00.065) 0:04:20.115 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check if pods are running] *********************************************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:299 Wednesday 02 April 2025 12:16:57 -0400 (0:00:00.093) 0:04:20.208 ******* ok: [managed-node2] => (item=['httpd1', 'podman_basic_user', 3001]) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "podman", "pod", "inspect", "httpd1", "--format", "{{.State}}" ], "delta": "0:00:00.092369", "end": "2025-04-02 12:16:57.499408", "failed_when_result": false, "item": [ "httpd1", "podman_basic_user", 3001 ], "rc": 0, "start": "2025-04-02 12:16:57.407039" } STDOUT: Running ok: [managed-node2] => (item=['httpd2', 'root', 0]) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "podman", "pod", "inspect", "httpd2", "--format", "{{.State}}" ], "delta": "0:00:00.043939", "end": "2025-04-02 12:16:57.891146", "failed_when_result": false, "item": [ "httpd2", "root", 0 ], "rc": 0, "start": "2025-04-02 12:16:57.847207" } STDOUT: Running ok: [managed-node2] => (item=['httpd3', 'root', 0]) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "podman", "pod", "inspect", "httpd3", "--format", "{{.State}}" ], "delta": "0:00:00.040121", "end": "2025-04-02 12:16:58.294176", "failed_when_result": false, "item": [ "httpd3", "root", 0 ], "rc": 0, "start": "2025-04-02 12:16:58.254055" } STDOUT: Running TASK [Check Services] ********************************************************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:310 Wednesday 02 April 2025 12:16:58 -0400 (0:00:01.304) 0:04:21.512 ******* ok: [managed-node2] => (item=['httpd1', 'podman_basic_user', 3001]) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -euo pipefail\nsystemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active '\n", "delta": "0:00:00.010413", "end": "2025-04-02 12:16:58.737830", "item": [ "httpd1", "podman_basic_user", 3001 ], "rc": 0, "start": "2025-04-02 12:16:58.727417" } STDOUT: podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service loaded active running A template for running K8s workloads via podman-kube-play ok: [managed-node2] => (item=['httpd2', 'root', 0]) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -euo pipefail\nsystemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active '\n", "delta": "0:00:00.011491", "end": "2025-04-02 12:16:59.107217", "item": [ "httpd2", "root", 0 ], "rc": 0, "start": "2025-04-02 12:16:59.095726" } STDOUT: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service loaded active running A template for 
running K8s workloads via podman-kube-play ok: [managed-node2] => (item=['httpd3', 'root', 0]) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -euo pipefail\nsystemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active '\n", "delta": "0:00:00.011650", "end": "2025-04-02 12:16:59.452670", "item": [ "httpd3", "root", 0 ], "rc": 0, "start": "2025-04-02 12:16:59.441020" } STDOUT: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service loaded active running A template for running K8s workloads via podman-kube-play TASK [Check ports, data] ******************************************************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:327 Wednesday 02 April 2025 12:16:59 -0400 (0:00:01.185) 0:04:22.698 ******* ok: [managed-node2] => (item=15001) => { "accept_ranges": "bytes", "ansible_loop_var": "item", "changed": false, "connection": "close", "content": "123", "content_length": "3", "content_type": "text/plain", "cookies": {}, "cookies_string": "", "date": "Wed, 02 Apr 2025 16:17:00 GMT", "elapsed": 0, "failed_when_result": false, "item": 15001, "last_modified": "Wed, 02 Apr 2025 16:12:42 GMT", "redirected": false, "status": 200, "url": "http://localhost:15001/index.txt" } MSG: OK (3 bytes) ok: [managed-node2] => (item=15002) => { "accept_ranges": "bytes", "ansible_loop_var": "item", "changed": false, "connection": "close", "content": "123", "content_length": "3", "content_type": "text/plain", "cookies": {}, "cookies_string": "", "date": "Wed, 02 Apr 2025 16:17:00 GMT", "elapsed": 0, "failed_when_result": false, "item": 15002, "last_modified": "Wed, 02 Apr 2025 16:12:42 GMT", "redirected": false, "status": 200, "url": "http://localhost:15002/index.txt" } MSG: OK (3 bytes) TASK [Check host directories] ************************************************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:335 Wednesday 02 April 2025 12:17:00 -0400 (0:00:01.249) 0:04:23.948 ******* ok: [managed-node2] => (item=['httpd1', 'podman_basic_user', 3001]) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "ls", "-alrtF", "/tmp/lsr_sr1vi4ai_podman/httpd1-create" ], "delta": "0:00:00.004232", "end": "2025-04-02 12:17:01.123752", "item": [ "httpd1", "podman_basic_user", 3001 ], "rc": 0, "start": "2025-04-02 12:17:01.119520" } STDOUT: total 0 drwxr-xr-x. 2 podman_basic_user podman_basic_user 6 Apr 2 12:16 ./ drwxrwxrwx. 8 root root 111 Apr 2 12:16 ../ ok: [managed-node2] => (item=['httpd2', 'root', 0]) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "ls", "-alrtF", "/tmp/lsr_sr1vi4ai_podman/httpd2-create" ], "delta": "0:00:00.003904", "end": "2025-04-02 12:17:01.454429", "item": [ "httpd2", "root", 0 ], "rc": 0, "start": "2025-04-02 12:17:01.450525" } STDOUT: total 0 drwxr-xr-x. 2 root root 6 Apr 2 12:16 ./ drwxrwxrwx. 8 root root 111 Apr 2 12:16 ../ ok: [managed-node2] => (item=['httpd3', 'root', 0]) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "ls", "-alrtF", "/tmp/lsr_sr1vi4ai_podman/httpd3-create" ], "delta": "0:00:00.003703", "end": "2025-04-02 12:17:01.784886", "item": [ "httpd3", "root", 0 ], "rc": 0, "start": "2025-04-02 12:17:01.781183" } STDOUT: total 0 drwxrwxrwx. 8 root root 111 Apr 2 12:16 ../ drwxr-xr-x. 
2 root root 6 Apr 2 12:16 ./ TASK [Run role again to test for idempotency] ********************************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:340 Wednesday 02 April 2025 12:17:01 -0400 (0:00:01.071) 0:04:25.019 ******* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:17:02 -0400 (0:00:00.226) 0:04:25.246 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:17:02 -0400 (0:00:00.080) 0:04:25.326 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 12:17:02 -0400 (0:00:00.052) 0:04:25.379 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:17:02 -0400 (0:00:00.046) 0:04:25.425 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:17:02 -0400 (0:00:00.052) 0:04:25.478 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Wednesday 02 April 2025 12:17:02 -0400 (0:00:00.075) 0:04:25.554 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Wednesday 02 April 2025 12:17:02 -0400 (0:00:00.079) 0:04:25.634 ******* ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": 
false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node2] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 12:17:02 -0400 (0:00:00.149) 0:04:25.783 ******* ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:17:04 -0400 (0:00:01.691) 0:04:27.474 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:17:04 -0400 (0:00:00.043) 0:04:27.517 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:17:04 -0400 (0:00:00.049) 0:04:27.567 ******* skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:17:04 -0400 (0:00:00.043) 0:04:27.611 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:17:04 -0400 (0:00:00.043) 0:04:27.654 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:17:04 -0400 (0:00:00.044) 
0:04:27.698 ******* ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.028471", "end": "2025-04-02 12:17:04.879606", "rc": 0, "start": "2025-04-02 12:17:04.851135" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:17:04 -0400 (0:00:00.421) 0:04:28.120 ******* ok: [managed-node2] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:17:05 -0400 (0:00:00.074) 0:04:28.195 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:17:05 -0400 (0:00:00.071) 0:04:28.266 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "(podman_quadlet_specs | length > 0) or (podman_secrets | length > 0)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Wednesday 02 April 2025 12:17:05 -0400 (0:00:00.076) 0:04:28.343 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:17:05 -0400 (0:00:00.070) 0:04:28.414 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Wednesday 02 April 2025 12:17:05 -0400 (0:00:00.144) 0:04:28.559 ******* META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:17:05 -0400 (0:00:00.058) 0:04:28.617 ******* included: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:17:05 -0400 (0:00:00.079) 0:04:28.697 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:17:05 -0400 (0:00:00.049) 0:04:28.746 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:17:05 -0400 (0:00:00.051) 0:04:28.797 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:17:05 -0400 (0:00:00.087) 0:04:28.885 ******* ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610430.4085276, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:17:06 -0400 (0:00:00.420) 0:04:29.305 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:17:06 -0400 (0:00:00.069) 0:04:29.375 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:17:06 -0400 (0:00:00.052) 0:04:29.428 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:17:06 -0400 (0:00:00.119) 0:04:29.548 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:17:06 -0400 (0:00:00.045) 0:04:29.593 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:17:06 -0400 (0:00:00.042) 0:04:29.636 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:17:06 -0400 (0:00:00.042) 0:04:29.679 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:17:06 -0400 (0:00:00.043) 0:04:29.722 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:17:06 -0400 (0:00:00.042) 0:04:29.765 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:17:06 -0400 (0:00:00.062) 0:04:29.827 ******* included: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:17:06 -0400 (0:00:00.129) 0:04:29.957 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:17:06 -0400 (0:00:00.066) 0:04:30.024 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:17:06 -0400 (0:00:00.045) 0:04:30.069 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.164) 0:04:30.233 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.041) 0:04:30.275 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.040) 0:04:30.315 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.076) 0:04:30.392 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.052) 0:04:30.444 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } 
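The three config handlers above (containers.conf.d, registries.conf.d, storage.conf) all skipped because this test run leaves podman_containers_conf, podman_registries_conf, and podman_storage_conf empty. For reference, a minimal sketch of variables that would exercise them; the values are illustrative assumptions, not taken from this run, and the dict shapes are an assumption that mirrors the TOML sections of containers.conf(5), containers-registries.conf(5), and containers-storage.conf(5):

    podman_containers_conf:          # written to /etc/containers/containers.conf.d/50-systemroles.conf
      containers:
        log_driver: journald         # illustrative value
    podman_registries_conf:          # written to /etc/containers/registries.conf.d/50-systemroles.conf
      registry:
        - location: registry.example.com   # illustrative value
    podman_storage_conf:             # written to /etc/containers/storage.conf
      storage:
        runroot: /run/containers/storage   # illustrative value

The target paths match the facts set by the "Set config file paths" task above. The same data-driven pattern drives the "Manage firewall for specified ports" and "Manage selinux for specified ports" tasks further below; judging from the loop items visible in their output, the test presumably sets something like the following (variable names per the podman role's documented interface, item shapes copied from the logged items):

    podman_firewall:                 # items handed to fedora.linux_system_roles.firewall
      - port: 15001-15003/tcp
        state: enabled
    podman_selinux_ports:            # items handed to fedora.linux_system_roles.selinux
      - ports: 15001-15003
        setype: http_port_t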
TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.064) 0:04:30.508 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.088) 0:04:30.597 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.046) 0:04:30.644 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.116) 0:04:30.760 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.042) 0:04:30.802 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.042) 0:04:30.845 ******* TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.127) 0:04:30.972 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.103) 0:04:31.076 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Wednesday 02 April 2025 12:17:07 -0400 (0:00:00.056) 0:04:31.132 ******* skipping: [managed-node2] => { "changed": 
false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Wednesday 02 April 2025 12:17:08 -0400 (0:00:00.052) 0:04:31.184 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Wednesday 02 April 2025 12:17:08 -0400 (0:00:00.048) 0:04:31.233 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Wednesday 02 April 2025 12:17:08 -0400 (0:00:00.115) 0:04:31.348 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Wednesday 02 April 2025 12:17:08 -0400 (0:00:00.045) 0:04:31.393 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Wednesday 02 April 2025 12:17:11 -0400 (0:00:02.918) 0:04:34.312 ******* skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Wednesday 02 April 2025 12:17:11 -0400 (0:00:00.040) 0:04:34.353 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Wednesday 02 April 2025 12:17:11 -0400 (0:00:00.040) 0:04:34.394 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Wednesday 02 April 2025 12:17:11 -0400 (0:00:00.043) 0:04:34.438 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** 
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Wednesday 02 April 2025 12:17:11 -0400 (0:00:00.062) 0:04:34.500 ******* skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Wednesday 02 April 2025 12:17:11 -0400 (0:00:00.087) 0:04:34.587 ******* ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:13:57 EDT", "ActiveEnterTimestampMonotonic": "326139129", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus.socket basic.target sysinit.target polkit.service dbus.service system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:13:56 EDT", "AssertTimestampMonotonic": "325830531", "Before": "shutdown.target multi-user.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ConditionTimestampMonotonic": "325830530", "ConfigurationDirectoryMode": "0755", "Conflicts": "ip6tables.service iptables.service shutdown.target nftables.service ipset.service ebtables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", 
"EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12711", "ExecMainStartTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ExecMainStartTimestampMonotonic": "325832688", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:13:56 EDT", "InactiveExitTimestampMonotonic": "325832722", "InvocationID": "41b8036e16214fc68c244a42727639e9", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12711", "MemoryAccounting": "yes", "MemoryCurrent": "40456192", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": 
"no", "Requires": "dbus.socket system.slice sysinit.target", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:13:57 EDT", "StateChangeTimestampMonotonic": "326139129", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Wed 2025-04-02 12:13:57 EDT", "WatchdogTimestampMonotonic": "326139126", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Wednesday 02 April 2025 12:17:12 -0400 (0:00:00.594) 0:04:35.182 ******* ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:13:57 EDT", "ActiveEnterTimestampMonotonic": "326139129", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus.socket basic.target sysinit.target polkit.service dbus.service system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:13:56 EDT", "AssertTimestampMonotonic": "325830531", "Before": "shutdown.target multi-user.target network-pre.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease 
cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ConditionTimestampMonotonic": "325830530", "ConfigurationDirectoryMode": "0755", "Conflicts": "ip6tables.service iptables.service shutdown.target nftables.service ipset.service ebtables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "man:firewalld(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "12711", "ExecMainStartTimestamp": "Wed 2025-04-02 12:13:56 EDT", "ExecMainStartTimestampMonotonic": "325832688", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:13:56 EDT", "InactiveExitTimestampMonotonic": "325832722", "InvocationID": "41b8036e16214fc68c244a42727639e9", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "12711", "MemoryAccounting": "yes", "MemoryCurrent": "40456192", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": 
"infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket system.slice sysinit.target", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:13:57 EDT", "StateChangeTimestampMonotonic": "326139129", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogTimestamp": "Wed 2025-04-02 12:13:57 EDT", "WatchdogTimestampMonotonic": "326139126", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Wednesday 02 April 2025 12:17:12 -0400 (0:00:00.557) 0:04:35.739 ******* ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/libexec/platform-python", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Wednesday 02 April 2025 12:17:12 -0400 (0:00:00.062) 0:04:35.802 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Wednesday 02 
April 2025 12:17:12 -0400 (0:00:00.129) 0:04:35.931 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Wednesday 02 April 2025 12:17:12 -0400 (0:00:00.041) 0:04:35.972 ******* ok: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "15001-15003/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Wednesday 02 April 2025 12:17:13 -0400 (0:00:00.603) 0:04:36.576 ******* skipping: [managed-node2] => (item={'port': '15001-15003/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "port": "15001-15003/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Wednesday 02 April 2025 12:17:13 -0400 (0:00:00.084) 0:04:36.660 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Wednesday 02 April 2025 12:17:13 -0400 (0:00:00.060) 0:04:36.720 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Wednesday 02 April 2025 12:17:13 -0400 (0:00:00.053) 0:04:36.774 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Wednesday 02 April 2025 12:17:13 -0400 (0:00:00.048) 0:04:36.822 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Wednesday 02 April 2025 12:17:13 -0400 (0:00:00.042) 0:04:36.865 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Wednesday 02 April 2025 12:17:13 -0400 (0:00:00.041) 0:04:36.907 ******* skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:17:13 -0400 (0:00:00.065) 0:04:36.972 ******* redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Wednesday 02 April 2025 12:17:14 -0400 (0:00:00.307) 0:04:37.280 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Wednesday 02 April 2025 12:17:14 -0400 (0:00:00.128) 0:04:37.409 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Wednesday 02 April 2025 12:17:14 -0400 (0:00:00.089) 0:04:37.498 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node2 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Wednesday 02 April 2025 12:17:14 -0400 (0:00:00.130) 0:04:37.628 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10 Wednesday 02 April 2025 12:17:14 -0400 (0:00:00.094) 0:04:37.723 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17 Wednesday 02 April 2025 12:17:14 -0400 (0:00:00.076) 0:04:37.800 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] *** task path: 
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22 Wednesday 02 April 2025 12:17:14 -0400 (0:00:00.079) 0:04:37.879 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "not __selinux_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Wednesday 02 April 2025 12:17:14 -0400 (0:00:00.076) 0:04:37.955 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_python_version is version('3', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Wednesday 02 April 2025 12:17:14 -0400 (0:00:00.183) 0:04:38.138 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46 Wednesday 02 April 2025 12:17:17 -0400 (0:00:02.906) 0:04:41.044 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_os_family == \"Suse\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 Wednesday 02 April 2025 12:17:17 -0400 (0:00:00.050) 0:04:41.095 ******* ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:72 Wednesday 02 April 2025 12:17:20 -0400 (0:00:02.881) 0:04:43.976 ******* skipping: [managed-node2] => { "false_condition": "__selinux_is_transactional | d(false)" } TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:77 Wednesday 02 April 2025 12:17:20 -0400 (0:00:00.075) 0:04:44.052 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:82 Wednesday 02 April 2025 12:17:20 -0400 (0:00:00.069) 0:04:44.121 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Refresh facts] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89 Wednesday 02 April 2025 12:17:21 -0400 (0:00:00.070) 0:04:44.191 ******* ok: [managed-node2] TASK 
[fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:5 Wednesday 02 April 2025 12:17:21 -0400 (0:00:00.820) 0:04:45.011 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_selinux.status == \"enabled\" and (selinux_state or selinux_policy)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:13 Wednesday 02 April 2025 12:17:21 -0400 (0:00:00.041) 0:04:45.053 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_selinux.status == \"disabled\" and selinux_state", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:21 Wednesday 02 April 2025 12:17:21 -0400 (0:00:00.044) 0:04:45.097 ******* ok: [managed-node2] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] ********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:25 Wednesday 02 April 2025 12:17:22 -0400 (0:00:00.076) 0:04:45.174 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] ********* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:30 Wednesday 02 April 2025 12:17:22 -0400 (0:00:00.142) 0:04:45.316 ******* skipping: [managed-node2] => { "false_condition": "ansible_selinux.status == \"disabled\"" } TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ******** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:35 Wednesday 02 April 2025 12:17:22 -0400 (0:00:00.043) 0:04:45.360 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:43 Wednesday 02 April 2025 12:17:22 -0400 (0:00:00.041) 0:04:45.401 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:48 Wednesday 02 April 2025 12:17:22 -0400 (0:00:00.041) 0:04:45.443 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:53 Wednesday 02 April 2025 12:17:22 -0400 (0:00:00.042) 
0:04:45.485 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:58 Wednesday 02 April 2025 12:17:22 -0400 (0:00:00.053) 0:04:45.539 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:63 Wednesday 02 April 2025 12:17:22 -0400 (0:00:00.060) 0:04:45.600 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:74 Wednesday 02 April 2025 12:17:22 -0400 (0:00:00.044) 0:04:45.644 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ****** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87 Wednesday 02 April 2025 12:17:22 -0400 (0:00:00.051) 0:04:45.695 ******* ok: [managed-node2] => (item={'ports': '15001-15003', 'setype': 'http_port_t'}) => { "__selinux_item": { "ports": "15001-15003", "setype": "http_port_t" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "15001-15003" ], "proto": "tcp", "setype": "http_port_t", "state": "present" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:99 Wednesday 02 April 2025 12:17:23 -0400 (0:00:00.991) 0:04:46.687 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 Wednesday 02 April 2025 12:17:23 -0400 (0:00:00.049) 0:04:46.737 ******* ok: [managed-node2] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:13dad22da122be9f7d5df4dbedae6a515323542fdc1a7e466d7a1a3d36d29731", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:8bd25829d921be0b5adf92ddaca7ab94cedca1d57796749cfa63571b6550e3da", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:2699d826efd46176017695c768804c505a54b277b05f1feb9c43a613bab4e6aa", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:99920dd4e0855870f7e6f9666928d13fe18ddccca9d38b92ea70a6ce3c8c7539", "enabled": 1 } }, "aiccu": { "100": { "checksum": "sha256:a7aedc8354b4335412871adfd2ab5b0c6da1ea63c8dd797718e4214a5d511bb5", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:8adb5c3a5ed74695e975eecbf290640b179eb6345a7740745ecfe3164efe209f", "enabled": 1 } }, "ajaxterm": { "100": { "checksum": "sha256:d3a03c2837d5dde7145e27902ff8578e00734ab34e8ea1a45aee58b83e9ad6d1", "enabled": 1 } }, "alsa": { "100": { "checksum": 
"sha256:202f94345fba8f4bc942dc9b75bbb6eea3b4cb02411cf6ed79858d72aa883c89", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:f9a99d97370017307349a154ce479969395bbbfe434e4829573269f770efdd0d", "enabled": 1 } }, "amtu": { "100": { "checksum": "sha256:bc9934a2ae61fa117614f201479966d788484f3a7382de4ebad99790a465e2b7", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:b8aabc624243533d483c3dd5574a490a43e7ec0f2f7940798c12b4089bbd0642", "enabled": 1 } }, "antivirus": { "100": { "checksum": "sha256:1de6460ccaea5a5749eba17489b9765035c8202eb9492485ff39157564001a2c", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:1a0c38364558bebdae3efaa1fcf8be232184dcddcaab345bba1c40bf239dd0ed", "enabled": 1 } }, "apcupsd": { "100": { "checksum": "sha256:175308edb201092c22791f419d32da3f661e7ccfb9c5d5855ad753405c10023b", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:a1410f65d6bf017caedaffaa59016877686099fb7df3c4d801136de79a61795e", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:a8e9d90aa1188068ca66be55c4d8abf9982666171bbdd8d4da1f2a254c34a080", "enabled": 1 } }, "arpwatch": { "100": { "checksum": "sha256:2cb8afd237d6bc5693e5d54be1a455b6ed632fbbe76cea406163f9c48d00e79f", "enabled": 1 } }, "asterisk": { "100": { "checksum": "sha256:0b66b387174001e926cf1454c3516bb32d96610a0f598065fe6d7a917ca897fe", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:dcd9e7f5e71fb9f7aace30b5755efcbf85fe88f884d4253cc9abcad1c44e5f4d", "enabled": 1 } }, "authconfig": { "100": { "checksum": "sha256:bdb8072e463c84cb01e6933093428be2b6ee5299d82e26730b12dd2b66d89355", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:a89b04c7a40bb373de2bbb0a2210cca454e7d4a805321fbe65462ae5551db656", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:41ec4e0c5c46118cb4dfa8c8b1834f330dce4ffdea3d534a8d5007a63b3e5262", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:7628cb8340258102798a6e36902d0210e2051ffb9fb4f7a1e4c62a612edfe6fa", "enabled": 1 } }, "awstats": { "100": { "checksum": "sha256:9b92e64a3331076ad443862aa2ba98a2c4d9b00638bf19bb9726f572dee5eff4", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:32cedcc57f6a973ac5adc16d8df343fc1ca4b3716f7cdcdae0d2490a6e5765ac", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:d99ed290beecf2b10a557a21b06b63cabc28dab4050f2e7197d2cb9e30519fd3", "enabled": 1 } }, "bcfg2": { "100": { "checksum": "sha256:ea510637d47b7fabc3f617f8a6f3ca3172bf9215c2d6b64ad19cd5d8819c8b6b", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:39520749f8aba46f975a87187975d8dcd014ad67d22515951f51fa3fd1b0478f", "enabled": 1 } }, "bitlbee": { "100": { "checksum": "sha256:bf04e481614825a35c26a547b19098ff1c8acd0d915c5b4f938b9fa595459d00", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:ca870c95742bf987a2e739286cbcb998b58c091a422251fdd8de57228b28fd96", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:7e4b2b3df3962273436b561c806c816fe4b1e5d6781efa33a7109b05f796edd7", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:da457ef2ce595c3bf9f70697029ea90e96472ae562f685a7f919a7778a778d09", "enabled": 1 } }, "boinc": { "100": { "checksum": "sha256:d74bd3b6b3850c30b5bbf77822ab82b43f36600e4f76cd68674ef361328afb05", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:4ccf41e247c5a7066042a0ebaae492805a1d640f777e8e771701f340a76bce30", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:46e55021d6c4cede091a992ab33521bb1aba4ca1d44879d778973b279204933c", "enabled": 1 } }, "brctl": { "100": { "checksum": 
"sha256:f9645adde2441e43369a255c6a194f01c6f5800347ad710ce3e147df884b98aa", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:603734d4772f482f282eb217c03647f705d66de27fc927c64e02787369b0f78a", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:326d2a188603c908cdae3c9dcdae6bda37b98ec4cc23f3b31878e2bbd0cd33b2", "enabled": 1 } }, "bumblebee": { "100": { "checksum": "sha256:e8ca8d5318a68243441fdb993fbab6d566f7462fd5557b55733f8ddbfcc4b276", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:86fe9c1aa8b2d7a6bdd9bd8d0c7a41a7ae0e4e14e32eaea6cb920367c2f495d7", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:1069377693a5d730d57e4ddd6f73ce20b67b595aae90a16459e852d238163b48", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:880b626c3d04c5669d64ee617ee36a18566e91adeaac67b9527b0a795543575e", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:b9256764ca5e34142e8cffea57fafc2fa66f78dc8c05761f97fa9becd1d77311", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:ad293ee5e252966d14fa6bf09240f143460df4b928672a33a398a5793777c4e4", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:dda8d62c3bf2503ff9762bd031c35a76cac8059d08592fe23e4d3fe11b0ac8cc", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:b431dd84f2c6b971bc573674fa6c4ee2fedf910b0123ba5d9acb5011c208fd72", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:965ec65dfc98cbabce2350bd52fa7ce92c2f4ab4704348f1555f2a3d9edfd1b8", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:77f0299f67e43927eacb553d1002beeebc3098b4bee64d8dc3dadb8fd23fbb5c", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:c78b908838f1d64ee9ebb0a51b7fa438527716936471a573e1b4b7c393bd6b8d", "enabled": 1 } }, "cgdcbxd": { "100": { "checksum": "sha256:5d3633e0b77db69721e4f64167d7e5f7779c3e5fa76e095d25f8467f2a0bdfec", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:9368c6c54bd5ec6f20e4c3b47c86e60af07346c4e86e525b6bd7288b54b7e774", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d31ce9d2fe78cafcd5e3c8decf22ae1e9ea6f74026ca65b6320afe9a33cd609a", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:7d9624729861397cf7720c2324c65489a3d30485e6a884ab1ff9a8ca22efa678", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:fc169721c78f5b0857ed8312e59ba4c134b685c4c322dae242b92e815e35e6fb", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:02c20398b9eff51ed431b7ad739a6015d2451b4bf6e3e5da380606d85a77852c", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4f90655d2243cfc32ea7436a953cccb8a34af895f83361235a3a5cda40dbc75f", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:ba78a422a10b65591c48cb038f8a55614944163f3140275852d293fb0c548bfa", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:481f5fbc7810a5a81851edbe5a6b124141257f5fbbb83d8830ae0a34924ed3d9", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:8f8fb986f15b8b7c5c250d250fdbbb2f78874e13394105c9c486488a16e94c91", "enabled": 1 } }, "cobbler": { "100": { "checksum": "sha256:e0e264b9cc83962dbbb27c152a72f01c6a355467c4e845b52e65c8b88d8d75d6", "enabled": 1 } }, "cockpit": { "100": { "checksum": "sha256:cb7fccd94903a6e256a586d758085f6f59c0f8b1c5b4cb99536915526d2224ec", "enabled": 1 } }, "collectd": { "100": { "checksum": "sha256:7f08e2e248d33162dc9b237c37ed3a3dba0511bbcc71d87482e95093fb8c6456", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:86e58c9f12c519a2c3b090b64a276722374054ea900c775b2f8ab4ef2867dcf0", "enabled": 1 } }, "comsat": { "100": { 
"checksum": "sha256:1d57ffaad6b96e3ca8ac82c23b52d58d81e1f69f5d54a648a16da8ffa8070e53", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:dbc3f2f0c12f9aeed14056fd7e7c46a4ecab3569198f891643172cd032f3fc00", "enabled": 1 } }, "conman": { "100": { "checksum": "sha256:1270caf15af248a487cd5ce728daae2699ffd9139823c805ec49213ab1c835cb", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:56fd7d7a550dbc4188b93afd0fde8c706623b3a5d26db265ee016967ba4ddfee", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:5bd7a7acc191766583d933b04321e64657138959bf40a4d2986b013b942c4ba8", "enabled": 1 } }, "container": { "200": { "checksum": "sha256:301be7dafa07cdc68b4e5ade7e1a07017fab3efd85986bdfab7faa9466a95836", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:12b2e3e7314bda4e76d3883901e6470927e85343f742fb44b174ce968f1ad8b5", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:40ae5f173004741838002644e5bff73cf16f2f3a1928c45fa17674f9a0df5148", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:1485a6d64d00619898d2789d27391f2a57a7fb1f0e8c73daf59baca8641564a3", "enabled": 1 } }, "cpufreqselector": { "100": { "checksum": "sha256:687564eb09acf3e7f1475fe2a133941c36999bd037aa8a794feea2d9f2c26385", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:c16e376ff6c51da1911e68a8a7d42f5730eda45febfd0875c78cac4b9cf6e78c", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:6be0252b3c6bcbfb4c51dfd3ae1ae262f5de153234917ac4d342b18ae0292060", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:06dd65a4361bf8076c14b322dd30003295c0b9d75bf1ae610961b13a1f9431da", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:3d5e5bbf131d98d95f7f1431893eb137bd833dbfd8469f9c386d72bb4e8f9b9a", "enabled": 1 } }, "cvs": { "100": { "checksum": "sha256:bbc8d76cc8609849d5b078c5b2ac7364470a06d77d67b97d5f58429d7b679e33", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:b1a41211ae3cf69b819df517eccd0fda2088c27685dad68de64531b9794ec518", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:60defb1f6feeb1d607734c4912e52e03bf5b0c27cb6f31a37fa7e05f3497b323", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:1034e2442c975dd2ccf84791b1a826d02032f13762d57c3485e51e2b9a7dc03f", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:40306590ef444152ae18b65040d85442c14853a9cc4c31b0224c4d19517d66ea", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:24559eff82b251f9814ae88c36a7cbacda1ed419a80145aef545306e88cb0da8", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:50ea4eb05a06315449092c939e2307436ac6461e47ca69f0d42cc4e321e86280", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:06e414b0a83b49968f62018cecde48dcfe68b2e9d699915367b3e04461188a0d", "enabled": 1 } }, "ddclient": { "100": { "checksum": "sha256:73ca2525a14e3161524f6e8fc0d016430a536002f1cb3833db1334670b458436", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:1bd00b13b9bda18274a771d66f7cba8fe62e5e95ea8f51415da6b1fa1336df1b", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:03b01b781881cc60438bc357bd60596970b8ac019b415969bca8a08358fcbfd1", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:2ad95a78468f7f4ea9a8c044c79c0a4ca9924b41432390ea2863a85c806c9a00", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:c30c819f142d3c719d0ec5741af5a65161770ff140097fe63f7559d55b897500", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:50efdc68200d27ce1a5db99a780aa7b0e84988669961d436d348c7bb8310d181", "enabled": 1 } }, "dirsrv-admin": { "100": 
{ "checksum": "sha256:8d9234157484f6ae8ba22039b44fa19f4de8137be9321e5da393d72d85d89487", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:8b834312a2cb99ab89862f839a1315e78794dd92758785f84c9559285dfbe679", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:2c7fb8c6c52f385b819713f0444a96cfd4e65b7dcb3ca79b932cc12ad9ce903d", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:44f66c5d4f635600ee9d0ba3fdea3896218f1420b5ead89e0f22d71a447f9e97", "enabled": 1 } }, "dnssec": { "100": { "checksum": "sha256:49427a9e92b87db77706e2b81ece254c99d3cd6ba020211e2afae65fab7ad066", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:cc8c3a2ee0233a7f1fdf38837b72ce5fd15efef782a36ab4b9aa2ec339b46fa6", "enabled": 1 } }, "drbd": { "100": { "checksum": "sha256:b66be23c1ded4e548e5369b744c7c2a4dfd7065582517525221177ca67657525", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5dd7221ba40e9b912367289fed8ca116c14da4fb8bd7f28f421c4008855bb9fc", "enabled": 1 } }, "entropyd": { "100": { "checksum": "sha256:0f68aeeb1da72efb8c9b1bb7db0a4180b6938672b16f33d1abcd65f5481d85a9", "enabled": 1 } }, "exim": { "100": { "checksum": "sha256:f4c4473ee49394e0e4629023772464a046c476f92b4a727acdf9f6c92711b952", "enabled": 1 } }, "fail2ban": { "100": { "checksum": "sha256:2383cb88b81bc5d87be9f3201a42da526532c4ea8e6d3b3f5023005c0ddf6f17", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:913e66ac5f5ce364e5ea556acfbf77845c25a4beb5ee64599613aa00127c1492", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:63f00993bae4285eff5e993d208ea786785c4331e6947b3a48a97d31145b2e98", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:16c506d472b007f7d36850810ca0fcfd9482d30ce9c0ba790174b78294fd1d74", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:bbf58446f30b93de19e5a19087ee012f8e347fef5e7e8012e64b31a0ec21ab09", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:b61ff17eee03141c9c7bd79d63331ecea733cba4b5b43b87d5141a40cdccdd69", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:c5540b8385c84075dd657e390d77ae886aa9d74b65444b9aa1d858f375819a8c", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:c1ffb7734a0359a7390830d9c6477ab61c45fc026368bfd5e2246523a6439464", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:9af2291d75a2d643f53ff7a98bcabf22effb617329178efea45372d714825de1", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:28bf77389f3e41743b30727a891609172a891466e92c28a919f43e628cc23a4d", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:140caf542903419ee2471fd99ab06aa45899c400402c2580b395b182f24bd225", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:7e8456fdf7807b30e1c257e568ba10305696cf5abdebc70988c288079884d46b", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:1dd6a45b73c7ce77a87af1e87354ada5aa5b2841aaaa045a6b4ae3c4d09f0f8b", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:950d8be99d5349a3d893ba601c518e6b2af0d56c5b55514a45dbd8a3c61c9ecc", "enabled": 1 } }, "gdomap": { "100": { "checksum": "sha256:5040cb99d007fe9368bd37a9a6bf82f891c220ef652443896a0f2f6ca6f818e1", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:f0155b43152b6b4b850d1c4fb7daf16fd77992313b8be314ddb4901314bf913d", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:a60d07665b0ebd25fd54a9d82dad5eb7acbc11a2842dba56d7b9524d26ce14ce", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:5eaccf209092db49c9a48d84e1387c1de76cb153c774c0bd615c001afab28664", "enabled": 1 } }, "gitosis": { "100": { "checksum": 
"sha256:b522382b64f36cf387cd892b45e916c861bd0a09697bc983eb55b53b0efd3081", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:2c51d19fca6ee40e137245ecb425edc77666d75c42ba583bf74cf13f10ace055", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:420b9cefa6bdb542f6da10de7b36704a91509cf64cd2497e5693a858cfca5e41", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:f821aa6ca5837a2d2de8180e74c267f68da951960c989fb13ebde5833c93738e", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:bf30c4945be0065672fb47f70ad251b1079ada339f61f2679293cb0226d0d57a", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:5373b2332959d6c41c32160018274ab61e3f1abd0f0a5cc2302c45b141a39a9b", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:7528c47be91a81ac19f2f54458309baeb0a232d83a1ccb2bd89fbc8cefb1ddc8", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:91f43e4d5ae283f0aa13c49efea93293dbdecd2b2f4f75db89371eda65b7523e", "enabled": 1 } }, "hddtemp": { "100": { "checksum": "sha256:f170e1da6acae4fd7108d22c8cf262916e034f0d3edbdebf3265a922a5355373", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:8b15f72328885c08bfda38082a62feeaa2c6692223a4d2bd1a572820d454a742", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:e9fc1c4032c0346f751e1ef8ad1b3fe3425401b70a6c4354d4485472288e0bc5", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:f70b198e5a5157722b69dc89109c4074a475e1085356cc610cc9b700567c154d", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:370e9eea2b927a2715018b667e9a56ad09af301a90811cd9b041da79f5384b38", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:b54ce6f4960a02d35e19d60bf8a07f7866514893e3193a5f4822c8580a46caa4", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:663b35f3874583ae074924bc068a8dc4c7c144adb60007da6103d1e3505ee37a", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:dedaddef1d7447d25a1e7ff01e60e4545606e556c6770bd3fa94d9331de7a5d7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:ae408578a7160f2feae10269365558c43d9570b392642a92cc20f8ad47c58cce", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:7ff95566a4f2bdb8ca3ec67acdade39e35fdabc57c2f00b989bab3f699f997f8", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:9ad99284192a443aa582e73b46667388b7a219dafae8dfce71a58a82bbae2f6c", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:0e41289d8dce065dcd41fd6cc1e1282efd4a58e7f9e3a2f1abc32f520fbbcc1e", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:32501ab66def044fbc340cb5c656d5743c738bbd6fca5626c36c687419cd8d32", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:d15656cd91a4e4e178a13f7cf910cfc552cc30db881a11ec88833f947edb4561", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:d34fe186922c0e5726ca361343ec3846833ec3e4ab9b019b3d7bac1337383a16", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:d36c66c2c79d338c61c90d4136433e1e3a73435e920eb36d70682dfd5e147e59", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:5a674017cc648e3262757464e5413503154cc1f593da545ce2c4f946991012bc", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:d72428ccbff5521367e00699c142bba64b2bbd44fed35deb29f9530cc0448378", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:15650b2f39ccdfbcb1e4e867a35fce3c2768097e611e0c8ad9cb79ae6c66dd58", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:ccb27142f793095c79f531aae924baaeee5914c84228a09c09b9eca839f3524e", "enabled": 1 } }, "isns": { "100": { "checksum": 
"sha256:90b42f610fa328cdfb98bd0450bd052566f203e51e4a913dd6faded6da7fbe2c", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:5ad49d140265305dc72781a6826d1de4614a33f83bd512acdc2263038ad41206", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:d910afd1bfe836543ded50974dc24ae7bd5fd2609d6a9b2403316dffcd39832d", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:d9a67ce1976ed2e79826d25f33dcb0b0bbde6c090600b605bbaaae45856d12f6", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:9ddb71271d0dbe5cede6179c0ca263e297dc6b65197bde2f7b14ce71f8dde369", "enabled": 1 } }, "kdbus": { "100": { "checksum": "sha256:5969c78be4a03cc91e426bc19b13c5188b5bf8ac11f5e2c21c098c3d68a7e3e3", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:fdde3852d1decda649133c6345680f9353b86a6da2a98a83a8be101c9c25f103", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:66c67280c70a9b897b0f952067438e0eee05f9f48913508b38d745ef88747f32", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:c1177567c7bf67bb2d0de17760cecf56e0bb34f50d6fe060dec64ae97a76ecdb", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:826fbe83705494e009b242b88857c425eacba49aadae506ffa2012c80e60f7ae", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:f199811d9ddc8db83864a09c543567fcb2f117b3241967b092bff7c9fdbfbfb6", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:b0a7227a870ea987035e0cd524ad956a68287d0a67dd7135de41c6d5977ff4c2", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:488fb5fd17cf1f630f3e48a853da05f86c06fc58219dc2ae59251865734bf800", "enabled": 1 } }, "kmscon": { "100": { "checksum": "sha256:d64019b11b6a37f6cdc5579d56eb1e19b6a7231501e1cfe2a838d26a2eac6033", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:00070d71dfe2632491305387ffb264127dca4387425015e4cb013d6bce5f95c3", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:891f082452240ad2e726bad777ea787d0f0f8695cc2a75f7439a2badda030d24", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:2df6f3dbad4a513ee1c113e496e8d2f5a19f56015f4a21e7478f2f5b53f36359", "enabled": 1 } }, "l2tp": { "100": { "checksum": "sha256:8e4cb0b0e0d1293d669de0b0e50f68d6d6fbe8e8d830a236a1c0e676f2326fb2", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:d0177bb5873d0e6f9595020a8f39ba06b19e4636ea610175a3afef4aec2719cb", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:6d5f128f2d4fd9137a7c70d0d024703547796a71f70017b3550a31d3450e0435", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:e7eebd050230b358b43435d37ce308c3ba15e2516f4045abf7d26f03ebfbc11c", "enabled": 1 } }, "linuxptp": { "100": { "checksum": "sha256:4132cd51913a3044e453ed0b972db2ef511fdc7b2a1b592d1070177651066ab9", "enabled": 1 } }, "lircd": { "100": { "checksum": "sha256:cc81b79d2834e58bef7928f525c1a1eee5547e81d195444b3bc2741e396ae46b", "enabled": 1 } }, "livecd": { "100": { "checksum": "sha256:805c7bc4ded621b44ecf333d558328e115bba652fcbc91f436cefc948497688e", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:358c4b262655cffbf20f7484aedb22f094509f44d52a1fa3efe3edeafd99317e", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:26f9e78406ecdc968ed670b32db1d10805e66875631558f092f08a6e1f2170dc", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:e07d92775ed25e7a3627bf977452844c67acf473b33075475f433f8be76dd755", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:1f946da2054cc1693209749df12ff01ab6456247d6225733aebb3a7d70a46e20", "enabled": 1 } }, "logadm": { "100": { "checksum": 
"sha256:70546c4b3d01f15bc7a69747dbb12fc6bcef5d899f6301f62c0c612c7069082a", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:656067c78ff1246a1a758a213d44307f91cb79336fe74a47015af425e58266fc", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:76cc40f1943fe21959793499bffaf35d0fe53ffc3f6c5a8b31eb96e738a286c2", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:cf4450b03e28762040c29f2a28af238cd4905d1c6bd4c73d656b266c7b9a8a6c", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:9358dc35659b9570d3e8119a088b2693d7de505ea25996dc139517a857888857", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:1247dc4bccfbc9ee42292db4415b21ae00bdef3dc2faeb267f045413da4a1b1b", "enabled": 1 } }, "lttng-tools": { "100": { "checksum": "sha256:79e4a2224ede13cd5f2c0e6e7c61e83efabaf1d05b86f6f7a710599bfc48edaf", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:f56137657dd61a1a8a8844d5d1db01fc03330d17e05457d03f64756b344c32ef", "enabled": 1 } }, "mailman": { "100": { "checksum": "sha256:e47811cf3bd8204eaa02c4aab92f3d426f0a3ef97161e1579845d1e03df1fc1d", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:8d447072ab5005ead27f1cb4d96dcbedf09a11182f660c6f59c6d56fd81235d8", "enabled": 1 } }, "man2html": { "100": { "checksum": "sha256:224584babd9e83c242d54fd8c5cd03379b0556005268aac22b15734b913f12e6", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:ae44b8ec7a90ebbc45fdafe89663197b36e47120ad90eb22b475939055ea6924", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:c5d98ec368b145c74b4bf0ea8da3980b17af0c2d00654c5a6973241625f97b12", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:43f1c6f7cfdeaa26891824167cf637a8670785c2674b45d85ce4a7ac77190a36", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:f0f9c7367e9bd196aa463916bd5aab02f6966dad9564a0f2fd070bb2e8410aeb", "enabled": 1 } }, "milter": { "100": { "checksum": "sha256:db190bacd2b84a29971cd1940cd15d606abbfded5c9b956894717afd91fc7a0d", "enabled": 1 } }, "minidlna": { "100": { "checksum": "sha256:0d6ac660d641c1cf707a814ed08e19b9e21547a3eaa7134cab84dbc5fee6b5b2", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:dd2ab85bcba6d204f9dbc7304e8a4940e5d1733d4b9cf4fcb0f4072982c585c3", "enabled": 1 } }, "mip6d": { "100": { "checksum": "sha256:406edf2c78ba0e692d5a78f3c5ca8d641d00131b143332adeaad9f325959683a", "enabled": 1 } }, "mirrormanager": { "100": { "checksum": "sha256:7084de59beaaaf4f630357ec53beff8d0a0ee532ac180fe58e23bfe98f1fdaee", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:7e7e87e302bf847a4c59d69e5af60729e61bada0cc5d6ec17a25a6514476cb48", "enabled": 1 } }, "mock": { "100": { "checksum": "sha256:ae352eccf2f2c9ee8f0d9635517d9ae3c9bba83c617deca8f989e2aae8dd35fa", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:84a60147d2b0121ff6ede6199583cdb5619480d015b2a675c6a0569f91c12d66", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:67c3914aeb25e38fc6bd0793fddc41122dba1547d54e91a78065545fea3b9c87", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:6030afcea9f8d46f25dd7785737edd25eb0f1e50b76eafe4d9103196b722d47e", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:6ba3a594d01a11bc32e7cb554f7386314b5089eb4416fb776edb552a7d53c41d", "enabled": 1 } }, "mongodb": { "100": { "checksum": "sha256:1b2d30558bec7fc08d1d388ae2bb0becd2233c99c9fb173fd00809786ce5eed9", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:346e172be35df168eb0e4fbc8e176b0fda87de9bc5787f7a5ab7667cfe1e3c3b", "enabled": 1 } }, "mount": { "100": 
{ "checksum": "sha256:f66c53d993dcd47ea1ff3d797f8fd69fb8161a4ff8a59f54f66a2de9462a55a7", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:7696dbb77c54531cf2574c7ede9f085cf64611dcf7a612530dce2de19f7a8b9f", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:0f67c18c9101b53f57ef857a74d6044701e1d2c347f829a03c0579c545fdbef3", "enabled": 1 } }, "mplayer": { "100": { "checksum": "sha256:f82c0a72506f1011e47ba98e51d5edf906f58fc190d797f5d1a0b8e5cc7d0762", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:afcd9267261b334900420461279b8555fdb4bd783af880fa4606d8afc65e0712", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b0f9753424c504a288f55d495105f6d475d69287b718190ae5192cf7d6ddfde6", "enabled": 1 } }, "munin": { "100": { "checksum": "sha256:29f87ec15fa19e975c83288d55e56bab64855a24c4d8826fe4138eda9a46cc97", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:b028af8f4e726feb8c26037f7c6d6f97383977bd5ee6141ab4e8e1d096d6481f", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:e025b2dbf50901632da0ee2aa658105a322275eb120d782cbbf25f2895231154", "enabled": 1 } }, "naemon": { "100": { "checksum": "sha256:a19b3b0540dc52d9506ca7e5d804c2fe9115b3ea28bfd9273030e841e12eb277", "enabled": 1 } }, "nagios": { "100": { "checksum": "sha256:39ca80027ac8585f368bcd57f555ba87bf409f7b7d6c4292c09fd06cc1691c80", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:ef73850f29b4ff4ff904d506d545bf366fd1e7c2ba82a7a7c9a4513e3eee45d9", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:2c9356101a9ddbec94afdd12ca669ba93a1d422c302f9e17b78b18670617d2a1", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:9a32ce04c1dd8e120588c15b3057f838bedce8f14c91576b667295d47800e0ad", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:5e0a20ae09b00fac69ee30a0d55ff73fa692d8350c9c0b0343af61e4f0dd654f", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:9c67b21155929e43e4efd3fc81a85fddc9f1030b47ee4a275789014c1311b972", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:85cac2885d75522eb07189efcc3feeb7775fc6daf5cf3f1a28a1fd2109fe148c", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:b5b133d60b98068eb9480c54285050ae9b49d2fb309eac8994cc91c865ee02d4", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:59919a89d30a5d4b60d6971fa636fb62605d59d214ec614adc279f6cbe2c2b27", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:578bc975477539c659f3608b1445a0c7a9bc7c3f2dcf65b3e55f3a3af89ea564", "enabled": 1 } }, "nsd": { "100": { "checksum": "sha256:d5b03cdc6c8bbc222b8e3d30680b1a7d2d1a49837e7d509aafcf6b2a3a32195b", "enabled": 1 } }, "nslcd": { "100": { "checksum": "sha256:18b003071f4c36307616f7d5de8cff6d4e376af31cb96ce1a5ad6ae3011dfd09", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:f942c7fbe636b9d60327ef9dade1120340c16a2992a6b50db5fbaecd44ffd63d", "enabled": 1 } }, "ntp": { "100": { "checksum": "sha256:686664a71e74b0edd643ab9d556b1aab092fa707935da5ea928a66f54a3c84e0", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:dabc5ce6244d0b0939e9a07bd6bc232e8b666529a0b7b29527e586db8224862c", "enabled": 1 } }, "nut": { "100": { "checksum": "sha256:653e708dec531e483992b25944a689ec9369478d039a5ec62c98294ab73ce8c4", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:4ae55fe839abaaf0ea52b79a5c8f6a906575b83cca29532c2dd52337fb3d5790", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:7b2c87e864b6008f734e1effa48cee1399f41843b9d80d3fd95fbd19e058598f", "enabled": 1 } }, "oddjob": { "100": { "checksum": 
"sha256:9de0b544b2373ea0f1b7217f9179898479dbff0da36ea9857783de57d06585cf", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:761bf911674d23053eceabbbda8da16c73af5f300929a33a64513dc6e3b2d0af", "enabled": 1 } }, "openct": { "100": { "checksum": "sha256:5674f8e8c975570649e3065460786cb4521a86370bffef5a9de18c69813fe68e", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:bdef6dbb24ae22548634759ac823a8c3e21fde6368cfdfd742480f7027e63ddd", "enabled": 1 } }, "openfortivpn": { "100": { "checksum": "sha256:1a1bff55993510cb6481383b299e1f1a6349ec76e4947bfc8c5b1347e4d30bf4", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:ad3f3f3ba4442930560b291c022e674e6a50e4a37fe027926299b2f6cdec14bd", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:329e4b9d1df5012ace94cbe9cba7dfa7ee7d9f242090072c71aaacbeea78986a", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:31cbbb069354f984e4af75b387778fae1ff4dc6c3e60533357d005ffa960b51c", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:c0e1bf0a8eb50e0b41fa69bf5b65e2a7c324e4bc7255933a5d2bac3b9ae6f4de", "enabled": 1 } }, "openvpn": { "100": { "checksum": "sha256:a4d12ae8ad77d65d0fcabb20aa4a83886e782d732123f686f88a7d7472384104", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:a54f8a8ea5abb8a33734ecef9d9ad1c0dd090a6e0c5187e80de52f522d2d5e39", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:d6b7bb8f7749265bdaf938abecb2f8f78c6e9e8dc06c1c26b48da227af5a8654", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:67e31eec391bac337ebacb78c096589af4b7e8be6aa05c34cf187ba922a2abde", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:6635ff0231bfc3d88c771553d495941ee0f98871edfe6c86205b087186b3a72f", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:5b4531e9231d399ebec8e6b6870a812c6a64b2daffde35fa57a009b24a01809f", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:912a1c442559d6ab48453d87e2b997bdee3017a54a0b60aeaf7d4603fde0f34b", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:456b3520c26e5f2a913437318715712ae00f64932a27ab1bb8b8b42e0524fa05", "enabled": 1 } }, "pcp": { "100": { "checksum": "sha256:5302332fba7e6724ab7a3c32bd523b10322c20011c6e42ae4e769a49f3efabdd", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:2ee37df066a9ff80439b08c092809f3661e2f9a8ad02134e839627fd23a20c1f", "enabled": 1 } }, "pdns": { "100": { "checksum": "sha256:a1a10cd52eb9dd15bc1ccfed440f6b3d235edc7405a3932f81805d8d94000245", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:4280c40629dd111fd1c89ff867ac72d1e7ddde49dc3d286637e6a86b868e2303", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:6461acd0385c0b1a32bf646fc9e09da0c7ca513954ed8fe2a03f4ee7f6a64fcf", "enabled": 1 } }, "pingd": { "100": { "checksum": "sha256:f7536a518a046b793ea3f74a67d677b878baac44b28268c5ccecbf10715d89ab", "enabled": 1 } }, "piranha": { "100": { "checksum": "sha256:11436fb7942d28e3eca22bc078ee5475f632d8447008a6414f337d4bbc3515dc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:c70e17d1a4d347b38fdfbb2a5dab292e3e0c538ea52fb6cfdef2714e130da0b1", "enabled": 1 } }, "pkcs11proxyd": { "100": { "checksum": "sha256:c9582c89cac1546fa1e5bf9802c5a322e52e2529256f9e5922d5813e40be3646", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:ec40fbe6355370fe69a8ff343744654b06d4134c1518c64269be1f3a49083968", "enabled": 1 } }, "plymouthd": { 
"100": { "checksum": "sha256:7aa52d533e28a3ebf76d879c24bb4e0a58574033d5af6d4d22b716d1156c3f90", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:b32a5cc38b8edcc76b94862cee0c822a5b4d095329f53ab6f7cb014c76346e8c", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:686d9f7652cb2b3d7ce6af2aa620c14a6cbbbdb8d26b3630cfbf6bc34d9e3e6c", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:6098bd8a4f449c01dc7e0f4509663994259fe8848f2f21d1319bf7105bbacc4e", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:f561aef22cda98a94a74bedda09645e50066a77a23d3bdcbb1143b0c62ffe7b2", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:9de99e881e9e2e7e0b78629eec721840da4aa18f78ff5a06e46b7a596c28a09a", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:3101c4c1d54f3e175dc3fcff001c6937a9ffec7781f4095ea38fea88df7e8067", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:a734cc086d7d73ef2ffe7543f82dc50b57619e78e60664cb67a9513790f3335a", "enabled": 1 } }, "postgrey": { "100": { "checksum": "sha256:ef4d03336b66c1184f352f9b3fe8004d870bbf003673d4393bde24ea14b056b8", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:83e6712ba7343dc1346e94c51b75b05839f78bd24f9324d984b7aa9631bd0377", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:df050b0d180947788ab45862c4627ae640c92cf0f6a994a685e4cb5fe46bef76", "enabled": 1 } }, "prelude": { "100": { "checksum": "sha256:88c5fa3da64c127ed6e688f9eba5e50a8f6f98ea3243d29b8b0bc0375ef95420", "enabled": 1 } }, "privoxy": { "100": { "checksum": "sha256:e4a84567c63c892d4cdda3a9a4b15ad5188c093da679a354f00c43b6376a844d", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:98170eed35b67b9097514bcb044a18cc3f757af5f91b5d870ea707d6048cde75", "enabled": 1 } }, "prosody": { "100": { "checksum": "sha256:07e999e033252b28ae41697ddc23b42dbcf4bdc143c9eb1c55475aabc9fc9caf", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:7fc3410de486bf89c4d35989937f424b435c9c4f5398f47f9c840b146197c6ac", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:129978bcb62fdcaed728fb288b321c204575246eb535354e02bfd83089cb0ded", "enabled": 1 } }, "publicfile": { "100": { "checksum": "sha256:9cc75080e25fb5602ab266f1c0d0f16843bdfc561e7af6dec32d669e31bebe98", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:a41fc5d1275d548510a2be0180741f952f0f696f443eaabf03c1abf3f80f499e", "enabled": 1 } }, "puppet": { "100": { "checksum": "sha256:81559a7d5e16e228382840986ae0e414d4a78163a9b51b5d9c05a58e07574e8d", "enabled": 1 } }, "pwauth": { "100": { "checksum": "sha256:8590f80ce91ddd4862ce2beab9ec64deb66d99c5583ff5ee3cbff2e503caaa37", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:917a35c0ec48acfb5166c937e97269acac39541acebad9c1c410bfdbcb483da1", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:cfdb156d23ae6c99b3dbac171ab1626202bf1ae7671fae9f6d6f7241116638dd", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:eb4881c554de7882b4e5590a8efb35a758fc1b3d61bc1502632d6f4e571cb331", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:27d1fb8e99c6d1c75fc8efa8aeaf4303d0dcd8d03cb2992d968a3186d648f4b9", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:f0b2b81a6670b7640d49d49c364635f39272330f08bcdaa23c681bf2ac64e10f", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:791a60cff31fca43e01aa4bfe3a57c5938015db44fd1f64064778dbbcdb6e2e2", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:1cea7f5b37f7a0e722ecbccaa09d95db2b175ec125d62e3898a99081c51c6f96", "enabled": 1 } }, "raid": { 
"100": { "checksum": "sha256:a94b0b917312a73eda50ea641dee49eb00f49df286133fcdb13267fd49ce5d1f", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:159d40315f3f5086a31e6f0a6a90d342783d6f0c97c5feeb9c92808c7345adcf", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:a61f7efd50387ebfd35b675b22a8cba86c6216c0bbd901aab5e8674b5c442777", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:276a24e14ef12f5fadaeab2883d501cb096e01a9ce1be2178a5c50ebfa6b3fcb", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:61561d5f14d9a6597d6e312f5429947baab045d01a729f7cc34406e859fa0015", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:f40066828d25674c525148f890d9cc84ddbb203f5a4aaad616ef2cd3a497fdc3", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:742f881c1a4838ecfc1a55a7f3b78a72267644e3a64e3ec45a191599b5bd8532", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:5dd2b902123ef00065db6ec8d173f37baa26dbe43566bd5f06594ef1243fd5fd", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:67f232676ac23535867e2494f04989dbd6b9b6d4bbc67df67dc2edb4d31a8be8", "enabled": 1 } }, "rhev": { "100": { "checksum": "sha256:ee2f26beaa5c6a5d25e03ef9ab30302d6b29b283283683421fab52e29e47fe3d", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:39c550e1c8b149dc6f308b0f9ef238315208453ee064bb1558eff9137531840f", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:16bff56244925c7696fa2da5a4c986132488c352149cc88181bf6b4143fc80ba", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:e999510837aabb3ce118ad61225a846f687588e9a321ffe675b56511191bc323", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:9000bd99784bc22ffda4493b4985e8c5a2e65e87aeaa1cb96ba82d367a27a8be", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:c72c61297cf864a1abda8226de08039c8ae0212808d3f7fd8725b53b955d59f6", "enabled": 1 } }, "rkhunter": { "100": { "checksum": "sha256:d48bd9c5789f4adc396773664402ddeab432caa99597267ccdf24220948e5b3c", "enabled": 1 } }, "rkt": { "100": { "checksum": "sha256:a9414e82cadd2876471465737bd8322eb833e296869ebcefcd9e722ff717d350", "enabled": 1 } }, "rlogin": { "100": { "checksum": "sha256:a4b2e25abc4099a0a54821518b7c824a2ddb7544fb0b5ddde9a0a9be159ac1b2", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:5c867af2674586cc1c41aa3203e3704a0d1400d344a8e257bc61e9eebb86ad03", "enabled": 1 } }, "rolekit": { "100": { "checksum": "sha256:73382d4b8a12fa161dbb5ba36c94e7f0b1f82b1abdf0a4f07ca6c981e08f271b", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:1a2503ebaa997c6b6efd5d2343ea731f73b2f0312f2e8d5578dad2c8a84a94fa", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:e423284f5ed36e7b6c52f581b444a981d5d1c8af6c8dabe8c6cb6c71d3f49fb2", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:53831134210db04fe6e6b0f05e20b8b7307ae8c11e774faec9e1b3aa2b02b5dc", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:acbd671bd661f9f2f25d4798f1646a51075f297c8b086ea9bd3133a00e356432", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:c6110313310591ee2a08b504b04ebd1b98f370b6633172f06ee7c0c7db0a963d", "enabled": 1 } }, "rshd": { "100": { "checksum": "sha256:1340ab5daac926cc1354452869ab5aa78d27ceb110543624d2ffaf93773c394b", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:9dabc52612d567e728786c007f5017c7032c02be3a9201521a530fc91ca789f8", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:33dffe2764dc45bbc59b406a67187c39864412bac07ee089bda30ef09cb70faa", "enabled": 1 } }, "rtas": { "100": { "checksum": 
"sha256:9d55dfe843e44e8a93c02ea28b14856edfdb1f820bb647992daa6af11e2dbd37", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:ea77b9f26c8fc61b7fc281099b2f16e75c5b196660fff55a95f96e97935a7a1b", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:4468bfdd23924a96b4cf8c6fa1a3fa606fdd8ac69b7cb17c16a6e39a95908921", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:c97b92abaf053976c89a670d82bf06bc5c7d561ccf03e3ff1ac84be6e01cfc5c", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:18d1a69de368fa621e8ef3234b8ddb40261ced880bb732328a310db5a62a7a0a", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:711df017c1f168e33245144d67289225439bbed701fb1146cb83e9cd63ce1f7a", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:093d9d9793142bb9a8c4375f5f368ca1a4d9beb0cd05329518f91bb9ea51bd06", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:536ce94509d38b40200debf17fbddc16ec9004463fdb3fc42890dde9b3eb56f1", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:57ecac942ea46af55728362527d70a3e135c3b4711688ddf62596b9a768d9fb0", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:2ab2f52e6bac063f176e007b39cd8a4e43012ea075d82af20fbb3403891b6493", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:7df09c8fa09e105ecf51fee797975603a2df8d15c3a0bf00fdb1d565fe4a6b91", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:9cf04d33aa9dec0b559c892fb20df89fbe1883544d4ac2d6bf6fc319f0a16663", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:e7f9a696e0958d6bdbd6696e67a9b4af62430456d0f278e290db0ea1ee9750b7", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c72355dc70789deb94777acd0b47c2c3ae628e8d90bffb0e0e320941e5ddf3b7", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:98f68238d6ca96277390c160adeed4e3e382d5ded5a88a3909cfebe986b849be", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:10ca96a581ef4b0fa1789160fd71fb340d8b1d13906b42fab6e9119033d4f942", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:3a172b4972f9271250b4d228541c78b0243fd0544ac983db0f590e09674f700d", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:f78edfcb470cd9929f45b6db29ae4924a286ab30a03f80b7bdf3699bccb98314", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:ba2043d9665e2fd3a9e2d103671bfe647060b93d9c02eed2dca3066a0ecfb81d", "enabled": 1 } }, "sge": { "100": { "checksum": "sha256:cf843c98ff4113ded675f79df694549b4f848aecb1295f0a510101e301fbd348", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:c7c49d28e52aba4d168e684b9160a225fbecab373bfbb6963bbe89c93ecb867b", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:be1825562f583305597e5ceb1298ebb60e42c4f270b4a7e3751cf9d9be1b1fac", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:14748519962688e62b7bc7e7c03ad91c1f815c5d33c63f2d60e03340f55609a8", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:9f26cf1e9fa128e98c758a6325525f8547950a2440b6582202228c3c5c2c80d9", "enabled": 1 } }, "smokeping": { "100": { "checksum": "sha256:ae8cbd09d519a42bc01063c4c16f58e96cb3673acb557dcd2d09af444d742db1", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:8aa5f2749eeaef5ae871dc903dad87611e369c92e9b3fc28b4944f75db785a18", "enabled": 1 } }, "smsd": { "100": { "checksum": "sha256:d36a762c836a0e4305773e352fe0f46657784b5d9bf749f02df9c6d15f68d101", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:62bba8f6a236bae902815188cedbb5f3090acf0829247e6808787f8c913d9981", "enabled": 1 } }, "snmp": { "100": { "checksum": 
"sha256:68b5e9d408704e44ebf29ba76ae18afdcf6d8aef12794e8e9026997376ce12f8", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:eef39dec8d416650af3f9eeeb518b06dd9a9e09144aa579b6bd6422ba0037d70", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:c19dc2ed34c3d274f8e01647dc2d869ca06d4a9a3009f57c1845fac4d33ed358", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:a46a9508591afb1407fd14441c9c26cd495a3789e3c6792a2eba38a6642e4b97", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:8255ad891466762e31763d6f4791a32aa1eea1147a812020724eab8eb07c1916", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:ce5ba130d5d0ae5fafe8f823b824856590f990ad7c08aa0a5930f5060c252021", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:4170a7354e69ed60e0268389f74042e02a2511a4451ca20b97a63213b8881e1e", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:a4b4b395d2185abfd68edce0f813103ccbedd5d9748f9a41d83cc63dd1465109", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:5b0cc219f31e88f2fa78bc31d9c6fe6c7af29b4832509635672ca9edc79409c6", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:29cd0921e9effe356c856c3319488adf66c794cbb7d1610e5fca2b730b852939", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:943b25df416f2181aab46b3492aad9336f60a1b5b46187494f43ab516aae9c6a", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:788f2eb60a3d902060a6c5a08b086e2a1e96d213f86b206736da7e37eb21e51d", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:72c10f773d67b4209c39b4bea22e95c66d105f6f13e30f89bcd568eab6c889e3", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:736a46f682ff77d7c2cf54d5c264eb7b149793c12701b96e9be12bb3e6722796", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:0cc5796bfe362c3b28c73f62377c029a5f2321078b6d5f90bad42764415cd038", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:d96538a9cbb09fc38ba701cda88b2a0d199ab7826826d0043e4f07b05418bf84", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:a80606afbcc994e6fdc418cd83182f901d3e5b4b7b36fe262c71a25f43f10af1", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:19dfb362a8f445099eac9281522f0b13794cb9a0893a7acf0b54c15d193ef70e", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:f0e7b74086d47000f8335de5bade5a5a19a5e83bf581f885db92548546b7ea94", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:4614737ea0603530691e6158eb1bd07efa1992cb7ef52c201df3a637d3184cdf", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:f6a5a3b49885a9f780c5a9078cc968673809eaf89ecbe170fbb8a1ed4f521ea2", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:1fadc57b1e46515cbc038e96ae47ab74dd365a910f4d81ec9fb3044c4691260b", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:a5f0e5c340eaf127a166cc50be8170bfce80ccee0c14f32e4cc264089350da1a", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:fd538dbdeba0b4a1c244ba76b8dfef47f61da5a56f24f39fc24c137a9b3b303a", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bc0f37cdcdd0c9014e89e8be6758f7d9c97c67a4e42652459d6107314f059632", "enabled": 1 } }, "tcpd": { "100": { "checksum": "sha256:c78dcf2b9abf8d5ccf9f32b2debf6181a935a7078fe4a527991ab11d2999c4a9", "enabled": 1 } }, "tcsd": { "100": { "checksum": "sha256:e92fb82a2e509e3595d46dd464dac1029ce3a731f117fa67712d119d2878f195", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:fea41add022251126312da78373cb7fd05df1e9fd27547f1b4fc604a774827a1", "enabled": 1 } }, "telnet": { "100": { 
"checksum": "sha256:06d4733c0fc7358d738d4dbf53968c9d9017a72b01456be46633364f00a4207d", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:8ba2497a28f4c2a31177811fc0a091a3bb9814f9e02cfc8d84c004718f661e5f", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:6ec8d4d38e58efa04572ac713c9148e7182e7d49713ed89955fabdd512b8eea4", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:c464da2b8e789d74ea2b2914217a194a3c07081b9f383acd2fee9ab77bc525b5", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:2ce98252c7ff59539bb38204ee65898ba6cc701c3dc87417c11e2e7124f448a3", "enabled": 1 } }, "timedatex": { "100": { "checksum": "sha256:df36b9f44f28df1b14b4d6bff01de42c414b947a8e6f1e6efdaa7023250709aa", "enabled": 1 } }, "tlp": { "100": { "checksum": "sha256:7b1d2643c7470dc5b80dee41d18482bb6fd6de55371aba888708a28fe0bb0172", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:2a54cea48dfbeb1c9dad0e167f70aa17970c4f2c76c560330c467051fe3b574b", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:de3ed9b8d62d29e80e29a051419a648c154c12f6bb188814ca79120ff1dc263b", "enabled": 1 } }, "tor": { "100": { "checksum": "sha256:16c95ae098af2b964a7a94b5bb6cd1c84d5c7f1254d6411209e4d5cfe87677bc", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:b90ac3a04d3f04c7284f75802ffd69d6c1c3d5c0e6d08c3d0f2d9270b99dd487", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:8f8a1f1b2fea7a9fb8c3853e02c830f5204f691e9223cbdfbc320ec6914725dc", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:24410f1221660b8443af29cb55e42180e268fce722ceed2c99aa202e7dd3cc21", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:dba41aee81015b99378cff2273a56effd1202c0c937c05c63a913243b0641cdc", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:29e7469ef2704943f23c5040531fee8657cfed8440ef44b6268d21e6a9afe309", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:54482715f4fb5bca5c68ff67b9d145d12ad3df1438db97bcadcc32a2fb0f6191", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:13e69d4cbec7926c0ac6fb796749b4286462add3051f1e94554f23e637b81277", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:cb370bbe8bc0d7bca49a4fd1fad652017f4f8587c7c9d3277155fba32987550e", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:bbb2700ca73d867432851e12276a932b1553b034b1cc635f5c6681d6b62dcd3a", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:57a37a5c07af0f7ad80f4f01173e6cd6b604659e2d1b5605c2719dff8bbaf2fb", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:683c0598bdd00543cb696f7ed8cce6b55c658e566141538fc01b3f852af5f697", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:852eb8259277c64b80c91bd1dcbbe85f629e7218ab2f51d39324dcd78a4a278e", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:066e429e71ebcf11014f4ff6d7647c9d6d88ff191c64eeb9793021d16f4cde97", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:74b817fb60fd3ed5f074ef8ff399342ddc49fb2c250b08015dc975edd48f4dfd", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:fa589ab303d10fadd28a3e8d27cc9bc2e55a9b28f28c3f4c7e05968cb00a7cdd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:c5e4e24e89775d797a8988e2d5f72ec7a7dd8387289ede61af7a3ce2173cf167", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:6a3659d3706bc3af4b60e5de7efa9532dcc0c0a6f0c7735ed1300ec2120f9d01", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:f85ad7d20dd77416ab246ee0837b016a648176ec9956f40ff2ac6b3c2924edc5", "enabled": 1 } }, "varnishd": { 
"100": { "checksum": "sha256:18dab548c81b02f1b0f3efd6e25dd529bb0565e974156d55e42e274d3ccdf704", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:ee8af0b085b727e060ac3c82f1e38c89545505c9b26e849eda22e571064c46e7", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:0f7c8c575b060e863fe17e7ee8c67cc5cc3ea31da734a5428dc62c15f3b15bf4", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:df433826471b1c65a3686b57b4b07872a695d900731feb88cd6dfb76ddcbc5d9", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:4a9362fc5876897cae7062564d54d7f8ae12413c65c4c7fc6709f6407cc27160", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:fb9dda20b16232ac253b148063c9b267356b6f2831650f4c00fa01a6d0a8024a", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:d0ce73ebc7d2f494b669257a9a68106245371b455566654c7062694bcbad35df", "enabled": 1 } }, "vnstatd": { "100": { "checksum": "sha256:1df1aaf42d9c96922226b4828c38b6d315f7a9d3cda60fe54d99be5d618e140d", "enabled": 1 } }, "vpn": { "100": { "checksum": "sha256:9ea8931bf1c97618b2e99afb8c60a13d51a84db878bffa4082f6973e23b13eb1", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:43663b66ef8275c639a8076d92fc7da6821e0523c120e2c854839f9dc9d1db66", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:65b78e9b48a6cfe62f6c67c443d3bc667a58d206c09df00870949b6ae7ff8c30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:65560477bd0ae271799a76f75c5a3d46ef0c29f6922aa38e727c95b7e1095a99", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:4d4d609b3be3c2dc659694cfd2076e0c0c0d6446d16a3fb054a9e5f951b29410", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:867139a0cc2cb236ee54575ce6a8568cdbefd6785e8b7f64e09a3041da46b095", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:419d697ac987518dee6095070e2894c4112b50256e59d2b4f6acac585fb087f8", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:ce85b40df4d548aa55eb54bc546943366b654a3af7f602817f1fc499c0c8039e", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:f5d46e297e4e8e0a3f76c1fc8ae96db3ebf5b99ab538a54c171e489ac94ae1f0", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:aeb8895098531d1607e389703c783a3c1e8a8c1ad962397debe65214ff86e29e", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:85f1f1ed778597ec568ab7b9069779c088219d1da283a09382439c6803e7863e", "enabled": 1 } }, "zabbix": { "100": { "checksum": "sha256:476521323be1b84d7ba2539aa208d857678746a76e7e079577d3f46d251637ac", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:7536116b2852a578cbc5d32f7752b6dd3bb1202817db05306e1a16553c1d43b6", "enabled": 1 } }, "zebra": { "100": { "checksum": "sha256:3d18bbdc44c396c7715cce348f9248712132a1c53341d3b5760016d245f86e75", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:44cf07d7e6b15709d131b8b406032d0e6395a84e1e20bc67f9320a1e97c4dfcc", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:1177170edbd47b6fe17fa022a247d9b75b1fb3a5a49721bcff3c7da4f480c702", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:115 Wednesday 02 April 2025 12:17:27 -0400 (0:00:03.551) 0:04:50.288 ******* skipping: [managed-node2] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on 
filesystem tree] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:128 Wednesday 02 April 2025 12:17:27 -0400 (0:00:00.077) 0:04:50.365 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:136 Wednesday 02 April 2025 12:17:27 -0400 (0:00:00.066) 0:04:50.432 ******* skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:17:27 -0400 (0:00:00.111) 0:04:50.544 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:17:27 -0400 (0:00:00.073) 0:04:50.617 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:17:27 -0400 (0:00:00.065) 0:04:50.682 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:17:27 -0400 (0:00:00.064) 0:04:50.746 ******* skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:17:27 -0400 (0:00:00.063) 0:04:50.810 ******* included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:14 Wednesday 02 April 2025 12:17:28 -0400 (0:00:00.348) 0:04:51.159 ******* ok: [managed-node2] => { "ansible_facts": { "__podman_kube_spec": { "debug": true, "log_level": "debug", "state": "started" }, "__podman_kube_str": "apiVersion: v1\nkind: Pod\nmetadata:\n 
labels:\n app: test\n io.containers.autoupdate: registry\n name: httpd1\nspec:\n containers:\n - command:\n - /bin/busybox-extras\n - httpd\n - -f\n - -p\n - 80\n image: quay.io/libpod/testimage:20210610\n name: httpd1\n ports:\n - containerPort: 80\n hostPort: 15001\n volumeMounts:\n - mountPath: /var/www:Z\n name: www\n - mountPath: /var/httpd-create:Z\n name: create\n workingDir: /var/www\n volumes:\n - hostPath:\n path: /tmp/lsr_sr1vi4ai_podman/httpd1\n name: www\n - hostPath:\n path: /tmp/lsr_sr1vi4ai_podman/httpd1-create\n name: create\n" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:21
Wednesday 02 April 2025 12:17:28 -0400 (0:00:00.083) 0:04:51.242 *******
ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_kube": { "apiVersion": "v1", "kind": "Pod", "metadata": { "labels": { "app": "test", "io.containers.autoupdate": "registry" }, "name": "httpd1" }, "spec": { "containers": [ { "command": [ "/bin/busybox-extras", "httpd", "-f", "-p", 80 ], "image": "quay.io/libpod/testimage:20210610", "name": "httpd1", "ports": [ { "containerPort": 80, "hostPort": 15001 } ], "volumeMounts": [ { "mountPath": "/var/www:Z", "name": "www" }, { "mountPath": "/var/httpd-create:Z", "name": "create" } ], "workingDir": "/var/www" } ], "volumes": [ { "hostPath": { "path": "/tmp/lsr_sr1vi4ai_podman/httpd1" }, "name": "www" }, { "hostPath": { "path": "/tmp/lsr_sr1vi4ai_podman/httpd1-create" }, "name": "create" } ] } }, "__podman_kube_file": "", "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "podman_basic_user" }, "changed": false }
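[Editor's note] The parsed Pod spec above is what the role writes into the rootless user's ansible-kubernetes.d directory and runs under a podman-kube@ user unit (both paths appear in the tasks that follow). As a rough manual cross-check, not part of the role, the same spec could be exercised by hand once the file is in place; the session commands and the kube play invocation are assumptions based on the uid, paths, and ports in this log:

# Hypothetical smoke test as the rootless user (uid 3001, per the getent output below)
sudo machinectl shell podman_basic_user@.host
export XDG_RUNTIME_DIR=/run/user/3001    # matches __podman_xdg_runtime_dir set later
podman kube play ~/.config/containers/ansible-kubernetes.d/httpd1.yml
curl http://localhost:15001/             # hostPort published for containerPort 80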
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:33
Wednesday 02 April 2025 12:17:28 -0400 (0:00:00.069) 0:04:51.312 *******
ok: [managed-node2] => { "ansible_facts": { "__podman_kube_name": "httpd1", "__podman_rootless": true }, "changed": false }

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:38
Wednesday 02 April 2025 12:17:28 -0400 (0:00:00.055) 0:04:51.368 *******
included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Wednesday 02 April 2025 12:17:28 -0400 (0:00:00.080) 0:04:51.448 *******
ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "podman_basic_user": [ "x", "3001", "3001", "", "/home/podman_basic_user", "/bin/bash" ] } }, "changed": false }

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Wednesday 02 April 2025 12:17:28 -0400 (0:00:00.383) 0:04:51.831 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Wednesday 02 April 2025 12:17:28 -0400 (0:00:00.052) 0:04:51.884 *******
ok: [managed-node2] => { "ansible_facts": { "__podman_group": "3001" }, "changed": false }

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Wednesday 02 April 2025 12:17:28 -0400 (0:00:00.062) 0:04:51.946 *******
ok: [managed-node2] => { "changed": false, "stat": { "atime": 1743610430.4085276, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610401.5914862, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "2059311478", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } }

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Wednesday 02 April 2025 12:17:29 -0400 (0:00:00.367) 0:04:52.314 *******
ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "podman_basic_user" ], "delta": "0:00:00.003320", "end": "2025-04-02 12:17:29.471307", "rc": 0, "start": "2025-04-02 12:17:29.467987" }

STDOUT:

0: podman_basic_user 100000 65536

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Wednesday 02 April 2025 12:17:29 -0400 (0:00:00.368) 0:04:52.682 *******
ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "podman_basic_user" ], "delta": "0:00:00.003111", "end": "2025-04-02 12:17:29.837818", "rc": 0, "start": "2025-04-02 12:17:29.834707" }

STDOUT:

0: podman_basic_user 100000 65536

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Wednesday 02 April 2025 12:17:30 -0400 (0:00:00.456) 0:04:53.139 *******
ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "podman_basic_user": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "podman_basic_user": { "range": 65536, "start": 100000 } } }, "changed": false }
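[Editor's note] getsubids reported a 65536-ID subordinate range starting at 100000 for both UIDs and GIDs, which is why the file-based fallback tasks below are skipped. On hosts without getsubids(1), a rough manual equivalent of those fallbacks (the grep invocation is an illustration, not a role command) would be:

# Hypothetical fallback check: read the subordinate-ID range files directly.
grep '^podman_basic_user:' /etc/subuid /etc/subgid
# expected, given the getsubids output above:
# /etc/subuid:podman_basic_user:100000:65536
# /etc/subgid:podman_basic_user:100000:65536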
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Wednesday 02 April 2025 12:17:30 -0400 (0:00:00.060) 0:04:53.200 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Wednesday 02 April 2025 12:17:30 -0400 (0:00:00.045) 0:04:53.245 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Wednesday 02 April 2025 12:17:30 -0400 (0:00:00.048) 0:04:53.293 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Wednesday 02 April 2025 12:17:30 -0400 (0:00:00.052) 0:04:53.346 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Wednesday 02 April 2025 12:17:30 -0400 (0:00:00.066) 0:04:53.412 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Fail if no kube spec is given] ********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:43
Wednesday 02 April 2025 12:17:30 -0400 (0:00:00.053) 0:04:53.465 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_kube", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:52
Wednesday 02 April 2025 12:17:30 -0400 (0:00:00.050) 0:04:53.515 *******
ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/podman_basic_user", "__podman_xdg_runtime_dir": "/run/user/3001" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:60
Wednesday 02 April 2025 12:17:30 -0400 (0:00:00.065) 0:04:53.581 *******
ok: [managed-node2] => { "ansible_facts": { "__podman_kube_path": "/home/podman_basic_user/.config/containers/ansible-kubernetes.d" }, "changed": false }

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:64
Wednesday 02 April 2025 12:17:30 -0400 (0:00:00.046) 0:04:53.628 *******
ok: [managed-node2] => { "ansible_facts": { "__podman_kube_file": "/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:68
Wednesday 02 April 2025 12:17:30 -0400 (0:00:00.048) 0:04:53.677 *******
ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Get service name using systemd-escape] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:75
Wednesday 02 April 2025 12:17:30 -0400 (0:00:00.170) 0:04:53.847 *******
ok: [managed-node2] => { "changed": false, "cmd": [ "systemd-escape", "--template", "podman-kube@.service", "/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml" ], "delta": "0:00:00.005173", "end": "2025-04-02 12:17:31.037225", "rc": 0, "start": "2025-04-02 12:17:31.032052" }

STDOUT:

podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service

TASK [fedora.linux_system_roles.podman : Cleanup containers and services] ******
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:83
Wednesday 02 April 2025 12:17:31 -0400 (0:00:00.426) 0:04:54.273 *******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Create and update containers and services] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_kube_spec.yml:87
Wednesday 02 April 2025 12:17:31 -0400 (0:00:00.069) 0:04:54.343 *******
included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:2
Wednesday 02 April 2025 12:17:31 -0400 (0:00:00.153) 0:04:54.496 *******
included: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Wednesday 02 April 2025 12:17:31 -0400 (0:00:00.089) 0:04:54.586 *******
ok: [managed-node2] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "podman_basic_user" ], "delta": null, "end": null, "rc": 0, "start": null }

STDOUT:

skipped, since /var/lib/systemd/linger/podman_basic_user exists

MSG:

Did not run command since '/var/lib/systemd/linger/podman_basic_user' exists

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Wednesday 02 April 2025 12:17:31 -0400 (0:00:00.373) 0:04:54.959 *******
ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false }
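[Editor's note] Two mechanics in the tasks above are worth calling out: the per-kube-file service name comes from escaping the file path into the podman-kube@.service template, and user lingering keeps the rootless user's systemd manager (and thus the pod's user units) alive without a login session. A manual replay of both checks; the first two commands mirror ones the role actually ran, while the ls is an illustration of the marker file it tests:

# Derive the user service name exactly as the role did above:
systemd-escape --template podman-kube@.service \
    /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
# Linger was already enabled, so the role skipped the command; the marker it checks:
ls /var/lib/systemd/linger/podman_basic_user
loginctl enable-linger podman_basic_user   # idempotent; safe to re-run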
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Wednesday 02 April 2025 12:17:31 -0400 (0:00:00.060) 0:04:55.020 *******
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__podman_item_state | d('present') == 'absent'",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Get the host mount volumes] ***********
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:7
Wednesday 02 April 2025 12:17:31 -0400 (0:00:00.052) 0:04:55.073 *******
ok: [managed-node2] => {
    "ansible_facts": {
        "__podman_volumes": [
            "/tmp/lsr_sr1vi4ai_podman/httpd1",
            "/tmp/lsr_sr1vi4ai_podman/httpd1-create"
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:18
Wednesday 02 April 2025 12:17:31 -0400 (0:00:00.064) 0:04:55.137 *******
ok: [managed-node2] => (item=/tmp/lsr_sr1vi4ai_podman/httpd1) => {
    "ansible_loop_var": "item",
    "changed": false,
    "gid": 3001,
    "group": "podman_basic_user",
    "item": "/tmp/lsr_sr1vi4ai_podman/httpd1",
    "mode": "0755",
    "owner": "podman_basic_user",
    "path": "/tmp/lsr_sr1vi4ai_podman/httpd1",
    "secontext": "system_u:object_r:container_file_t:s0:c50,c773",
    "size": 23,
    "state": "directory",
    "uid": 3001
}
ok: [managed-node2] => (item=/tmp/lsr_sr1vi4ai_podman/httpd1-create) => {
    "ansible_loop_var": "item",
    "changed": false,
    "gid": 3001,
    "group": "podman_basic_user",
    "item": "/tmp/lsr_sr1vi4ai_podman/httpd1-create",
    "mode": "0755",
    "owner": "podman_basic_user",
    "path": "/tmp/lsr_sr1vi4ai_podman/httpd1-create",
    "secontext": "system_u:object_r:container_file_t:s0:c50,c773",
    "size": 6,
    "state": "directory",
    "uid": 3001
}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_kube_spec.yml:29
Wednesday 02 April 2025 12:17:32 -0400 (0:00:00.874) 0:04:56.011 *******
failed: [managed-node2] (item=None) => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
fatal: [managed-node2]: FAILED! => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [Dump journal] ************************************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:447
Wednesday 02 April 2025 12:17:33 -0400 (0:00:00.819) 0:04:56.831 *******
fatal: [managed-node2]: FAILED! => {
    "changed": false,
    "cmd": [
        "journalctl",
        "-ex"
    ],
    "delta": "0:00:00.026172",
    "end": "2025-04-02 12:17:34.017025",
    "failed_when_result": true,
    "rc": 0,
    "start": "2025-04-02 12:17:33.990853"
}

STDOUT:

-- Logs begin at Wed 2025-04-02 12:08:31 EDT, end at Wed 2025-04-02 12:17:33 EDT. --
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com polkitd[926]: Loading rules from directory /usr/share/polkit-1/rules.d
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com polkitd[926]: Finished loading, compiling and executing 2 rules
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com dbus-daemon[595]: [system] Successfully activated service 'org.freedesktop.PolicyKit1'
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Authorization Manager.
-- Subject: Unit polkit.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit polkit.service has finished starting up.
--
-- The start-up result is done.
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com polkitd[926]: Acquired the name org.freedesktop.PolicyKit1 on the system bus
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: Cloud-init v. 23.4-7.el8.2 running 'init' at Wed, 02 Apr 2025 16:08:42 +0000. Up 11.41 seconds.
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: | Device | Up | Address | Mask | Scope | Hw-Address |
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: | eth0 | True | 10.31.14.158 | 255.255.252.0 | global | 0a:ff:cf:72:2d:3f |
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: | eth0 | True | fe80::8ff:cfff:fe72:2d3f/64 | . | link | 0a:ff:cf:72:2d:3f |
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: | lo | True | 127.0.0.1 | 255.0.0.0 | host | . |
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: | lo | True | ::1/128 | . | host | . |
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: | Route | Destination | Gateway | Genmask | Interface | Flags |
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: | 0 | 0.0.0.0 | 10.31.12.1 | 0.0.0.0 | eth0 | UG |
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: | 1 | 10.31.12.0 | 0.0.0.0 | 255.255.252.0 | eth0 | U |
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: +-------+-------------+---------+-----------+-------+
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: | Route | Destination | Gateway | Interface | Flags |
Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: +-------+-------------+---------+-----------+-------+
Apr 02 12:08:42
ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: | 1 | fe80::/64 | :: | eth0 | U | Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: | 3 | multicast | :: | eth0 | U | Apr 02 12:08:42 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: ci-info: +-------+-------------+---------+-----------+-------+ Apr 02 12:08:47 ip-10-31-14-158.us-east-1.aws.redhat.com chronyd[604]: Selected source 10.2.32.37 Apr 02 12:08:47 ip-10-31-14-158.us-east-1.aws.redhat.com chronyd[604]: System clock TAI offset set to 37 seconds Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: Generating public/private rsa key pair. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: The key fingerprint is: Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: SHA256:HxAG7LCvqDe9ifKLZCRriNTNB7PM1yPLH7iTWZAw7xE root@ip-10-31-14-158.us-east-1.aws.redhat.com Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: The key's randomart image is: Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: +---[RSA 3072]----+ Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | ...o | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | .o.E . | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | *+ + | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | . * ==.. | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: |.o . B.+So. | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: |*. =.+o.. | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: |++ o . ++.. | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: |=.+.o. +o . | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: |o=+oo. ... | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: +----[SHA256]-----+ Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: Generating public/private dsa key pair. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: Your identification has been saved in /etc/ssh/ssh_host_dsa_key. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: Your public key has been saved in /etc/ssh/ssh_host_dsa_key.pub. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: The key fingerprint is: Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: SHA256:KR9Tr/VkN3wmbaTXm0SZsQI7JnCPqUilXRWJirWXXSE root@ip-10-31-14-158.us-east-1.aws.redhat.com Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: The key's randomart image is: Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: +---[DSA 1024]----+ Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | o ooEo... | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | +.+.+.+. =| Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | oo.o++=.. =.| Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | ...o.=oo. 
++.| Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | ...S o =+O| Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | o o o +.*=| Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | . . .o | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: +----[SHA256]-----+ Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: Generating public/private ecdsa key pair. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: The key fingerprint is: Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: SHA256:gQtGBkXuJwOQIBOvVRQj7R6K80IWybh3szncZN6CV3s root@ip-10-31-14-158.us-east-1.aws.redhat.com Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: The key's randomart image is: Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: +---[ECDSA 256]---+ Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: |Boo*X. | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: |o+ *.. . | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: |o =.+ . . | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: |.* +o. . . | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: |.o.o+.o S | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: |+oo ++o . | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: |o+ o X o . | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: |. . * = o E | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | . o . . | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: +----[SHA256]-----+ Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: Generating public/private ed25519 key pair. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: The key fingerprint is: Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: SHA256:cE7JbbZ/ktI40FKgc+ixgzplMds9uwbM0MEd/sdc0Tk root@ip-10-31-14-158.us-east-1.aws.redhat.com Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: The key's randomart image is: Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: +--[ED25519 256]--+ Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | . .o. ...| Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | o=.+ E.| Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | o.*.B = . .| Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | .B.@ * + . 
| Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | =+= S + + | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | + +. = = . | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | o .. + = . | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | . .. o o | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: | .. | Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[941]: +----[SHA256]-----+ Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Initial cloud-init job (metadata service crawler). -- Subject: Unit cloud-init.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-init.service has finished starting up. -- -- The start-up result is done. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... -- Subject: Unit sshd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sshd.service has begun starting up. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-config availability. -- Subject: Unit cloud-config.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-config.target has finished starting up. -- -- The start-up result is done. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Reached target Network is Online. -- Subject: Unit network-online.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit network-online.target has finished starting up. -- -- The start-up result is done. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting System Logging Service... -- Subject: Unit rsyslog.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rsyslog.service has begun starting up. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting Crash recovery kernel arming... -- Subject: Unit kdump.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit kdump.service has begun starting up. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting Apply the settings specified in cloud-config... -- Subject: Unit cloud-config.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-config.service has begun starting up. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting Notify NFS peers of a restart... -- Subject: Unit rpc-statd-notify.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rpc-statd-notify.service has begun starting up. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting The restraint harness.... -- Subject: Unit restraintd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit restraintd.service has begun starting up. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[1019]: Server listening on 0.0.0.0 port 22. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. 
-- Subject: Unit sshd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sshd.service has finished starting up. -- -- The start-up result is done. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[1019]: Server listening on :: port 22. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com sm-notify[1023]: Version 2.3.3 starting Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Notify NFS peers of a restart. -- Subject: Unit rpc-statd-notify.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rpc-statd-notify.service has finished starting up. -- -- The start-up result is done. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started The restraint harness.. -- Subject: Unit restraintd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit restraintd.service has finished starting up. -- -- The start-up result is done. Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com restraintd[1037]: Listening on http://localhost:8081 Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com rsyslogd[1020]: [origin software="rsyslogd" swVersion="8.2102.0-15.el8" x-pid="1020" x-info="https://www.rsyslog.com"] start Apr 02 12:08:49 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started System Logging Service. -- Subject: Unit rsyslog.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rsyslog.service has finished starting up. -- -- The start-up result is done. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com rsyslogd[1020]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ] Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com kdumpctl[1027]: kdump: Detected change(s) in the following file(s): /etc/fstab Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[1329]: Cloud-init v. 23.4-7.el8.2 running 'modules:config' at Wed, 02 Apr 2025 16:08:50 +0000. Up 19.38 seconds. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com kdumpctl[1027]: kdump: Rebuilding /boot/initramfs-4.18.0-553.5.1.el8.x86_64kdump.img Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Apply the settings specified in cloud-config. -- Subject: Unit cloud-config.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-config.service has finished starting up. -- -- The start-up result is done. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting Permit User Sessions... -- Subject: Unit systemd-user-sessions.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-user-sessions.service has begun starting up. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting Execute cloud user/final scripts... -- Subject: Unit cloud-final.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-final.service has begun starting up. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Permit User Sessions. -- Subject: Unit systemd-user-sessions.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-user-sessions.service has finished starting up. 
-- -- The start-up result is done. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting Hold until boot process finishes up... -- Subject: Unit plymouth-quit-wait.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-quit-wait.service has begun starting up. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Command Scheduler. -- Subject: Unit crond.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit crond.service has finished starting up. -- -- The start-up result is done. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting Terminate Plymouth Boot Screen... -- Subject: Unit plymouth-quit.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-quit.service has begun starting up. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Received SIGRTMIN+21 from PID 335 (plymouthd). Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com crond[1352]: (CRON) STARTUP (1.5.2) Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com crond[1352]: (CRON) INFO (Syslog will be used instead of sendmail.) Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com crond[1352]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 39% if used.) Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com crond[1352]: (CRON) INFO (running with inotify support) Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Terminate Plymouth Boot Screen. -- Subject: Unit plymouth-quit.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-quit.service has finished starting up. -- -- The start-up result is done. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Received SIGRTMIN+21 from PID 335 (n/a). Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Hold until boot process finishes up. -- Subject: Unit plymouth-quit-wait.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-quit-wait.service has finished starting up. -- -- The start-up result is done. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Getty on tty1. -- Subject: Unit getty@tty1.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit getty@tty1.service has finished starting up. -- -- The start-up result is done. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Reached target Login Prompts. -- Subject: Unit getty.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit getty.target has finished starting up. -- -- The start-up result is done. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Reached target Multi-User System. -- Subject: Unit multi-user.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit multi-user.target has finished starting up. -- -- The start-up result is done. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting Update UTMP about System Runlevel Changes... 
-- Subject: Unit systemd-update-utmp-runlevel.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-update-utmp-runlevel.service has begun starting up. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Update UTMP about System Runlevel Changes. -- Subject: Unit systemd-update-utmp-runlevel.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-update-utmp-runlevel.service has finished starting up. -- -- The start-up result is done. Apr 02 12:08:50 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1560]: dracut-049-233.git20240115.el8 Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics -o "plymouth dash resume ifcfg earlykdump" --compress=xz --mount "/dev/disk/by-uuid/fe591198-9082-4b15-9b62-e83518524cd2 /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota,nofail,x-systemd.before=initrd-fs.target" --no-hostonly-default-device -f /boot/initramfs-4.18.0-553.5.1.el8.x86_64kdump.img 4.18.0-553.5.1.el8.x86_64 Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[1678]: ############################################################# Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[1686]: -----BEGIN SSH HOST KEY FINGERPRINTS----- Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[1693]: 1024 SHA256:KR9Tr/VkN3wmbaTXm0SZsQI7JnCPqUilXRWJirWXXSE root@ip-10-31-14-158.us-east-1.aws.redhat.com (DSA) Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[1698]: 256 SHA256:gQtGBkXuJwOQIBOvVRQj7R6K80IWybh3szncZN6CV3s root@ip-10-31-14-158.us-east-1.aws.redhat.com (ECDSA) Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[1709]: 256 SHA256:cE7JbbZ/ktI40FKgc+ixgzplMds9uwbM0MEd/sdc0Tk root@ip-10-31-14-158.us-east-1.aws.redhat.com (ED25519) Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[1717]: 3072 SHA256:HxAG7LCvqDe9ifKLZCRriNTNB7PM1yPLH7iTWZAw7xE root@ip-10-31-14-158.us-east-1.aws.redhat.com (RSA) Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[1723]: -----END SSH HOST KEY FINGERPRINTS----- Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[1725]: ############################################################# Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[1604]: Cloud-init v. 23.4-7.el8.2 running 'modules:final' at Wed, 02 Apr 2025 16:08:51 +0000. Up 20.02 seconds. Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com cloud-init[1604]: Cloud-init v. 23.4-7.el8.2 finished at Wed, 02 Apr 2025 16:08:51 +0000. Datasource DataSourceEc2Local. Up 20.19 seconds Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Execute cloud user/final scripts. 
-- Subject: Unit cloud-final.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-final.service has finished starting up. -- -- The start-up result is done. Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-init target. -- Subject: Unit cloud-init.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-init.target has finished starting up. -- -- The start-up result is done. Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'ifcfg' will not be installed, because it's in the list to be omitted! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'plymouth' will not be installed, because it's in the list to be omitted! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'resume' will not be installed, because it's in the list to be omitted! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'earlykdump' will not be installed, because it's in the list to be omitted! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: memstrack is available Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! 
Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! Apr 02 12:08:51 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: bash *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: systemd *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: systemd-initrd *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: watchdog-modules *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
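The dracut churn in this stretch of the journal is the kdump initramfs rebuild that kdumpctl started at 12:08:50 after noticing the /etc/fstab change; the "will not be installed" lines are dracut probing for tools that are simply not on this minimal image. As a rough sketch unrelated to the role under test, the same rebuild (and thus the same output) can be triggered from a play:

- name: Rebuild the kdump initramfs (sketch, reproduces this dracut output)
  hosts: managed-node2
  gather_facts: false
  become: true
  tasks:
    - name: Ask kdumpctl to regenerate its initramfs
      ansible.builtin.command: kdumpctl rebuild
      # a bare command task always reports "changed"; acceptable for a
      # one-off diagnostic run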
Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: nss-softokn *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: rngd *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: i18n *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: drm *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: prefixdevname *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: kernel-modules *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: kernel-modules-extra *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: kernel-modules-extra: configuration source "/run/depmod.d/" does not exist Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: kernel-modules-extra: configuration source "/lib/depmod.d/" does not exist Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: kernel-modules-extra: parsing configuration file "/etc/depmod.d//dist.conf" Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: kernel-modules-extra: /etc/depmod.d//dist.conf: added "updates extra built-in weak-updates" to the list of search directories Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: fstab-sys *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: rootfs-block *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: terminfo *** Apr 02 12:08:52 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: udev-rules *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: Skipping udev rule: 91-permissions.rules Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: Skipping udev rule: 80-drivers-modprobe.rules Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: biosdevname *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: dracut-systemd *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: usrmount *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: base *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: fs-lib *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: kdumpbase *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: memstrack *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: microcode_ctl-fw_dir_override *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl module: mangling fw_dir Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware" Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"... 
Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: intel: caveats check for kernel version "4.18.0-553.5.1.el8.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-2d-07"... Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: configuration "intel-06-2d-07" is ignored Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4e-03"... Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: configuration "intel-06-4e-03" is ignored Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"... Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: configuration "intel-06-4f-01" is ignored Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-55-04"... Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: configuration "intel-06-55-04" is ignored Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-5e-03"... Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: configuration "intel-06-5e-03" is ignored Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8c-01"... Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: configuration "intel-06-8c-01" is ignored Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-0xca"... Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: configuration "intel-06-8e-9e-0x-0xca" is ignored Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-dell"... 
Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: configuration "intel-06-8e-9e-0x-dell" is ignored Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: shutdown *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including module: squash *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Including modules done *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Installing kernel module dependencies *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Installing kernel module dependencies done *** Apr 02 12:08:53 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Resolving executable dependencies *** Apr 02 12:08:54 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Resolving executable dependencies done*** Apr 02 12:08:54 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Hardlinking files *** Apr 02 12:08:54 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Hardlinking files done *** Apr 02 12:08:54 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Generating early-microcode cpio image *** Apr 02 12:08:54 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Constructing GenuineIntel.bin *** Apr 02 12:08:54 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Constructing GenuineIntel.bin *** Apr 02 12:08:54 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Store current command line parameters *** Apr 02 12:08:54 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: Stored kernel commandline: Apr 02 12:08:54 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: No dracut internal kernel commandline stored in the initramfs Apr 02 12:08:54 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Install squash loader *** Apr 02 12:08:55 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Stripping files *** Apr 02 12:08:55 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Stripping files done *** Apr 02 12:08:55 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Squashing the files inside the initramfs *** Apr 02 12:09:04 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Squashing the files inside the initramfs done *** Apr 02 12:09:04 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Creating image file '/boot/initramfs-4.18.0-553.5.1.el8.x86_64kdump.img' *** Apr 02 12:09:04 ip-10-31-14-158.us-east-1.aws.redhat.com dracut[1562]: *** Creating initramfs image file '/boot/initramfs-4.18.0-553.5.1.el8.x86_64kdump.img' done *** Apr 02 12:09:04 ip-10-31-14-158.us-east-1.aws.redhat.com kdumpctl[1027]: kdump: kexec: loaded kdump kernel Apr 02 12:09:04 ip-10-31-14-158.us-east-1.aws.redhat.com kdumpctl[1027]: kdump: Starting kdump: [OK] Apr 02 12:09:04 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Crash recovery kernel arming. -- Subject: Unit kdump.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit kdump.service has finished starting up. -- -- The start-up result is done. Apr 02 12:09:04 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Startup finished in 517ms (kernel) + 3.330s (initrd) + 29.749s (userspace) = 33.596s. 
-- Subject: System start-up is now complete -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- All system services necessary queued for starting at boot have been -- started. Note that this does not mean that the machine is now idle as services -- might still be busy with completing start-up. -- -- Kernel start-up required 517513 microseconds. -- -- Initial RAM disk start-up required 3330221 microseconds. -- -- Userspace start-up required 29749253 microseconds. Apr 02 12:09:11 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-hostnamed.service has successfully entered the 'dead' state. Apr 02 12:09:53 ip-10-31-14-158.us-east-1.aws.redhat.com chronyd[604]: Selected source 99.28.14.242 (2.centos.pool.ntp.org) Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[5067]: Accepted publickey for root from 10.30.34.132 port 33928 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Created slice User Slice of UID 0. -- Subject: Unit user-0.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-0.slice has finished starting up. -- -- The start-up result is done. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting User runtime directory /run/user/0... -- Subject: Unit user-runtime-dir@0.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@0.service has begun starting up. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd-logind[597]: New session 1 of user root. -- Subject: A new session 1 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 1 has been created for the user root. -- -- The leading process of the session is 5067. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started User runtime directory /run/user/0. -- Subject: Unit user-runtime-dir@0.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@0.service has finished starting up. -- -- The start-up result is done. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting User Manager for UID 0... -- Subject: Unit user@0.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@0.service has begun starting up. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[5072]: pam_unix(systemd-user:session): session opened for user root by (uid=0) Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[5072]: Starting D-Bus User Message Bus Socket. -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[5072]: Reached target Paths. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[5072]: Reached target Timers. 
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[5072]: Listening on D-Bus User Message Bus Socket. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[5072]: Reached target Sockets. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[5072]: Reached target Basic System. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[5072]: Reached target Default. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[5072]: Startup finished in 30ms. -- Subject: User manager start-up is now complete -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The user manager instance for user 0 has been started. All services queued -- for starting have been started. Note that other services might still be starting -- up or be started at any later time. -- -- Startup of the manager took 30083 microseconds. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started User Manager for UID 0. -- Subject: Unit user@0.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@0.service has finished starting up. -- -- The start-up result is done. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Session 1 of user root. -- Subject: Unit session-1.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit session-1.scope has finished starting up. -- -- The start-up result is done. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[5067]: pam_unix(sshd:session): session opened for user root by (uid=0) Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[5081]: Received disconnect from 10.30.34.132 port 33928:11: disconnected by user Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[5081]: Disconnected from user root 10.30.34.132 port 33928 Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[5067]: pam_unix(sshd:session): session closed for user root Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit session-1.scope has successfully entered the 'dead' state. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd-logind[597]: Session 1 logged out. Waiting for processes to exit. Apr 02 12:11:24 ip-10-31-14-158.us-east-1.aws.redhat.com systemd-logind[597]: Removed session 1. 
-- Subject: Session 1 has been terminated -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A session with the ID 1 has been terminated. Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[5101]: Accepted publickey for root from 10.31.10.215 port 38612 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[5102]: Accepted publickey for root from 10.31.10.215 port 38626 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Session 4 of user root. -- Subject: Unit session-4.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit session-4.scope has finished starting up. -- -- The start-up result is done. Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com systemd-logind[597]: New session 4 of user root. -- Subject: A new session 4 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 4 has been created for the user root. -- -- The leading process of the session is 5101. Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Session 3 of user root. -- Subject: Unit session-3.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit session-3.scope has finished starting up. -- -- The start-up result is done. Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com systemd-logind[597]: New session 3 of user root. -- Subject: A new session 3 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 3 has been created for the user root. -- -- The leading process of the session is 5102. Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[5101]: pam_unix(sshd:session): session opened for user root by (uid=0) Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[5102]: pam_unix(sshd:session): session opened for user root by (uid=0) Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[5108]: Received disconnect from 10.31.10.215 port 38626:11: disconnected by user Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[5108]: Disconnected from user root 10.31.10.215 port 38626 Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com sshd[5102]: pam_unix(sshd:session): session closed for user root Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: session-3.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit session-3.scope has successfully entered the 'dead' state. Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com systemd-logind[597]: Session 3 logged out. Waiting for processes to exit. Apr 02 12:11:31 ip-10-31-14-158.us-east-1.aws.redhat.com systemd-logind[597]: Removed session 3. -- Subject: Session 3 has been terminated -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A session with the ID 3 has been terminated. 
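The session setup and teardown visible here is exactly what the role's earlier "Manage linger" step insulates rootless containers from: with lingering enabled, the user manager and /run/user/3001 (the __podman_xdg_runtime_dir computed earlier) persist after every SSH session closes. An illustrative check, separate from the test:

- name: Confirm lingering keeps the rootless user's manager alive (sketch)
  hosts: managed-node2
  gather_facts: false
  tasks:
    - name: Query logind for the user's linger flag
      ansible.builtin.command: loginctl show-user podman_basic_user --property=Linger
      register: linger
      changed_when: false

    - name: Expect Linger=yes, matching the /var/lib/systemd/linger file seen earlier
      ansible.builtin.assert:
        that:
          - linger.stdout == 'Linger=yes'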
Apr 02 12:11:49 ip-10-31-14-158.us-east-1.aws.redhat.com unknown: Running test '/Prepare-managed-node/tests/prep_managed_node' (serial number 1) with reboot count 0 and test restart count 0. (Be aware the test name is sanitized!) Apr 02 12:11:50 ip-10-31-14-158.us-east-1.aws.redhat.com dbus-daemon[595]: [system] Activating via systemd: service name='org.freedesktop.hostname1' unit='dbus-org.freedesktop.hostname1.service' requested by ':1.24' (uid=0 pid=6609 comm="hostnamectl set-hostname managed-node2 " label="unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023") Apr 02 12:11:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Starting Hostname Service... -- Subject: Unit systemd-hostnamed.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-hostnamed.service has begun starting up. Apr 02 12:11:50 ip-10-31-14-158.us-east-1.aws.redhat.com dbus-daemon[595]: [system] Successfully activated service 'org.freedesktop.hostname1' Apr 02 12:11:50 ip-10-31-14-158.us-east-1.aws.redhat.com systemd[1]: Started Hostname Service. -- Subject: Unit systemd-hostnamed.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-hostnamed.service has finished starting up. -- -- The start-up result is done. Apr 02 12:11:50 managed-node2 systemd-hostnamed[6610]: Changed static host name to 'managed-node2' Apr 02 12:11:50 managed-node2 systemd-hostnamed[6610]: Changed host name to 'managed-node2' Apr 02 12:11:50 managed-node2 NetworkManager[661]: [1743610310.2967] hostname: static hostname changed from "ip-10-31-14-158.us-east-1.aws.redhat.com" to "managed-node2" Apr 02 12:11:50 managed-node2 dbus-daemon[595]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=661 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Apr 02 12:11:50 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has begun starting up. Apr 02 12:11:50 managed-node2 NetworkManager[661]: [1743610310.3078] policy: set-hostname: set hostname to 'managed-node2' (from system configuration) Apr 02 12:11:50 managed-node2 dbus-daemon[595]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher' Apr 02 12:11:50 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service. -- Subject: Unit NetworkManager-dispatcher.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has finished starting up. -- -- The start-up result is done. Apr 02 12:11:50 managed-node2 unknown: Leaving test '/Prepare-managed-node/tests/prep_managed_node' (serial number 1). (Be aware the test name is sanitized!) Apr 02 12:12:00 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Apr 02 12:12:20 managed-node2 systemd[1]: systemd-hostnamed.service: Succeeded. 
Apr 02 12:12:37 managed-node2 sshd[7157]: Accepted publickey for root from 10.31.47.20 port 45146 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE
Apr 02 12:12:37 managed-node2 systemd[1]: Started Session 5 of user root.
Apr 02 12:12:37 managed-node2 systemd-logind[597]: New session 5 of user root.
Apr 02 12:12:37 managed-node2 sshd[7157]: pam_unix(sshd:session): session opened for user root by (uid=0)
Apr 02 12:12:37 managed-node2 platform-python[7302]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Apr 02 12:12:38 managed-node2 platform-python[7454]: ansible-tempfile Invoked with state=directory prefix=lsr_ suffix=_podman path=None
Apr 02 12:12:39 managed-node2 platform-python[7577]: ansible-file Invoked with path=/tmp/lsr_sr1vi4ai_podman mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False state=None _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:12:39 managed-node2 platform-python[7700]: ansible-user Invoked with name=podman_basic_user uid=3001 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node2 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None
Apr 02 12:12:39 managed-node2 useradd[7705]: new group: name=podman_basic_user, GID=3001
Apr 02 12:12:39 managed-node2 useradd[7705]: new user: name=podman_basic_user, UID=3001, GID=3001, home=/home/podman_basic_user, shell=/bin/bash
Apr 02 12:12:41 managed-node2 platform-python[7833]: ansible-file Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd1 state=directory mode=0755 owner=podman_basic_user recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:12:41 managed-node2 platform-python[7956]: ansible-file Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd2 state=directory mode=0755 owner=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:12:41 managed-node2 platform-python[8079]: ansible-file Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd3 state=directory mode=0755 owner=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:12:42 managed-node2 platform-python[8202]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd1/index.txt follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Apr 02 12:12:42 managed-node2 platform-python[8301]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_sr1vi4ai_podman/httpd1/index.txt mode=0644 owner=podman_basic_user src=/root/.ansible/tmp/ansible-tmp-1743610361.8035398-8233-134635055705623/source _original_basename=tmpclxsiq5y follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:12:42 managed-node2 platform-python[8426]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd2/index.txt follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Apr 02 12:12:42 managed-node2 platform-python[8525]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_sr1vi4ai_podman/httpd2/index.txt mode=0644 owner=root src=/root/.ansible/tmp/ansible-tmp-1743610362.4155128-8233-19129380191169/source _original_basename=tmpkjcj6qlv follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:12:43 managed-node2 platform-python[8650]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd3/index.txt follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Apr 02 12:12:43 managed-node2 platform-python[8749]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_sr1vi4ai_podman/httpd3/index.txt mode=0644 owner=root src=/root/.ansible/tmp/ansible-tmp-1743610363.036985-8233-254696807528367/source _original_basename=tmpjlk11x6b follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:12:44 managed-node2 platform-python[8874]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:12:44 managed-node2 platform-python[8997]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:12:46 managed-node2 sudo[9245]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bdimigrgvgrzkixobxjyhkmuknuxqlgh ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1743610366.2679946-8327-211561726562352/AnsiballZ_dnf.py'
Apr 02 12:12:46 managed-node2 sudo[9245]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Apr 02 12:12:46 managed-node2 platform-python[9248]: ansible-ansible.legacy.dnf Invoked with name=['crun', 'podman', 'podman-plugins', 'shadow-utils-subid'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:13:04 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Apr 02 12:13:05 managed-node2 setsebool[9320]: The virt_use_nfs policy boolean was changed to 1 by root
Apr 02 12:13:05 managed-node2 setsebool[9320]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root
Apr 02 12:13:21 managed-node2 kernel: SELinux: Converting 367 SID table entries...
Apr 02 12:13:21 managed-node2 kernel: SELinux: policy capability network_peer_controls=1
Apr 02 12:13:21 managed-node2 kernel: SELinux: policy capability open_perms=1
Apr 02 12:13:21 managed-node2 kernel: SELinux: policy capability extended_socket_class=1
Apr 02 12:13:21 managed-node2 kernel: SELinux: policy capability always_check_network=0
Apr 02 12:13:21 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1
Apr 02 12:13:21 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1
Apr 02 12:13:21 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Apr 02 12:13:21 managed-node2 kernel: fuse: init (API version 7.34)
Apr 02 12:13:21 managed-node2 systemd[1]: Mounting FUSE Control File System...
Apr 02 12:13:21 managed-node2 systemd[1]: Mounted FUSE Control File System.
Apr 02 12:13:22 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Apr 02 12:13:22 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Apr 02 12:13:47 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
Apr 02 12:13:47 managed-node2 systemd[1]: cgroup compatibility translation between legacy and unified hierarchy settings activated. See cgroup-compat debug messages for details.
Apr 02 12:13:47 managed-node2 systemd[1]: Starting man-db-cache-update.service...
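The ansible-ansible.legacy.dnf entry above corresponds to a package task of roughly the following shape; only name and state are set by the caller, and the remaining logged parameters are module defaults:

- name: Install podman and test dependencies
  ansible.builtin.dnf:
    name:
      - crun
      - podman
      - podman-plugins
      - shadow-utils-subid
    state: present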
Apr 02 12:13:47 managed-node2 systemd[1]: Reloading.
Apr 02 12:13:48 managed-node2 sudo[9245]: pam_unix(sudo:session): session closed for user root
Apr 02 12:13:48 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded.
Apr 02 12:13:48 managed-node2 systemd[1]: Started man-db-cache-update.service.
Apr 02 12:13:48 managed-node2 systemd[1]: run-ra0daa01aab37454285ea701b63944af1.service: Succeeded.
Apr 02 12:13:49 managed-node2 platform-python[11800]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:13:49 managed-node2 platform-python[11929]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Apr 02 12:13:50 managed-node2 platform-python[12053]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:13:51 managed-node2 platform-python[12178]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:13:52 managed-node2 platform-python[12301]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:13:52 managed-node2 platform-python[12424]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:13:56 managed-node2 platform-python[12548]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Apr 02 12:13:56 managed-node2 platform-python[12675]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Apr 02 12:13:56 managed-node2 systemd[1]: Reloading.
Apr 02 12:13:56 managed-node2 systemd[1]: Starting firewalld - dynamic firewall daemon...
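The two systemd module calls above map to a pair of tasks along these lines (a sketch: the role first clears any masking, then starts and enables the unit):

- name: Ensure firewalld is unmasked
  ansible.builtin.systemd:
    name: firewalld
    masked: false

- name: Ensure firewalld is enabled and running
  ansible.builtin.systemd:
    name: firewalld
    state: started
    enabled: true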
Apr 02 12:13:57 managed-node2 systemd[1]: Started firewalld - dynamic firewall daemon.
Apr 02 12:13:57 managed-node2 firewalld[12711]: WARNING: AllowZoneDrifting is enabled. This is considered an insecure configuration option. It will be removed in a future release. Please consider disabling it now.
Apr 02 12:13:57 managed-node2 platform-python[12849]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Apr 02 12:13:59 managed-node2 platform-python[13023]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:14:00 managed-node2 platform-python[13146]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:14:00 managed-node2 platform-python[13269]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:14:03 managed-node2 platform-python[13393]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:14:05 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Apr 02 12:14:05 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Apr 02 12:14:05 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
Apr 02 12:14:06 managed-node2 systemd[1]: Starting man-db-cache-update.service...
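firewall_lib is the internal module of the fedora.linux_system_roles.firewall role; playbooks normally drive it through the role's firewall variable rather than calling it directly. A hedged sketch matching the logged arguments:

- name: Open the test port range
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.firewall
  vars:
    firewall:
      - port: 15001-15003/tcp
        state: enabled
        permanent: true
        runtime: true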
Apr 02 12:14:06 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded.
Apr 02 12:14:06 managed-node2 systemd[1]: Started man-db-cache-update.service.
Apr 02 12:14:06 managed-node2 systemd[1]: run-refc7be89611b494bbf88e25168b03890.service: Succeeded.
Apr 02 12:14:07 managed-node2 platform-python[14000]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Apr 02 12:14:08 managed-node2 platform-python[14152]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True
Apr 02 12:14:09 managed-node2 kernel: SELinux: Converting 460 SID table entries...
Apr 02 12:14:09 managed-node2 kernel: SELinux: policy capability network_peer_controls=1
Apr 02 12:14:09 managed-node2 kernel: SELinux: policy capability open_perms=1
Apr 02 12:14:09 managed-node2 kernel: SELinux: policy capability extended_socket_class=1
Apr 02 12:14:09 managed-node2 kernel: SELinux: policy capability always_check_network=0
Apr 02 12:14:09 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1
Apr 02 12:14:09 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1
Apr 02 12:14:09 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Apr 02 12:14:10 managed-node2 platform-python[14279]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
Apr 02 12:14:14 managed-node2 platform-python[14402]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:14:16 managed-node2 platform-python[14527]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:14:16 managed-node2 platform-python[14650]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:14:16 managed-node2 platform-python[14773]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Apr 02 12:14:17 managed-node2 platform-python[14872]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1743610456.6449945-9359-205024044517212/source _original_basename=tmp4v8idla_ follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:14:17 managed-node2 platform-python[14997]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Apr 02 12:14:18 managed-node2 kernel: evm: overlay not supported
Apr 02 12:14:18 managed-node2 systemd[1]: Created slice machine.slice.
Apr 02 12:14:18 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_2ece7bcadf2eddade61e07eca32f44abb4ac4beb0eae18d4bfd24840a8730932.slice.
Apr 02 12:14:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay-2eaca8d5d5cfedb57c5baf21928842ddc832deade0b4b24efc999aebcbc9acf4-merged.mount: Succeeded.
Apr 02 12:14:20 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
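The podman_play call above registers the pod from the copied kube file without starting it (state=created, used here for the no-pull test case). In playbook form, approximately:

- name: Deploy the kube spec without starting it
  containers.podman.podman_play:
    kube_file: /etc/containers/ansible-kubernetes.d/nopull.yml
    state: created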
Apr 02 12:14:23 managed-node2 platform-python[15323]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:14:25 managed-node2 platform-python[15452]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:14:27 managed-node2 platform-python[15577]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:14:30 managed-node2 platform-python[15701]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Apr 02 12:14:31 managed-node2 platform-python[15828]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Apr 02 12:14:32 managed-node2 platform-python[15955]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Apr 02 12:14:34 managed-node2 platform-python[16078]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:14:37 managed-node2 platform-python[16202]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:14:40 managed-node2 platform-python[16326]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Apr 02 12:14:42 managed-node2 platform-python[16478]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True
Apr 02 12:14:42 managed-node2 platform-python[16601]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
Apr 02 12:14:47 managed-node2 platform-python[16724]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:14:49 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
Apr 02 12:14:49 managed-node2 platform-python[16986]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:14:49 managed-node2 platform-python[17109]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:14:50 managed-node2 platform-python[17232]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Apr 02 12:14:50 managed-node2 platform-python[17331]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1743610490.0738642-10334-156459945632354/source _original_basename=tmps2n5m3ra follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:14:51 managed-node2 platform-python[17456]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Apr 02 12:14:51 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_29b9fe3cb454877533169680e06b10334d735efd7dd3a9e0c03db08bdbb51191.slice.
Apr 02 12:14:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
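local_seport is the selinux role's internal port-labeling module; its public interface is the role's selinux_ports variable. A sketch of the equivalent call (variable layout per the linux-system-roles selinux documentation; treat the exact keys as an assumption for this version):

- name: Label the test ports as http_port_t
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.selinux
  vars:
    selinux_ports:
      - ports: 15001-15003
        proto: tcp
        setype: http_port_t
        state: present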
Apr 02 12:14:56 managed-node2 platform-python[17743]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:14:57 managed-node2 platform-python[17872]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:15:00 managed-node2 platform-python[17997]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:15:03 managed-node2 platform-python[18121]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Apr 02 12:15:03 managed-node2 platform-python[18248]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Apr 02 12:15:04 managed-node2 platform-python[18375]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Apr 02 12:15:06 managed-node2 platform-python[18498]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:15:09 managed-node2 platform-python[18622]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:15:12 managed-node2 platform-python[18746]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Apr 02 12:15:14 managed-node2 platform-python[18898]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True
Apr 02 12:15:14 managed-node2 platform-python[19021]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
Apr 02 12:15:19 managed-node2 platform-python[19144]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:15:20 managed-node2 platform-python[19269]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:15:21 managed-node2 platform-python[19393]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Apr 02 12:15:22 managed-node2 platform-python[19520]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:15:22 managed-node2 platform-python[19645]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Apr 02 12:15:22 managed-node2 platform-python[19645]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/nopull.yml
Apr 02 12:15:22 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_2ece7bcadf2eddade61e07eca32f44abb4ac4beb0eae18d4bfd24840a8730932.slice.
Apr 02 12:15:22 managed-node2 systemd[1]: machine-libpod_pod_2ece7bcadf2eddade61e07eca32f44abb4ac4beb0eae18d4bfd24840a8730932.slice: Consumed 0 CPU time
Apr 02 12:15:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
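The teardown above first derives the per-file unit name with systemd-escape and then stops and disables that unit before removing the pod. Roughly (the registered variable name is hypothetical):

- name: Compute the podman-kube unit name for the kube file
  ansible.builtin.command: >-
    systemd-escape --template podman-kube@.service
    /etc/containers/ansible-kubernetes.d/nopull.yml
  register: __kube_unit  # hypothetical variable name
  changed_when: false

- name: Stop and disable the generated unit
  ansible.builtin.systemd:
    name: "{{ __kube_unit.stdout }}"
    state: stopped
    enabled: false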
Apr 02 12:15:23 managed-node2 platform-python[19783]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:15:23 managed-node2 platform-python[19906]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:15:26 managed-node2 platform-python[20161]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:15:28 managed-node2 platform-python[20290]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:15:30 managed-node2 platform-python[20415]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:15:34 managed-node2 platform-python[20539]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Apr 02 12:15:34 managed-node2 platform-python[20666]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Apr 02 12:15:35 managed-node2 platform-python[20793]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Apr 02 12:15:37 managed-node2 platform-python[20916]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:15:40 managed-node2 platform-python[21040]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:15:43 managed-node2 platform-python[21164]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Apr 02 12:15:45 managed-node2 platform-python[21316]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True
Apr 02 12:15:46 managed-node2 platform-python[21439]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
Apr 02 12:15:50 managed-node2 platform-python[21562]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:15:52 managed-node2 platform-python[21687]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:15:52 managed-node2 platform-python[21811]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Apr 02 12:15:53 managed-node2 platform-python[21938]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:15:53 managed-node2 platform-python[22063]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Apr 02 12:15:53 managed-node2 platform-python[22063]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/bogus.yml
Apr 02 12:15:53 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_29b9fe3cb454877533169680e06b10334d735efd7dd3a9e0c03db08bdbb51191.slice.
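The same removal sequence then runs for bogus.yml: tear down the pod with state=absent, delete the kube file, and prune dangling images. A condensed sketch:

- name: Remove the pod defined by the kube file
  containers.podman.podman_play:
    kube_file: /etc/containers/ansible-kubernetes.d/bogus.yml
    state: absent

- name: Remove the kube file itself
  ansible.builtin.file:
    path: /etc/containers/ansible-kubernetes.d/bogus.yml
    state: absent

- name: Prune now-unused images
  ansible.builtin.command: podman image prune -f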
Apr 02 12:15:53 managed-node2 systemd[1]: machine-libpod_pod_29b9fe3cb454877533169680e06b10334d735efd7dd3a9e0c03db08bdbb51191.slice: Consumed 0 CPU time
Apr 02 12:15:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
Apr 02 12:15:54 managed-node2 platform-python[22202]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:15:54 managed-node2 platform-python[22325]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:15:58 managed-node2 platform-python[22580]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:15:59 managed-node2 platform-python[22709]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:16:02 managed-node2 platform-python[22834]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:16:05 managed-node2 platform-python[22958]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Apr 02 12:16:06 managed-node2 platform-python[23085]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Apr 02 12:16:06 managed-node2 platform-python[23212]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Apr 02 12:16:09 managed-node2 platform-python[23335]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:16:12 managed-node2 platform-python[23459]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:16:15 managed-node2 platform-python[23583]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Apr 02 12:16:16 managed-node2 platform-python[23735]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True
Apr 02 12:16:17 managed-node2 platform-python[23858]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
Apr 02 12:16:21 managed-node2 platform-python[23981]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None
Apr 02 12:16:22 managed-node2 platform-python[24105]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:16:22 managed-node2 platform-python[24230]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:16:23 managed-node2 platform-python[24354]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:16:24 managed-node2 platform-python[24478]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:16:24 managed-node2 platform-python[24602]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None
Apr 02 12:16:24 managed-node2 systemd[1]: Created slice User Slice of UID 3001.
Apr 02 12:16:24 managed-node2 systemd[1]: Starting User runtime directory /run/user/3001...
Apr 02 12:16:24 managed-node2 systemd[1]: Started User runtime directory /run/user/3001.
Apr 02 12:16:24 managed-node2 systemd[1]: Starting User Manager for UID 3001...
Apr 02 12:16:24 managed-node2 systemd[24608]: pam_unix(systemd-user:session): session opened for user podman_basic_user by (uid=0)
Apr 02 12:16:24 managed-node2 systemd[24608]: Reached target Paths.
Apr 02 12:16:24 managed-node2 systemd[24608]: Starting D-Bus User Message Bus Socket.
Apr 02 12:16:24 managed-node2 systemd[24608]: Started Mark boot as successful after the user session has run 2 minutes.
Apr 02 12:16:24 managed-node2 systemd[24608]: Reached target Timers.
Apr 02 12:16:24 managed-node2 systemd[24608]: Listening on D-Bus User Message Bus Socket.
Apr 02 12:16:24 managed-node2 systemd[24608]: Reached target Sockets.
Apr 02 12:16:24 managed-node2 systemd[24608]: Reached target Basic System.
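Switching to the rootless case: the role checks the user's subuid/subgid ranges with getsubids and enables lingering so that the per-user systemd instance seen starting above outlives login sessions. The creates= guard in the logged command makes the step idempotent; in playbook form, roughly:

- name: Enable lingering for the rootless user
  ansible.builtin.command: loginctl enable-linger podman_basic_user
  args:
    creates: /var/lib/systemd/linger/podman_basic_user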
Apr 02 12:16:24 managed-node2 systemd[24608]: Reached target Default.
Apr 02 12:16:24 managed-node2 systemd[24608]: Startup finished in 27ms.
Apr 02 12:16:24 managed-node2 systemd[1]: Started User Manager for UID 3001.
Apr 02 12:16:25 managed-node2 platform-python[24742]: ansible-file Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:16:25 managed-node2 platform-python[24865]: ansible-file Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:16:26 managed-node2 sudo[24988]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-biynpamwfkotledtdcmfrbbtajacguqn ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610585.993839-13138-274383557219687/AnsiballZ_podman_image.py'
Apr 02 12:16:26 managed-node2 sudo[24988]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Apr 02 12:16:26 managed-node2 systemd[24608]: Started D-Bus User Message Bus.
Apr 02 12:16:26 managed-node2 systemd[24608]: Created slice user.slice.
Apr 02 12:16:26 managed-node2 systemd[24608]: Started podman-25000.scope.
Apr 02 12:16:26 managed-node2 systemd[24608]: Started podman-pause-72b6ea71.scope.
Apr 02 12:16:26 managed-node2 systemd[24608]: Started podman-25017.scope.
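The sudo entry above is Ansible becoming podman_basic_user to run containers.podman.podman_image inside that user's session; note the injected XDG_RUNTIME_DIR pointing at the user runtime directory. A sketch (the image name is not visible in this log, so the one below is a placeholder):

- name: Pull the test image as the rootless user
  containers.podman.podman_image:
    name: quay.io/example/image  # placeholder; the real image is not shown in this log
  become: true
  become_user: podman_basic_user
  environment:
    XDG_RUNTIME_DIR: /run/user/3001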
Apr 02 12:16:28 managed-node2 systemd[24608]: Started podman-25032.scope.
Apr 02 12:16:28 managed-node2 sudo[24988]: pam_unix(sudo:session): session closed for user podman_basic_user
Apr 02 12:16:28 managed-node2 platform-python[25161]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:16:29 managed-node2 platform-python[25284]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:16:29 managed-node2 platform-python[25407]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Apr 02 12:16:29 managed-node2 platform-python[25506]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1743610589.315067-13234-160122669195990/source _original_basename=tmp_14yim5_ follow=False checksum=4ad2273e81c5630cfdda46a5cba365174cbe07a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:16:30 managed-node2 sudo[25631]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-efmiubrwlafglgzefjroxczqdjzuyfqs ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610589.970394-13261-74935529962591/AnsiballZ_podman_play.py'
Apr 02 12:16:30 managed-node2 sudo[25631]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Apr 02 12:16:30 managed-node2 platform-python[25634]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Apr 02 12:16:30 managed-node2 systemd[24608]: Started podman-25642.scope.
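The rootless deployment itself is the same podman_play module, run under become_user with state=started and debug logging enabled, matching the logged arguments:

- name: Start the httpd1 pod as podman_basic_user
  containers.podman.podman_play:
    kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
    state: started
    debug: true
    log_level: debug
  become: true
  become_user: podman_basic_user
  environment:
    XDG_RUNTIME_DIR: /run/user/3001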
Apr 02 12:16:30 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Apr 02 12:16:30 managed-node2 systemd[24608]: Started rootless-netns-42f78542.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:16:30 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Apr 02 12:16:30 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth8a97fc83: link is not ready Apr 02 12:16:30 managed-node2 kernel: cni-podman1: port 1(veth8a97fc83) entered blocking state Apr 02 12:16:30 managed-node2 kernel: cni-podman1: port 1(veth8a97fc83) entered disabled state Apr 02 12:16:30 managed-node2 kernel: device veth8a97fc83 entered promiscuous mode Apr 02 12:16:30 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Apr 02 12:16:30 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Apr 02 12:16:30 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth8a97fc83: link becomes ready Apr 02 12:16:30 managed-node2 kernel: cni-podman1: port 1(veth8a97fc83) entered blocking state Apr 02 12:16:30 managed-node2 kernel: cni-podman1: port 1(veth8a97fc83) entered forwarding state Apr 02 12:16:30 managed-node2 dnsmasq[25830]: listening on cni-podman1(#3): 10.89.0.1 Apr 02 12:16:30 managed-node2 dnsmasq[25832]: started, version 2.79 cachesize 150 Apr 02 12:16:30 managed-node2 dnsmasq[25832]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Apr 02 12:16:30 managed-node2 dnsmasq[25832]: using local addresses only for domain dns.podman Apr 02 12:16:30 managed-node2 dnsmasq[25832]: reading /etc/resolv.conf Apr 02 12:16:30 managed-node2 dnsmasq[25832]: using local addresses only for domain dns.podman Apr 02 12:16:30 managed-node2 dnsmasq[25832]: using nameserver 10.0.2.3#53 Apr 02 12:16:30 managed-node2 dnsmasq[25832]: using nameserver 10.29.169.13#53 Apr 02 12:16:30 managed-node2 dnsmasq[25832]: using nameserver 10.29.170.12#53 Apr 02 12:16:30 managed-node2 dnsmasq[25832]: using nameserver 10.2.32.1#53 Apr 02 12:16:30 managed-node2 dnsmasq[25832]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses Apr 02 12:16:30 managed-node2 conmon[25845]: conmon 15e4acc229ca084a16f1 : failed to write to /proc/self/oom_score_adj: Permission denied Apr 02 12:16:30 managed-node2 conmon[25846]: conmon 15e4acc229ca084a16f1 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Apr 02 12:16:30 managed-node2 conmon[25846]: conmon 15e4acc229ca084a16f1 : terminal_ctrl_fd: 14 Apr 02 12:16:30 managed-node2 conmon[25846]: conmon 15e4acc229ca084a16f1 : winsz read side: 17, winsz write side: 18 Apr 02 12:16:30 managed-node2 conmon[25846]: conmon 15e4acc229ca084a16f1 : container PID: 25856 Apr 02 12:16:30 managed-node2 conmon[25866]: conmon f33d5937cfef3fa5a89e : failed to write to /proc/self/oom_score_adj: Permission denied Apr 02 12:16:30 managed-node2 conmon[25867]: conmon f33d5937cfef3fa5a89e : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Apr 02 12:16:30 managed-node2 conmon[25867]: conmon f33d5937cfef3fa5a89e : terminal_ctrl_fd: 13 Apr 02 12:16:30 managed-node2 conmon[25867]: conmon f33d5937cfef3fa5a89e : winsz read side: 16, winsz write side: 17 Apr 02 12:16:31 managed-node2 conmon[25867]: conmon 
f33d5937cfef3fa5a89e : container PID: 25877 Apr 02 12:16:31 managed-node2 platform-python[25634]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Apr 02 12:16:31 managed-node2 platform-python[25634]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 4b39b176161d4c697539de0d2e9a0599176c2647a6d2703b4808ef9a140b8567 Container: f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b Apr 02 12:16:31 managed-node2 platform-python[25634]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-04-02T12:16:30-04:00" level=info msg="/bin/podman filtering at log level debug" time="2025-04-02T12:16:30-04:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-04-02T12:16:30-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-04-02T12:16:30-04:00" level=info msg="Using sqlite as database backend" time="2025-04-02T12:16:30-04:00" level=debug msg="Using graph driver overlay" time="2025-04-02T12:16:30-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-04-02T12:16:30-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-04-02T12:16:30-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-04-02T12:16:30-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-04-02T12:16:30-04:00" level=debug msg="Using transient store: false" time="2025-04-02T12:16:30-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-04-02T12:16:30-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-04-02T12:16:30-04:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-04-02T12:16:30-04:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-04-02T12:16:30-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-04-02T12:16:30-04:00" level=debug msg="Initializing event backend file" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid 
executable found for OCI runtime kata: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-04-02T12:16:30-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" time="2025-04-02T12:16:30-04:00" level=info msg="Setting parallel job count to 7" time="2025-04-02T12:16:30-04:00" level=debug msg="Successfully loaded 1 networks" time="2025-04-02T12:16:30-04:00" level=debug msg="found free device name cni-podman1" time="2025-04-02T12:16:30-04:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2025-04-02T12:16:30-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-04-02 12:16:30.319987309 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-04-02T12:16:30-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-04-02T12:16:30-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-04-02T12:16:30-04:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-04-02T12:16:30-04:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" does not resolve to an image ID" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="FROM \"scratch\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-04-02T12:16:30-04:00" level=debug msg="Check for idmapped mounts support " time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2025-04-02T12:16:30-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/94d13db38c50b49ef19d77e255f7b915a1f3211e6d62febb29445663a8017508/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/94d13db38c50b49ef19d77e255f7b915a1f3211e6d62febb29445663a8017508/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/94d13db38c50b49ef19d77e255f7b915a1f3211e6d62febb29445663a8017508/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c326,c771\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Container ID: 91dce24906e928e486c63111f49274b7015b4c6e7d554ff17a8f8b068e51a5dc" time="2025-04-02T12:16:30-04:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2025-04-02T12:16:30-04:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil)}" time="2025-04-02T12:16:30-04:00" level=debug msg="added content file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd" time="2025-04-02T12:16:30-04:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2025-04-02T12:16:30-04:00" level=debug msg="COMMIT localhost/podman-pause:4.9.4-dev-1708535009" time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-04-02T12:16:30-04:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-04-02T12:16:30-04:00" level=debug msg="committing image with reference \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\" is allowed by policy" time="2025-04-02T12:16:30-04:00" level=debug msg="layer list: [\"94d13db38c50b49ef19d77e255f7b915a1f3211e6d62febb29445663a8017508\"]" time="2025-04-02T12:16:30-04:00" level=debug msg="using \"/var/tmp/buildah1081529911\" to hold temporary data" time="2025-04-02T12:16:30-04:00" level=debug msg="Tar with 
options on /home/podman_basic_user/.local/share/containers/storage/overlay/94d13db38c50b49ef19d77e255f7b915a1f3211e6d62febb29445663a8017508/diff" time="2025-04-02T12:16:30-04:00" level=debug msg="layer \"94d13db38c50b49ef19d77e255f7b915a1f3211e6d62febb29445663a8017508\" size is 767488 bytes, uncompressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690, possibly-compressed digest sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690" time="2025-04-02T12:16:30-04:00" level=debug msg="OCIv1 config = {\"created\":\"2025-04-02T16:16:30.452973542Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2025-04-02T16:16:30.452504898Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-04-02T16:16:30.457979708Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-04-02T12:16:30-04:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\",\"size\":668},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\",\"size\":767488}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2025-04-02T12:16:30-04:00" level=debug msg="Docker v2s2 config = {\"created\":\"2025-04-02T16:16:30.452973542Z\",\"container\":\"91dce24906e928e486c63111f49274b7015b4c6e7d554ff17a8f8b068e51a5dc\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":[],\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.33.5\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"]},\"history\":[{\"created\":\"2025-04-02T16:16:30.452504898Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:b0770577934d9536a010638e2bd49b7571c5d0a878a528b9fdba01abe9f2d5dd in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-04-02T16:16:30.457979708Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-04-02T12:16:30-04:00" level=debug msg="Docker v2s2 manifest = 
{\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1342,\"digest\":\"sha256:d13f2d808cfa70e6213b7672ef9edf673d4884b1e839e40654941f83b0fccedd\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":767488,\"digest\":\"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"}]}" time="2025-04-02T12:16:30-04:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2025-04-02T12:16:30-04:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2025-04-02T12:16:30-04:00" level=debug msg=" Using transport \"containers-storage\" policy section " time="2025-04-02T12:16:30-04:00" level=debug msg=" Requirement 0: allowed" time="2025-04-02T12:16:30-04:00" level=debug msg="Overall: allowed" time="2025-04-02T12:16:30-04:00" level=debug msg="start reading config" time="2025-04-02T12:16:30-04:00" level=debug msg="finished reading config" time="2025-04-02T12:16:30-04:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2025-04-02T12:16:30-04:00" level=debug msg="... will first try using the original manifest unmodified" time="2025-04-02T12:16:30-04:00" level=debug msg="Checking if we can reuse blob sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2025-04-02T12:16:30-04:00" level=debug msg="reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"" time="2025-04-02T12:16:30-04:00" level=debug msg="No compression detected" time="2025-04-02T12:16:30-04:00" level=debug msg="Using original blob without modification" time="2025-04-02T12:16:30-04:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690/diff" time="2025-04-02T12:16:30-04:00" level=debug msg="finished reading layer \"sha256:d2d0eb8a68f8cf95b9c7068be2f59961cd9dc579139bd79dee5eb65ea6de5690\"" time="2025-04-02T12:16:30-04:00" level=debug msg="No compression detected" time="2025-04-02T12:16:30-04:00" level=debug msg="Compression change for blob sha256:1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2025-04-02T12:16:30-04:00" level=debug msg="Using original blob without modification" time="2025-04-02T12:16:30-04:00" level=debug msg="setting image creation date to 2025-04-02 16:16:30.452973542 +0000 UTC" time="2025-04-02T12:16:30-04:00" level=debug msg="created new image ID \"1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\" with metadata \"{}\"" time="2025-04-02T12:16:30-04:00" level=debug msg="added name \"localhost/podman-pause:4.9.4-dev-1708535009\" to image \"1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into 
\"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:4.9.4-dev-1708535009\"" time="2025-04-02T12:16:30-04:00" level=debug msg="printing final image id \"1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Pod using bridge network mode" time="2025-04-02T12:16:30-04:00" level=debug msg="Got pod cgroup as /libpod_parent/4b39b176161d4c697539de0d2e9a0599176c2647a6d2703b4808ef9a140b8567" time="2025-04-02T12:16:30-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543)" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543" time="2025-04-02T12:16:30-04:00" level=debug msg="using systemd mode: false" time="2025-04-02T12:16:30-04:00" level=debug msg="setting container name 4b39b176161d-infra" time="2025-04-02T12:16:30-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Allocated lock 1 for container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25" time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:1bcef46a3de88c89179385f57e50432c95114cbf09db57a372d2b16d05481543\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Created container \"15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Container \"15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25\" has work directory 
\"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Container \"15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25\" has run directory \"/run/user/3001/containers/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-04-02T12:16:30-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:30-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:30-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:30-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:30-04:00" level=debug msg="using systemd mode: false" time="2025-04-02T12:16:30-04:00" level=debug msg="adding container to pod httpd1" time="2025-04-02T12:16:30-04:00" level=debug msg="setting container name httpd1-httpd1" 
time="2025-04-02T12:16:30-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-04-02T12:16:30-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding mount /proc" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding mount /dev" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding mount /dev/pts" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding mount /dev/mqueue" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding mount /sys" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-04-02T12:16:30-04:00" level=debug msg="Allocated lock 2 for container f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b" time="2025-04-02T12:16:30-04:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Created container \"f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Container \"f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Container \"f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b\" has run directory \"/run/user/3001/containers/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Strongconnecting node 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25" time="2025-04-02T12:16:30-04:00" level=debug msg="Pushed 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 onto stack" time="2025-04-02T12:16:30-04:00" level=debug msg="Finishing node 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25. Popped 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 off stack" time="2025-04-02T12:16:30-04:00" level=debug msg="Strongconnecting node f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b" time="2025-04-02T12:16:30-04:00" level=debug msg="Pushed f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b onto stack" time="2025-04-02T12:16:30-04:00" level=debug msg="Finishing node f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b. 
Popped f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b off stack" time="2025-04-02T12:16:30-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/FZGFET7VX4DSYH6K5NJN7HBLIW,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/add239a525a71d20e89f26cdceeffdb465defe64274853a2d37a62c98061a689/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/add239a525a71d20e89f26cdceeffdb465defe64274853a2d37a62c98061a689/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c426,c636\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Mounted container \"15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/add239a525a71d20e89f26cdceeffdb465defe64274853a2d37a62c98061a689/merged\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Created root filesystem for container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 at /home/podman_basic_user/.local/share/containers/storage/overlay/add239a525a71d20e89f26cdceeffdb465defe64274853a2d37a62c98061a689/merged" time="2025-04-02T12:16:30-04:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-5a6e59dd-c7c3-d14f-d70f-ebcf9881e68b for container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25" time="2025-04-02T12:16:30-04:00" level=debug msg="creating rootless network namespace with name \"rootless-netns-d22c9f230d0691b8f418\"" time="2025-04-02T12:16:30-04:00" level=debug msg="slirp4netns command: /bin/slirp4netns --disable-host-loopback --mtu=65520 --enable-sandbox --enable-seccomp --enable-ipv6 -c -r 3 --netns-type=path /run/user/3001/netns/rootless-netns-d22c9f230d0691b8f418 tap0" time="2025-04-02T12:16:30-04:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" time="2025-04-02T12:16:30-04:00" level=debug msg="cni result for container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:52:02:0c:1f:15:9a Sandbox:} {Name:veth8a97fc83 Mac:16:84:6e:4b:c8:f3 Sandbox:} {Name:eth0 Mac:8e:8c:3c:12:ee:fb Sandbox:/run/user/3001/netns/netns-5a6e59dd-c7c3-d14f-d70f-ebcf9881e68b}] [{Version:4 Interface:0xc000cb7978 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport: time=\"2025-04-02T12:16:30-04:00\" level=info msg=\"Starting parent driver\"\n" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport: time=\"2025-04-02T12:16:30-04:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport365186404/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport365186404/.bp.sock]\"\n" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport: time=\"2025-04-02T12:16:30-04:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport: time=\"2025-04-02T12:16:30-04:00\" level=info msg=\"Waiting for initComplete\"\n" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport: time=\"2025-04-02T12:16:30-04:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\ntime=\"2025-04-02T12:16:30-04:00\" level=info msg=\"Exposing ports [{ 80 15001 1 
tcp}]\"\n" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport: time=\"2025-04-02T12:16:30-04:00\" level=info msg=Ready\n" time="2025-04-02T12:16:30-04:00" level=debug msg="rootlessport is ready" time="2025-04-02T12:16:30-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-04-02T12:16:30-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-04-02T12:16:30-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/add239a525a71d20e89f26cdceeffdb465defe64274853a2d37a62c98061a689/merged\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Created OCI spec for container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata/config.json" time="2025-04-02T12:16:30-04:00" level=debug msg="Got pod cgroup as " time="2025-04-02T12:16:30-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-04-02T12:16:30-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 -u 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata -p /run/user/3001/containers/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata/pidfile -n 4b39b176161d-infra --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25]" time="2025-04-02T12:16:30-04:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for cpu: mkdir /sys/fs/cgroup/cpu/libpod_parent: permission denied" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-04-02T12:16:30-04:00" level=debug msg="Received: 25856" time="2025-04-02T12:16:30-04:00" 
level=info msg="Got Conmon PID as 25846" time="2025-04-02T12:16:30-04:00" level=debug msg="Created container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 in OCI runtime" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-04-02T12:16:30-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-04-02T12:16:30-04:00" level=debug msg="Starting container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25 with command [/catatonit -P]" time="2025-04-02T12:16:30-04:00" level=debug msg="Started container 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25" time="2025-04-02T12:16:30-04:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/TQMG2VJFZSJRG4TRP4BZLQYKKC,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a49aabc4e6b99e922d2ecc9268062c0213aa47b643a3f64082785cdb629d82bf/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/a49aabc4e6b99e922d2ecc9268062c0213aa47b643a3f64082785cdb629d82bf/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c426,c636\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Mounted container \"f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/a49aabc4e6b99e922d2ecc9268062c0213aa47b643a3f64082785cdb629d82bf/merged\"" time="2025-04-02T12:16:30-04:00" level=debug msg="Created root filesystem for container f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b at /home/podman_basic_user/.local/share/containers/storage/overlay/a49aabc4e6b99e922d2ecc9268062c0213aa47b643a3f64082785cdb629d82bf/merged" time="2025-04-02T12:16:30-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-04-02T12:16:30-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-04-02T12:16:30-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-04-02T12:16:30-04:00" level=debug msg="Created OCI spec for container f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata/config.json" time="2025-04-02T12:16:30-04:00" level=debug msg="Got pod cgroup as " time="2025-04-02T12:16:30-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-04-02T12:16:30-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b -u f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b -r /usr/bin/runc -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata -p /run/user/3001/containers/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --full-attach -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata/ctr.log --log-level debug --syslog --conmon-pidfile 
/run/user/3001/containers/overlay-containers/f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg cgroupfs --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b]" time="2025-04-02T12:16:30-04:00" level=info msg="Failed to add conmon to cgroupfs sandbox cgroup: creating cgroup for memory: mkdir /sys/fs/cgroup/memory/conmon: permission denied" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-04-02T12:16:31-04:00" level=debug msg="Received: 25877" time="2025-04-02T12:16:31-04:00" level=info msg="Got Conmon PID as 25867" time="2025-04-02T12:16:31-04:00" level=debug msg="Created container f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b in OCI runtime" time="2025-04-02T12:16:31-04:00" level=debug msg="Starting container f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b with command [/bin/busybox-extras httpd -f -p 80]" time="2025-04-02T12:16:31-04:00" level=debug msg="Started container f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b" time="2025-04-02T12:16:31-04:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-04-02T12:16:31-04:00" level=debug msg="Shutting down engines" Apr 02 12:16:31 managed-node2 platform-python[25634]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Apr 02 12:16:31 managed-node2 sudo[25631]: pam_unix(sudo:session): session closed for user podman_basic_user Apr 02 12:16:31 managed-node2 sudo[26008]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ejutcfswhhtdwgrmtppjnksdmjzreace ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610591.2463872-13292-275516885660989/AnsiballZ_systemd.py' Apr 02 12:16:31 managed-node2 sudo[26008]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Apr 02 12:16:31 managed-node2 platform-python[26011]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Apr 02 12:16:31 managed-node2 systemd[24608]: Reloading. 
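The ansible-systemd call above (scope=user, daemon_reload=True) is roughly equivalent to the following, using the same sudo/XDG_RUNTIME_DIR pattern as the BECOME lines in this log (a sketch, not the module's literal code path):

# Reload the per-user systemd manager for uid 3001; XDG_RUNTIME_DIR is
# required so systemctl can reach the user instance's bus.
sudo -u podman_basic_user XDG_RUNTIME_DIR=/run/user/3001 \
    systemctl --user daemon-reload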
Apr 02 12:16:31 managed-node2 sudo[26008]: pam_unix(sudo:session): session closed for user podman_basic_user Apr 02 12:16:32 managed-node2 sudo[26145]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vwjyzveimsxdulymzvnhscdgtexgutdd ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610591.946524-13319-189514749230914/AnsiballZ_systemd.py' Apr 02 12:16:32 managed-node2 sudo[26145]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Apr 02 12:16:32 managed-node2 platform-python[26148]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Apr 02 12:16:32 managed-node2 systemd[24608]: Reloading. Apr 02 12:16:32 managed-node2 sudo[26145]: pam_unix(sudo:session): session closed for user podman_basic_user Apr 02 12:16:32 managed-node2 dnsmasq[25832]: listening on cni-podman1(#3): fe80::5002:cff:fe1f:159a%cni-podman1 Apr 02 12:16:32 managed-node2 sudo[26284]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aozzuvnjupmwncpwdhhkxqoswbtzthls ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610592.5135815-13343-271250773017074/AnsiballZ_systemd.py' Apr 02 12:16:32 managed-node2 sudo[26284]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0) Apr 02 12:16:32 managed-node2 platform-python[26287]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Apr 02 12:16:32 managed-node2 systemd[24608]: Created slice podman\x2dkube.slice. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:16:32 managed-node2 systemd[24608]: Starting A template for running K8s workloads via podman-kube-play... -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. 
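The instance name podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service is the systemd-escaped path of the kube file: each "/" becomes "-" and the literal "-" inside "ansible-kubernetes.d" becomes \x2d. The enable task followed by the start task above corresponds roughly to (sketch; the role issues enable and start as separate steps, combined here with --now):

# systemd-escape reproduces the instance name seen in the log:
systemd-escape --template=podman-kube@.service \
    /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
# enable and start it as the rootless user:
sudo -u podman_basic_user XDG_RUNTIME_DIR=/run/user/3001 \
    systemctl --user enable --now \
    'podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service'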
Apr 02 12:16:32 managed-node2 conmon[25867]: conmon f33d5937cfef3fa5a89e : container 25877 exited with status 137 Apr 02 12:16:32 managed-node2 conmon[25846]: conmon 15e4acc229ca084a16f1 : container 25856 exited with status 137 Apr 02 12:16:32 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:32-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b)" Apr 02 12:16:32 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:32-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=info msg="Using sqlite as database backend" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25)" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=info msg="Using sqlite as database backend" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using graph driver overlay" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using run root /run/user/3001/containers" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using transient store: false" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: 
time="2025-04-02T12:16:33-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Cached value indicated that overlay is supported" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Cached value indicated that overlay is supported" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Cached value indicated that metacopy is not being used" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Cached value indicated that native-diff is usable" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Initializing event backend file" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=info msg="Setting parallel job count to 7" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using graph driver overlay" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using run root /run/user/3001/containers" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Apr 02 12:16:33 
managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using transient store: false" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Cached value indicated that overlay is supported" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Cached value indicated that overlay is supported" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Cached value indicated that metacopy is not being used" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Cached value indicated that native-diff is usable" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Initializing event backend file" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\"" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=info msg="Setting parallel job count to 7" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman 
--root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup f33d5937cfef3fa5a89ea2c4ac54c51179adcd8499fffafaffd05312a248e69b)" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26313]: time="2025-04-02T12:16:33-04:00" level=debug msg="Shutting down engines" Apr 02 12:16:33 managed-node2 kernel: cni-podman1: port 1(veth8a97fc83) entered disabled state Apr 02 12:16:33 managed-node2 kernel: device veth8a97fc83 left promiscuous mode Apr 02 12:16:33 managed-node2 kernel: cni-podman1: port 1(veth8a97fc83) entered disabled state Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager cgroupfs --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend cni --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --events-backend file --syslog container cleanup 15e4acc229ca084a16f17415648aa21b70b7faad5558a647cad1bf6c866e2c25)" Apr 02 12:16:33 managed-node2 /usr/bin/podman[26321]: time="2025-04-02T12:16:33-04:00" level=debug msg="Shutting down engines" Apr 02 12:16:33 managed-node2 podman[26293]: Pods stopped: Apr 02 12:16:33 managed-node2 podman[26293]: 4b39b176161d4c697539de0d2e9a0599176c2647a6d2703b4808ef9a140b8567 Apr 02 12:16:33 managed-node2 podman[26293]: Pods removed: Apr 02 12:16:33 managed-node2 podman[26293]: 4b39b176161d4c697539de0d2e9a0599176c2647a6d2703b4808ef9a140b8567 Apr 02 12:16:33 managed-node2 podman[26293]: Secrets removed: Apr 02 12:16:33 managed-node2 podman[26293]: Volumes removed: Apr 02 12:16:33 managed-node2 systemd[24608]: Started rootless-netns-b782f79d.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
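The "Pods stopped: / Pods removed:" banner above is podman's own output as the rootless user's podman-kube@ template unit (re)creates the pod. The exact ExecStart is not captured in this log; as a rough sketch of its shape (flags and kube-file path assumed, not taken from this run):

    # hypothetical shape of the podman-kube@ template's start command
    podman play kube --replace <kube-file>.yml

With --replace, podman first stops and removes any pod previously created from the same file, which matches the stop/remove/re-create sequence recorded here.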
Apr 02 12:16:33 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethef01b4b2: link is not ready
Apr 02 12:16:33 managed-node2 kernel: cni-podman1: port 1(vethef01b4b2) entered blocking state
Apr 02 12:16:33 managed-node2 kernel: cni-podman1: port 1(vethef01b4b2) entered disabled state
Apr 02 12:16:33 managed-node2 kernel: device vethef01b4b2 entered promiscuous mode
Apr 02 12:16:33 managed-node2 kernel: cni-podman1: port 1(vethef01b4b2) entered blocking state
Apr 02 12:16:33 managed-node2 kernel: cni-podman1: port 1(vethef01b4b2) entered forwarding state
Apr 02 12:16:33 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethef01b4b2: link becomes ready
Apr 02 12:16:33 managed-node2 dnsmasq[26541]: listening on cni-podman1(#3): 10.89.0.1
Apr 02 12:16:33 managed-node2 dnsmasq[26543]: started, version 2.79 cachesize 150
Apr 02 12:16:33 managed-node2 dnsmasq[26543]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify
Apr 02 12:16:33 managed-node2 dnsmasq[26543]: using local addresses only for domain dns.podman
Apr 02 12:16:33 managed-node2 dnsmasq[26543]: reading /etc/resolv.conf
Apr 02 12:16:33 managed-node2 dnsmasq[26543]: using local addresses only for domain dns.podman
Apr 02 12:16:33 managed-node2 dnsmasq[26543]: using nameserver 10.0.2.3#53
Apr 02 12:16:33 managed-node2 dnsmasq[26543]: using nameserver 10.29.169.13#53
Apr 02 12:16:33 managed-node2 dnsmasq[26543]: using nameserver 10.29.170.12#53
Apr 02 12:16:33 managed-node2 dnsmasq[26543]: using nameserver 10.2.32.1#53
Apr 02 12:16:33 managed-node2 dnsmasq[26543]: read /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Apr 02 12:16:33 managed-node2 podman[26293]: Pod:
Apr 02 12:16:33 managed-node2 podman[26293]: 1af1ad0705c8956c882808bd2ab12312c83b6d5a9c6877347208e42834fbcb5f
Apr 02 12:16:33 managed-node2 podman[26293]: Container:
Apr 02 12:16:33 managed-node2 podman[26293]: 4edea4b5f0ff84f387a18fab50507c3715f3b93f63e32f039c5fba33bdd00130
Apr 02 12:16:33 managed-node2 systemd[24608]: Started A template for running K8s workloads via podman-kube-play.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
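The dnsmasq instances above belong to the CNI dnsname plugin: each podman network with DNS enabled gets its own dnsmasq bound to the bridge gateway (10.89.0.1 here), serving the dns.podman domain from the addnhosts file it reads. To look at the same state from the rootless account (both paths taken from this log; run as podman_basic_user so the /run/user/3001 runtime dir applies):

    podman network inspect podman-default-kube-network
    cat /run/user/3001/containers/cni/dnsname/podman-default-kube-network/addnhosts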
Apr 02 12:16:33 managed-node2 sudo[26284]: pam_unix(sudo:session): session closed for user podman_basic_user
Apr 02 12:16:34 managed-node2 platform-python[26718]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Apr 02 12:16:34 managed-node2 dnsmasq[26543]: listening on cni-podman1(#3): fe80::3019:d4ff:fed7:bac8%cni-podman1
Apr 02 12:16:35 managed-node2 platform-python[26842]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:16:36 managed-node2 platform-python[26967]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:16:38 managed-node2 platform-python[27091]: ansible-file Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:16:38 managed-node2 platform-python[27214]: ansible-file Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:16:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:16:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:16:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
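The systemd-escape call above is how the role derives the per-file instance name for the podman-kube@.service template; its output is exactly the unit name that shows up later in this log:

    $ systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml
    podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service

In the escaped form '/' becomes '-' and a literal '-' becomes \x2d, so the unit name maps back to the kube file path unambiguously.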
Apr 02 12:16:40 managed-node2 platform-python[27504]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:16:40 managed-node2 platform-python[27627]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:16:41 managed-node2 platform-python[27750]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Apr 02 12:16:41 managed-node2 platform-python[27849]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1743610600.8094587-13586-112409744055035/source _original_basename=tmpwauj0d_8 follow=False checksum=a3811d4e9a8822a1bb1782651c45a7ba596de3a2 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:16:41 managed-node2 platform-python[27974]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Apr 02 12:16:41 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice.
-- Subject: Unit machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:41 managed-node2 NetworkManager[661]: [1743610601.9444] manager: (cni-podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)
Apr 02 12:16:41 managed-node2 NetworkManager[661]: [1743610601.9454] manager: (veth5b8cd7f1): new Veth device (/org/freedesktop/NetworkManager/Devices/4)
Apr 02 12:16:41 managed-node2 systemd-udevd[28021]: Using default interface naming scheme 'rhel-8.0'.
Apr 02 12:16:41 managed-node2 systemd-udevd[28021]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Apr 02 12:16:41 managed-node2 systemd-udevd[28021]: Could not generate persistent MAC address for cni-podman1: No such file or directory
Apr 02 12:16:41 managed-node2 systemd-udevd[28022]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
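Because this play kube run is rootful with the systemd cgroup manager, podman parents the pod under machine.slice as machine-libpod_pod_<pod-id>.slice, and the per-container conmon and runtime scopes seen below land inside that slice. The resulting tree can be browsed with, for example:

    systemd-cgls machine.slice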
Apr 02 12:16:41 managed-node2 systemd-udevd[28022]: Could not generate persistent MAC address for veth5b8cd7f1: No such file or directory
Apr 02 12:16:41 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth5b8cd7f1: link is not ready
Apr 02 12:16:41 managed-node2 kernel: cni-podman1: port 1(veth5b8cd7f1) entered blocking state
Apr 02 12:16:41 managed-node2 kernel: cni-podman1: port 1(veth5b8cd7f1) entered disabled state
Apr 02 12:16:41 managed-node2 kernel: device veth5b8cd7f1 entered promiscuous mode
Apr 02 12:16:41 managed-node2 NetworkManager[661]: [1743610601.9635] device (cni-podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external')
Apr 02 12:16:41 managed-node2 NetworkManager[661]: [1743610601.9640] device (cni-podman1): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external')
Apr 02 12:16:41 managed-node2 NetworkManager[661]: [1743610601.9647] device (cni-podman1): Activation: starting connection 'cni-podman1' (2e58c892-8a3b-47aa-a757-e872c94cb339)
Apr 02 12:16:41 managed-node2 NetworkManager[661]: [1743610601.9650] device (cni-podman1): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external')
Apr 02 12:16:41 managed-node2 NetworkManager[661]: [1743610601.9652] device (cni-podman1): state change: prepare -> config (reason 'none', sys-iface-state: 'external')
Apr 02 12:16:41 managed-node2 NetworkManager[661]: [1743610601.9653] device (cni-podman1): state change: config -> ip-config (reason 'none', sys-iface-state: 'external')
Apr 02 12:16:41 managed-node2 NetworkManager[661]: [1743610601.9655] device (cni-podman1): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external')
Apr 02 12:16:41 managed-node2 dbus-daemon[595]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=661 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0")
Apr 02 12:16:41 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service...
-- Subject: Unit NetworkManager-dispatcher.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has begun starting up.
Apr 02 12:16:42 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready
Apr 02 12:16:42 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready
Apr 02 12:16:42 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth5b8cd7f1: link becomes ready
Apr 02 12:16:42 managed-node2 kernel: cni-podman1: port 1(veth5b8cd7f1) entered blocking state
Apr 02 12:16:42 managed-node2 kernel: cni-podman1: port 1(veth5b8cd7f1) entered forwarding state
Apr 02 12:16:42 managed-node2 NetworkManager[661]: [1743610602.0134] device (veth5b8cd7f1): carrier: link connected
Apr 02 12:16:42 managed-node2 NetworkManager[661]: [1743610602.0136] device (cni-podman1): carrier: link connected
Apr 02 12:16:42 managed-node2 dbus-daemon[595]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'
Apr 02 12:16:42 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service.
-- Subject: Unit NetworkManager-dispatcher.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:42 managed-node2 NetworkManager[661]: [1743610602.0335] device (cni-podman1): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external')
Apr 02 12:16:42 managed-node2 NetworkManager[661]: [1743610602.0337] device (cni-podman1): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external')
Apr 02 12:16:42 managed-node2 NetworkManager[661]: [1743610602.0341] device (cni-podman1): Activation: successful, device activated.
Apr 02 12:16:42 managed-node2 dnsmasq[28144]: listening on cni-podman1(#3): 10.89.0.1
Apr 02 12:16:42 managed-node2 dnsmasq[28148]: started, version 2.79 cachesize 150
Apr 02 12:16:42 managed-node2 dnsmasq[28148]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify
Apr 02 12:16:42 managed-node2 dnsmasq[28148]: using local addresses only for domain dns.podman
Apr 02 12:16:42 managed-node2 dnsmasq[28148]: reading /etc/resolv.conf
Apr 02 12:16:42 managed-node2 dnsmasq[28148]: using local addresses only for domain dns.podman
Apr 02 12:16:42 managed-node2 dnsmasq[28148]: using nameserver 10.29.169.13#53
Apr 02 12:16:42 managed-node2 dnsmasq[28148]: using nameserver 10.29.170.12#53
Apr 02 12:16:42 managed-node2 dnsmasq[28148]: using nameserver 10.2.32.1#53
Apr 02 12:16:42 managed-node2 dnsmasq[28148]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Apr 02 12:16:42 managed-node2 systemd[1]: Started libpod-conmon-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope.
-- Subject: Unit libpod-conmon-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:42 managed-node2 conmon[28150]: conmon f55919f3e74918efc8de : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach}
Apr 02 12:16:42 managed-node2 conmon[28150]: conmon f55919f3e74918efc8de : terminal_ctrl_fd: 13
Apr 02 12:16:42 managed-node2 conmon[28150]: conmon f55919f3e74918efc8de : winsz read side: 17, winsz write side: 18
Apr 02 12:16:42 managed-node2 systemd[1]: Started libcontainer container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.
-- Subject: Unit libpod-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:42 managed-node2 conmon[28150]: conmon f55919f3e74918efc8de : container PID: 28160
Apr 02 12:16:42 managed-node2 systemd[1]: Started libpod-conmon-07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.scope.
-- Subject: Unit libpod-conmon-07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.scope has finished starting up.
--
-- The start-up result is done.
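Each container gets its own conmon monitor (PIDs 28150 and 28171 here), which holds the attach socket, writes the pidfile, and reports the container PID back to podman; systemd tracks it in the matching libpod-conmon-<container-id>.scope. One way to correlate a container with its monitor process (the .State.ConmonPid field is assumed from podman 4.x inspect output):

    podman inspect --format '{{ .State.ConmonPid }}' <container-id>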
Apr 02 12:16:42 managed-node2 conmon[28171]: conmon 07879efcbdac8edf0c41 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach}
Apr 02 12:16:42 managed-node2 conmon[28171]: conmon 07879efcbdac8edf0c41 : terminal_ctrl_fd: 12
Apr 02 12:16:42 managed-node2 conmon[28171]: conmon 07879efcbdac8edf0c41 : winsz read side: 16, winsz write side: 17
Apr 02 12:16:42 managed-node2 systemd[1]: Started libcontainer container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.
-- Subject: Unit libpod-07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.scope has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:42 managed-node2 conmon[28171]: conmon 07879efcbdac8edf0c41 : container PID: 28182
Apr 02 12:16:42 managed-node2 platform-python[27974]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml
Apr 02 12:16:42 managed-node2 platform-python[27974]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout:
Pod:
c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a
Container:
07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1
Apr 02 12:16:42 managed-node2 platform-python[27974]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr:
time="2025-04-02T12:16:41-04:00" level=info msg="/usr/bin/podman filtering at log level debug"
time="2025-04-02T12:16:41-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)"
time="2025-04-02T12:16:41-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
time="2025-04-02T12:16:41-04:00" level=info msg="Using sqlite as database backend"
time="2025-04-02T12:16:41-04:00" level=debug msg="Using graph driver overlay"
time="2025-04-02T12:16:41-04:00" level=debug msg="Using graph root /var/lib/containers/storage"
time="2025-04-02T12:16:41-04:00" level=debug msg="Using run root /run/containers/storage"
time="2025-04-02T12:16:41-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
time="2025-04-02T12:16:41-04:00" level=debug msg="Using tmp dir /run/libpod"
time="2025-04-02T12:16:41-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
time="2025-04-02T12:16:41-04:00" level=debug msg="Using transient store: false"
time="2025-04-02T12:16:41-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
time="2025-04-02T12:16:41-04:00" level=debug msg="Cached value indicated that overlay is supported"
time="2025-04-02T12:16:41-04:00" level=debug msg="Cached value indicated that overlay is supported"
time="2025-04-02T12:16:41-04:00" level=debug msg="Cached value indicated that metacopy is being used"
time="2025-04-02T12:16:41-04:00" level=debug msg="Cached value indicated that native-diff is not being used"
time="2025-04-02T12:16:41-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
time="2025-04-02T12:16:41-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true"
time="2025-04-02T12:16:41-04:00" level=debug msg="Initializing event backend file"
time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
time="2025-04-02T12:16:41-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
time="2025-04-02T12:16:41-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
time="2025-04-02T12:16:41-04:00" level=info msg="Setting parallel job count to 7"
time="2025-04-02T12:16:41-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network a4dcf21f020ee4e36651c11256cbe884182552e835eaaafd409153cd21dca4cc bridge cni-podman1 2025-04-02 12:14:18.041574535 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}"
time="2025-04-02T12:16:41-04:00" level=debug msg="Successfully loaded 2 networks"
time="2025-04-02T12:16:41-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage"
time="2025-04-02T12:16:41-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }"
time="2025-04-02T12:16:41-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..."
time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660)" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Pod using bridge network mode" time="2025-04-02T12:16:41-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice for parent machine.slice and name libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a" time="2025-04-02T12:16:41-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice" time="2025-04-02T12:16:41-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice" time="2025-04-02T12:16:41-04:00" level=debug msg="Looking up image \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:41-04:00" level=debug msg="Trying \"localhost/podman-pause:4.9.4-dev-1708535009\" ..." 
time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"localhost/podman-pause:4.9.4-dev-1708535009\" as \"localhost/podman-pause:4.9.4-dev-1708535009\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660)" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660" time="2025-04-02T12:16:41-04:00" level=debug msg="using systemd mode: false" time="2025-04-02T12:16:41-04:00" level=debug msg="setting container name c136295f156f-infra" time="2025-04-02T12:16:41-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Allocated lock 1 for container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6" time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:0c83f44aec84a1988fb583251f46d35742f665c899385313475da2ed177c5660\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-04-02T12:16:41-04:00" level=debug msg="Check for idmapped mounts support " time="2025-04-02T12:16:41-04:00" level=debug msg="Created container \"f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Container \"f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6\" has work directory \"/var/lib/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Container \"f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6\" has run directory \"/run/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:41-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" 
..." time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-04-02T12:16:41-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:41-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:41-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-04-02T12:16:41-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-04-02T12:16:41-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:41-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-04-02T12:16:41-04:00" level=debug msg="using systemd mode: false" time="2025-04-02T12:16:41-04:00" level=debug msg="adding container to pod httpd2" time="2025-04-02T12:16:41-04:00" level=debug msg="setting container name httpd2-httpd2" 
time="2025-04-02T12:16:41-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-04-02T12:16:41-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-04-02T12:16:41-04:00" level=debug msg="Adding mount /proc" time="2025-04-02T12:16:41-04:00" level=debug msg="Adding mount /dev" time="2025-04-02T12:16:41-04:00" level=debug msg="Adding mount /dev/pts" time="2025-04-02T12:16:41-04:00" level=debug msg="Adding mount /dev/mqueue" time="2025-04-02T12:16:41-04:00" level=debug msg="Adding mount /sys" time="2025-04-02T12:16:41-04:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-04-02T12:16:41-04:00" level=debug msg="Allocated lock 2 for container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1" time="2025-04-02T12:16:41-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Created container \"07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Container \"07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1\" has work directory \"/var/lib/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Container \"07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1\" has run directory \"/run/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Strongconnecting node f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6" time="2025-04-02T12:16:41-04:00" level=debug msg="Pushed f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 onto stack" time="2025-04-02T12:16:41-04:00" level=debug msg="Finishing node f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6. Popped f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 off stack" time="2025-04-02T12:16:41-04:00" level=debug msg="Strongconnecting node 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1" time="2025-04-02T12:16:41-04:00" level=debug msg="Pushed 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 onto stack" time="2025-04-02T12:16:41-04:00" level=debug msg="Finishing node 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1. 
Popped 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 off stack" time="2025-04-02T12:16:41-04:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/GDJMUPER7RS3E2YWZ32KQ44FIX,upperdir=/var/lib/containers/storage/overlay/e034212cb125fb2fbe7343a5d3ca260ceb356023a668e00e0470c456f3db9fc8/diff,workdir=/var/lib/containers/storage/overlay/e034212cb125fb2fbe7343a5d3ca260ceb356023a668e00e0470c456f3db9fc8/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c641,c1006\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Mounted container \"f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6\" at \"/var/lib/containers/storage/overlay/e034212cb125fb2fbe7343a5d3ca260ceb356023a668e00e0470c456f3db9fc8/merged\"" time="2025-04-02T12:16:41-04:00" level=debug msg="Created root filesystem for container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 at /var/lib/containers/storage/overlay/e034212cb125fb2fbe7343a5d3ca260ceb356023a668e00e0470c456f3db9fc8/merged" time="2025-04-02T12:16:41-04:00" level=debug msg="Made network namespace at /run/netns/netns-4077cdc6-43eb-a845-b235-1712e179794f for container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6" time="2025-04-02T12:16:42-04:00" level=debug msg="cni result for container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 network podman-default-kube-network: &{0.4.0 [{Name:cni-podman1 Mac:12:02:c2:48:d2:1a Sandbox:} {Name:veth5b8cd7f1 Mac:22:e1:36:42:c3:b0 Sandbox:} {Name:eth0 Mac:9e:ab:aa:86:81:84 Sandbox:/run/netns/netns-4077cdc6-43eb-a845-b235-1712e179794f}] [{Version:4 Interface:0xc00086fba8 Address:{IP:10.89.0.2 Mask:ffffff00} Gateway:10.89.0.1}] [{Dst:{IP:0.0.0.0 Mask:00000000} GW:}] {[10.89.0.1] [dns.podman] []}}" time="2025-04-02T12:16:42-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-04-02T12:16:42-04:00" level=debug msg="Setting Cgroups for container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 to machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice:libpod:f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6" time="2025-04-02T12:16:42-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-04-02T12:16:42-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/e034212cb125fb2fbe7343a5d3ca260ceb356023a668e00e0470c456f3db9fc8/merged\"" time="2025-04-02T12:16:42-04:00" level=debug msg="Created OCI spec for container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 at /var/lib/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata/config.json" time="2025-04-02T12:16:42-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice for parent machine.slice and name libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a" time="2025-04-02T12:16:42-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice" time="2025-04-02T12:16:42-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice" time="2025-04-02T12:16:42-04:00" level=debug msg="/usr/bin/conmon messages will be logged 
to syslog" time="2025-04-02T12:16:42-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 -u f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata -p /run/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata/pidfile -n c136295f156f-infra --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6]" time="2025-04-02T12:16:42-04:00" level=info msg="Running conmon under slice machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice and unitName libpod-conmon-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope" time="2025-04-02T12:16:42-04:00" level=debug msg="Received: 28160" time="2025-04-02T12:16:42-04:00" level=info msg="Got Conmon PID as 28150" time="2025-04-02T12:16:42-04:00" level=debug msg="Created container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 in OCI runtime" time="2025-04-02T12:16:42-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-04-02T12:16:42-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-04-02T12:16:42-04:00" level=debug msg="Starting container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6 with command [/catatonit -P]" time="2025-04-02T12:16:42-04:00" level=debug msg="Started container f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6" time="2025-04-02T12:16:42-04:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/O7YQPBL2AO7YYECS2J5W2BMVTA,upperdir=/var/lib/containers/storage/overlay/4a5e3927d0aa2cbfc4839e5bf95dd796483458fe346415ead4ec47ddb95353ee/diff,workdir=/var/lib/containers/storage/overlay/4a5e3927d0aa2cbfc4839e5bf95dd796483458fe346415ead4ec47ddb95353ee/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c641,c1006\"" 
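The mount_data lines show the standard overlayfs layout podman uses: the image layer stack as lowerdir, the container's writable diff as upperdir, plus a workdir, all labeled with a per-container SELinux context. Stripped of the storage paths, it is the ordinary kernel mount (paths illustrative only):

    # generic overlayfs mount of the same shape
    mount -t overlay overlay -o lowerdir=/lower,upperdir=/upper,workdir=/work /merged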
time="2025-04-02T12:16:42-04:00" level=debug msg="Mounted container \"07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1\" at \"/var/lib/containers/storage/overlay/4a5e3927d0aa2cbfc4839e5bf95dd796483458fe346415ead4ec47ddb95353ee/merged\"" time="2025-04-02T12:16:42-04:00" level=debug msg="Created root filesystem for container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 at /var/lib/containers/storage/overlay/4a5e3927d0aa2cbfc4839e5bf95dd796483458fe346415ead4ec47ddb95353ee/merged" time="2025-04-02T12:16:42-04:00" level=debug msg="/etc/system-fips does not exist on host, not mounting FIPS mode subscription" time="2025-04-02T12:16:42-04:00" level=debug msg="Setting Cgroups for container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 to machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice:libpod:07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1" time="2025-04-02T12:16:42-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-04-02T12:16:42-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-04-02T12:16:42-04:00" level=debug msg="Created OCI spec for container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 at /var/lib/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata/config.json" time="2025-04-02T12:16:42-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice for parent machine.slice and name libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a" time="2025-04-02T12:16:42-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice" time="2025-04-02T12:16:42-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice" time="2025-04-02T12:16:42-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-04-02T12:16:42-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 -u 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 -r /usr/bin/runc -b /var/lib/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata -p /run/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg cni --exit-command-arg 
--volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg runc --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1]" time="2025-04-02T12:16:42-04:00" level=info msg="Running conmon under slice machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice and unitName libpod-conmon-07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.scope" time="2025-04-02T12:16:42-04:00" level=debug msg="Received: 28182" time="2025-04-02T12:16:42-04:00" level=info msg="Got Conmon PID as 28171" time="2025-04-02T12:16:42-04:00" level=debug msg="Created container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 in OCI runtime" time="2025-04-02T12:16:42-04:00" level=debug msg="Starting container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1 with command [/bin/busybox-extras httpd -f -p 80]" time="2025-04-02T12:16:42-04:00" level=debug msg="Started container 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1" time="2025-04-02T12:16:42-04:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-04-02T12:16:42-04:00" level=debug msg="Shutting down engines" Apr 02 12:16:42 managed-node2 platform-python[27974]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Apr 02 12:16:42 managed-node2 platform-python[28313]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Apr 02 12:16:42 managed-node2 systemd[1]: Reloading. Apr 02 12:16:43 managed-node2 dnsmasq[28148]: listening on cni-podman1(#3): fe80::1002:c2ff:fe48:d21a%cni-podman1 Apr 02 12:16:43 managed-node2 platform-python[28482]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Apr 02 12:16:43 managed-node2 systemd[1]: Reloading. Apr 02 12:16:44 managed-node2 platform-python[28637]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Apr 02 12:16:44 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice. -- Subject: Unit system-podman\x2dkube.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit system-podman\x2dkube.slice has finished starting up. -- -- The start-up result is done. Apr 02 12:16:44 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play... -- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun starting up. 
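The three ansible-systemd calls above are the usual daemon-reload / enable / start sequence; done by hand it would be:

    systemctl daemon-reload
    systemctl enable 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
    systemctl start 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'

(single quotes keep the \x2d escapes in the unit name literal for the shell).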
Apr 02 12:16:44 managed-node2 conmon[28150]: conmon f55919f3e74918efc8de : container 28160 exited with status 137
Apr 02 12:16:44 managed-node2 systemd[1]: libpod-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope has successfully entered the 'dead' state.
Apr 02 12:16:44 managed-node2 systemd[1]: libpod-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope: Consumed 32ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope completed and consumed the indicated resources.
Apr 02 12:16:44 managed-node2 conmon[28171]: conmon 07879efcbdac8edf0c41 : container 28182 exited with status 137
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6)"
Apr 02 12:16:44 managed-node2 systemd[1]: libpod-07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.scope has successfully entered the 'dead' state.
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Setting custom database backend: \"sqlite\""
Apr 02 12:16:44 managed-node2 systemd[1]: libpod-07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.scope: Consumed 33ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.scope completed and consumed the indicated resources.
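Exit status 137 here is not a failure of the test: 137 = 128 + 9, i.e. the processes ended on SIGKILL when the freshly started podman-kube@ service replaced the pod the podman_play module had just created (the template's play kube with --replace, assumed above, stops any pod previously created from the same file, and processes that do not exit on SIGTERM within the stop timeout are killed). Quick check of the signal arithmetic:

    $ echo $(( 137 - 128 ))
    9
    $ kill -l 9
    KILL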
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=info msg="Using sqlite as database backend"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using graph driver overlay"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using graph root /var/lib/containers/storage"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using run root /run/containers/storage"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using tmp dir /run/libpod"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using transient store: false"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Cached value indicated that overlay is supported"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Cached value indicated that overlay is supported"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Cached value indicated that metacopy is being used"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Cached value indicated that native-diff is not being used"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Initializing event backend file"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=info msg="Setting parallel job count to 7"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1)"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Setting custom database backend: \"sqlite\""
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=info msg="Using sqlite as database backend"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using graph driver overlay"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using graph root /var/lib/containers/storage"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using run root /run/containers/storage"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using tmp dir /run/libpod"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using transient store: false"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\""
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Cached value indicated that overlay is supported"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Cached value indicated that overlay is supported"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Cached value indicated that metacopy is being used"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Cached value indicated that native-diff is not being used"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Initializing event backend file"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Using OCI runtime \"/usr/bin/runc\""
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=info msg="Setting parallel job count to 7"
Apr 02 12:16:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay-4a5e3927d0aa2cbfc4839e5bf95dd796483458fe346415ead4ec47ddb95353ee-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-4a5e3927d0aa2cbfc4839e5bf95dd796483458fe346415ead4ec47ddb95353ee-merged.mount has successfully entered the 'dead' state.
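The "Configured OCI runtime ... initialization failed" debug lines above are expected on a host where only runc is installed: podman probes every runtime listed in its configuration and settles on the first usable one, hence "Using OCI runtime \"/usr/bin/runc\"". The storage settings echoed here (overlay driver, graph root, run root, overlay.mountopt=nodev,metacopy=on) correspond to the /etc/containers/storage.conf the role generated and which the "Clean up storage.conf" task removes at the end of this run. The file itself is not shown in this log; a minimal sketch, assuming podman's stock TOML layout:

  $ cat /etc/containers/storage.conf
  [storage]
  driver = "overlay"
  runroot = "/run/containers/storage"
  graphroot = "/var/lib/containers/storage"

  [storage.options.overlay]
  mountopt = "nodev,metacopy=on"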
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup 07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1)"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28671]: time="2025-04-02T12:16:44-04:00" level=debug msg="Shutting down engines"
Apr 02 12:16:44 managed-node2 systemd[1]: libpod-conmon-07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-07879efcbdac8edf0c41a045b652469e98643ac17f32e813508fa5c30b96f9a1.scope has successfully entered the 'dead' state.
Apr 02 12:16:44 managed-node2 kernel: cni-podman1: port 1(veth5b8cd7f1) entered disabled state
Apr 02 12:16:44 managed-node2 kernel: device veth5b8cd7f1 left promiscuous mode
Apr 02 12:16:44 managed-node2 kernel: cni-podman1: port 1(veth5b8cd7f1) entered disabled state
Apr 02 12:16:44 managed-node2 systemd[1]: run-netns-netns\x2d4077cdc6\x2d43eb\x2da845\x2db235\x2d1712e179794f.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2d4077cdc6\x2d43eb\x2da845\x2db235\x2d1712e179794f.mount has successfully entered the 'dead' state.
Apr 02 12:16:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6-userdata-shm.mount has successfully entered the 'dead' state.
Apr 02 12:16:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay-e034212cb125fb2fbe7343a5d3ca260ceb356023a668e00e0470c456f3db9fc8-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-e034212cb125fb2fbe7343a5d3ca260ceb356023a668e00e0470c456f3db9fc8-merged.mount has successfully entered the 'dead' state.
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend cni --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime runc --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend file --syslog container cleanup f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6)"
Apr 02 12:16:44 managed-node2 /usr/bin/podman[28664]: time="2025-04-02T12:16:44-04:00" level=debug msg="Shutting down engines"
Apr 02 12:16:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:16:44 managed-node2 systemd[1]: libpod-conmon-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope has successfully entered the 'dead' state.
Apr 02 12:16:44 managed-node2 systemd[1]: Stopped libpod-conmon-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope.
-- Subject: Unit libpod-conmon-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-f55919f3e74918efc8def997109ddb3d6eb865e2d48011ac6f8f1224a14d89c6.scope has finished shutting down.
Apr 02 12:16:44 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice.
-- Subject: Unit machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice has finished shutting down.
Apr 02 12:16:44 managed-node2 systemd[1]: machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice: Consumed 191ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit machine-libpod_pod_c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a.slice completed and consumed the indicated resources.
Apr 02 12:16:44 managed-node2 podman[28644]: Pods stopped:
Apr 02 12:16:44 managed-node2 podman[28644]: c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a
Apr 02 12:16:44 managed-node2 podman[28644]: Pods removed:
Apr 02 12:16:44 managed-node2 podman[28644]: c136295f156f7bf187bddeb2fecbea2cff2e9ee9508d5499e30810fe09f8bf5a
Apr 02 12:16:44 managed-node2 podman[28644]: Secrets removed:
Apr 02 12:16:44 managed-node2 podman[28644]: Volumes removed:
Apr 02 12:16:44 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_77af979829b4c906265c302f35b44662c9ae509e2295277ab05d31114593c34c.slice.
-- Subject: Unit machine-libpod_pod_77af979829b4c906265c302f35b44662c9ae509e2295277ab05d31114593c34c.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_77af979829b4c906265c302f35b44662c9ae509e2295277ab05d31114593c34c.slice has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:44 managed-node2 systemd[1]: Started libcontainer container 588140bc9115d9b5756adec78a87a15eeab133fc805876e51df1084dcbb86b47.
-- Subject: Unit libpod-588140bc9115d9b5756adec78a87a15eeab133fc805876e51df1084dcbb86b47.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-588140bc9115d9b5756adec78a87a15eeab133fc805876e51df1084dcbb86b47.scope has finished starting up.
--
-- The start-up result is done.
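The "Pods stopped / Pods removed" lines above come from the podman-kube@ template unit (its start for httpd2 completes a few entries below): at start-up it replays the kube YAML and replaces any pod previously created from the same file, which is why the old pod c136295f... is stopped and removed before the new pod 77af9798... is created. The exact commands the template runs can be inspected on the host; this is illustrative, and the flags vary by podman version:

  $ systemctl cat podman-kube@.service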
Apr 02 12:16:45 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth26ea01cb: link is not ready
Apr 02 12:16:45 managed-node2 kernel: cni-podman1: port 1(veth26ea01cb) entered blocking state
Apr 02 12:16:45 managed-node2 kernel: cni-podman1: port 1(veth26ea01cb) entered disabled state
Apr 02 12:16:45 managed-node2 kernel: device veth26ea01cb entered promiscuous mode
Apr 02 12:16:45 managed-node2 kernel: cni-podman1: port 1(veth26ea01cb) entered blocking state
Apr 02 12:16:45 managed-node2 kernel: cni-podman1: port 1(veth26ea01cb) entered forwarding state
Apr 02 12:16:45 managed-node2 systemd-udevd[28803]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Apr 02 12:16:45 managed-node2 systemd-udevd[28803]: Could not generate persistent MAC address for veth26ea01cb: No such file or directory
Apr 02 12:16:45 managed-node2 NetworkManager[661]: <info>  [1743610605.0222] manager: (veth26ea01cb): new Veth device (/org/freedesktop/NetworkManager/Devices/5)
Apr 02 12:16:45 managed-node2 kernel: cni-podman1: port 1(veth26ea01cb) entered disabled state
Apr 02 12:16:45 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth26ea01cb: link becomes ready
Apr 02 12:16:45 managed-node2 kernel: cni-podman1: port 1(veth26ea01cb) entered blocking state
Apr 02 12:16:45 managed-node2 kernel: cni-podman1: port 1(veth26ea01cb) entered forwarding state
Apr 02 12:16:45 managed-node2 NetworkManager[661]: <info>  [1743610605.0464] device (veth26ea01cb): carrier: link connected
Apr 02 12:16:45 managed-node2 NetworkManager[661]: <info>  [1743610605.0466] device (cni-podman1): carrier: link connected
Apr 02 12:16:45 managed-node2 dnsmasq[28873]: listening on cni-podman1(#3): 10.89.0.1
Apr 02 12:16:45 managed-node2 dnsmasq[28873]: listening on cni-podman1(#3): fe80::1002:c2ff:fe48:d21a%cni-podman1
Apr 02 12:16:45 managed-node2 dnsmasq[28878]: started, version 2.79 cachesize 150
Apr 02 12:16:45 managed-node2 dnsmasq[28878]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify
Apr 02 12:16:45 managed-node2 dnsmasq[28878]: using local addresses only for domain dns.podman
Apr 02 12:16:45 managed-node2 dnsmasq[28878]: reading /etc/resolv.conf
Apr 02 12:16:45 managed-node2 dnsmasq[28878]: using local addresses only for domain dns.podman
Apr 02 12:16:45 managed-node2 dnsmasq[28878]: using nameserver 10.29.169.13#53
Apr 02 12:16:45 managed-node2 dnsmasq[28878]: using nameserver 10.29.170.12#53
Apr 02 12:16:45 managed-node2 dnsmasq[28878]: using nameserver 10.2.32.1#53
Apr 02 12:16:45 managed-node2 dnsmasq[28878]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Apr 02 12:16:45 managed-node2 systemd[1]: Started libcontainer container 5fcc93e0360148c3d3fb6c8c1227984b1b0aca49096bdadcde38e94e7f3827b5.
-- Subject: Unit libpod-5fcc93e0360148c3d3fb6c8c1227984b1b0aca49096bdadcde38e94e7f3827b5.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-5fcc93e0360148c3d3fb6c8c1227984b1b0aca49096bdadcde38e94e7f3827b5.scope has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:45 managed-node2 systemd[1]: Started libcontainer container 8721dc66e1b09f7265827becb4b41ef1101115eefcd978863bb383a8e8fd79a4.
-- Subject: Unit libpod-8721dc66e1b09f7265827becb4b41ef1101115eefcd978863bb383a8e8fd79a4.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-8721dc66e1b09f7265827becb4b41ef1101115eefcd978863bb383a8e8fd79a4.scope has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:45 managed-node2 podman[28644]: Pod:
Apr 02 12:16:45 managed-node2 podman[28644]: 77af979829b4c906265c302f35b44662c9ae509e2295277ab05d31114593c34c
Apr 02 12:16:45 managed-node2 podman[28644]: Container:
Apr 02 12:16:45 managed-node2 podman[28644]: 8721dc66e1b09f7265827becb4b41ef1101115eefcd978863bb383a8e8fd79a4
Apr 02 12:16:45 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.
-- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:46 managed-node2 platform-python[29052]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:16:48 managed-node2 platform-python[29177]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:16:49 managed-node2 platform-python[29301]: ansible-file Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:16:49 managed-node2 platform-python[29424]: ansible-file Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:16:51 managed-node2 platform-python[29713]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:16:51 managed-node2 platform-python[29836]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:16:52 managed-node2 platform-python[29959]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Apr 02 12:16:52 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Apr 02 12:16:52 managed-node2 platform-python[30059]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1743610611.7771573-13950-168904350400664/source _original_basename=tmpzpln3uql follow=False checksum=594c0616b7a43ab4e4801162b9fc7add01f01ce1 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:16:52 managed-node2 platform-python[30184]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Apr 02 12:16:52 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_c4c98f775b52953a116e15ae01c6ef94ce8e7195636e4f4d862d67e97e3eaaa8.slice.
-- Subject: Unit machine-libpod_pod_c4c98f775b52953a116e15ae01c6ef94ce8e7195636e4f4d862d67e97e3eaaa8.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_c4c98f775b52953a116e15ae01c6ef94ce8e7195636e4f4d862d67e97e3eaaa8.slice has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:53 managed-node2 systemd-udevd[30231]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Apr 02 12:16:53 managed-node2 systemd-udevd[30231]: Could not generate persistent MAC address for veth4671ddb4: No such file or directory
Apr 02 12:16:53 managed-node2 NetworkManager[661]: <info>  [1743610613.0472] manager: (veth4671ddb4): new Veth device (/org/freedesktop/NetworkManager/Devices/6)
Apr 02 12:16:53 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth4671ddb4: link is not ready
Apr 02 12:16:53 managed-node2 kernel: cni-podman1: port 2(veth4671ddb4) entered blocking state
Apr 02 12:16:53 managed-node2 kernel: cni-podman1: port 2(veth4671ddb4) entered disabled state
Apr 02 12:16:53 managed-node2 kernel: device veth4671ddb4 entered promiscuous mode
Apr 02 12:16:53 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready
Apr 02 12:16:53 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready
Apr 02 12:16:53 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth4671ddb4: link becomes ready
Apr 02 12:16:53 managed-node2 kernel: cni-podman1: port 2(veth4671ddb4) entered blocking state
Apr 02 12:16:53 managed-node2 kernel: cni-podman1: port 2(veth4671ddb4) entered forwarding state
Apr 02 12:16:53 managed-node2 NetworkManager[661]: <info>  [1743610613.0758] device (veth4671ddb4): carrier: link connected
Apr 02 12:16:53 managed-node2 dnsmasq[28878]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses
Apr 02 12:16:53 managed-node2 systemd[1]: Started libpod-conmon-fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6.scope.
-- Subject: Unit libpod-conmon-fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6.scope has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:53 managed-node2 systemd[1]: Started libcontainer container fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6.
-- Subject: Unit libpod-fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6.scope has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:53 managed-node2 systemd[1]: Started libpod-conmon-a4dfa039017d9aa56d2353778e99414d6a646ddbc1098f51ecf08096c7f34b89.scope.
-- Subject: Unit libpod-conmon-a4dfa039017d9aa56d2353778e99414d6a646ddbc1098f51ecf08096c7f34b89.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-a4dfa039017d9aa56d2353778e99414d6a646ddbc1098f51ecf08096c7f34b89.scope has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:53 managed-node2 systemd[1]: Started libcontainer container a4dfa039017d9aa56d2353778e99414d6a646ddbc1098f51ecf08096c7f34b89.
-- Subject: Unit libpod-a4dfa039017d9aa56d2353778e99414d6a646ddbc1098f51ecf08096c7f34b89.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-a4dfa039017d9aa56d2353778e99414d6a646ddbc1098f51ecf08096c7f34b89.scope has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:53 managed-node2 platform-python[30466]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Apr 02 12:16:53 managed-node2 systemd[1]: Reloading.
Apr 02 12:16:54 managed-node2 platform-python[30627]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None
Apr 02 12:16:54 managed-node2 systemd[1]: Reloading.
Apr 02 12:16:55 managed-node2 platform-python[30782]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Apr 02 12:16:55 managed-node2 systemd[1]: Starting A template for running K8s workloads via podman-kube-play...
-- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun starting up.
Apr 02 12:16:55 managed-node2 systemd[1]: libpod-fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6.scope has successfully entered the 'dead' state.
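The unit name passed to the ansible-systemd calls above is derived from the kube file path by the systemd-escape task logged earlier; both the command and the resulting instance name appear verbatim in this run, so the mapping can be reproduced directly:

  $ systemd-escape --template podman-kube@.service \
        /etc/containers/ansible-kubernetes.d/httpd3.yml
  podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service

Each / in the path becomes - and each literal - is escaped to \x2d, so the instance string round-trips back to the YAML location when the template expands %I.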
Apr 02 12:16:55 managed-node2 systemd[1]: libpod-fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6.scope: Consumed 33ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6.scope completed and consumed the indicated resources.
Apr 02 12:16:55 managed-node2 systemd[1]: libpod-a4dfa039017d9aa56d2353778e99414d6a646ddbc1098f51ecf08096c7f34b89.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-a4dfa039017d9aa56d2353778e99414d6a646ddbc1098f51ecf08096c7f34b89.scope has successfully entered the 'dead' state.
Apr 02 12:16:55 managed-node2 systemd[1]: libpod-a4dfa039017d9aa56d2353778e99414d6a646ddbc1098f51ecf08096c7f34b89.scope: Consumed 35ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-a4dfa039017d9aa56d2353778e99414d6a646ddbc1098f51ecf08096c7f34b89.scope completed and consumed the indicated resources.
Apr 02 12:16:55 managed-node2 dnsmasq[28878]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 1 addresses
Apr 02 12:16:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay-f51db7abb9aa4296e9a14aee3bd1c603fb931ed7396fd5400e197e88dc1e7222-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-f51db7abb9aa4296e9a14aee3bd1c603fb931ed7396fd5400e197e88dc1e7222-merged.mount has successfully entered the 'dead' state.
Apr 02 12:16:55 managed-node2 systemd[1]: libpod-conmon-a4dfa039017d9aa56d2353778e99414d6a646ddbc1098f51ecf08096c7f34b89.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-a4dfa039017d9aa56d2353778e99414d6a646ddbc1098f51ecf08096c7f34b89.scope has successfully entered the 'dead' state.
Apr 02 12:16:55 managed-node2 kernel: cni-podman1: port 2(veth4671ddb4) entered disabled state
Apr 02 12:16:55 managed-node2 kernel: device veth4671ddb4 left promiscuous mode
Apr 02 12:16:55 managed-node2 kernel: cni-podman1: port 2(veth4671ddb4) entered disabled state
Apr 02 12:16:55 managed-node2 systemd[1]: run-netns-netns\x2d074a579b\x2d3a87\x2d2af8\x2ddfdd\x2d5809e41ac97a.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2d074a579b\x2d3a87\x2d2af8\x2ddfdd\x2d5809e41ac97a.mount has successfully entered the 'dead' state.
Apr 02 12:16:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6-userdata-shm.mount has successfully entered the 'dead' state.
Apr 02 12:16:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay-65a06a5342233798028714c4c0c5c39116ff44387ef5155a4c41f045ece9c93e-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-65a06a5342233798028714c4c0c5c39116ff44387ef5155a4c41f045ece9c93e-merged.mount has successfully entered the 'dead' state.
Apr 02 12:16:55 managed-node2 systemd[1]: libpod-conmon-fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-fd9a084af93cedc7453d899bc0eabc45f0b1731b44f34d5e0f4ba7a19f0cf8e6.scope has successfully entered the 'dead' state.
Apr 02 12:16:55 managed-node2 systemd[1]: Removed slice cgroup machine-libpod_pod_c4c98f775b52953a116e15ae01c6ef94ce8e7195636e4f4d862d67e97e3eaaa8.slice.
-- Subject: Unit machine-libpod_pod_c4c98f775b52953a116e15ae01c6ef94ce8e7195636e4f4d862d67e97e3eaaa8.slice has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_c4c98f775b52953a116e15ae01c6ef94ce8e7195636e4f4d862d67e97e3eaaa8.slice has finished shutting down.
Apr 02 12:16:55 managed-node2 systemd[1]: machine-libpod_pod_c4c98f775b52953a116e15ae01c6ef94ce8e7195636e4f4d862d67e97e3eaaa8.slice: Consumed 217ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit machine-libpod_pod_c4c98f775b52953a116e15ae01c6ef94ce8e7195636e4f4d862d67e97e3eaaa8.slice completed and consumed the indicated resources.
Apr 02 12:16:55 managed-node2 podman[30789]: Pods stopped:
Apr 02 12:16:55 managed-node2 podman[30789]: c4c98f775b52953a116e15ae01c6ef94ce8e7195636e4f4d862d67e97e3eaaa8
Apr 02 12:16:55 managed-node2 podman[30789]: Pods removed:
Apr 02 12:16:55 managed-node2 podman[30789]: c4c98f775b52953a116e15ae01c6ef94ce8e7195636e4f4d862d67e97e3eaaa8
Apr 02 12:16:55 managed-node2 podman[30789]: Secrets removed:
Apr 02 12:16:55 managed-node2 podman[30789]: Volumes removed:
Apr 02 12:16:55 managed-node2 systemd[1]: Created slice cgroup machine-libpod_pod_bbeccce731ae31691374be30fa1ba510e563a8c50918181290e3c4426d10ae58.slice.
-- Subject: Unit machine-libpod_pod_bbeccce731ae31691374be30fa1ba510e563a8c50918181290e3c4426d10ae58.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_bbeccce731ae31691374be30fa1ba510e563a8c50918181290e3c4426d10ae58.slice has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:56 managed-node2 systemd[1]: Started libcontainer container f5694b8dbcf2f7a0ceeda86b698fdc9cb04ec037d0bf9732c1059c403e2a5bca.
-- Subject: Unit libpod-f5694b8dbcf2f7a0ceeda86b698fdc9cb04ec037d0bf9732c1059c403e2a5bca.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-f5694b8dbcf2f7a0ceeda86b698fdc9cb04ec037d0bf9732c1059c403e2a5bca.scope has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:56 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): vethd991d6df: link is not ready
Apr 02 12:16:56 managed-node2 systemd-udevd[30956]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Apr 02 12:16:56 managed-node2 systemd-udevd[30956]: Could not generate persistent MAC address for vethd991d6df: No such file or directory
Apr 02 12:16:56 managed-node2 NetworkManager[661]: <info>  [1743610616.1091] manager: (vethd991d6df): new Veth device (/org/freedesktop/NetworkManager/Devices/7)
Apr 02 12:16:56 managed-node2 kernel: cni-podman1: port 2(vethd991d6df) entered blocking state
Apr 02 12:16:56 managed-node2 kernel: cni-podman1: port 2(vethd991d6df) entered disabled state
Apr 02 12:16:56 managed-node2 kernel: device vethd991d6df entered promiscuous mode
Apr 02 12:16:56 managed-node2 kernel: cni-podman1: port 2(vethd991d6df) entered blocking state
Apr 02 12:16:56 managed-node2 kernel: cni-podman1: port 2(vethd991d6df) entered forwarding state
Apr 02 12:16:56 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethd991d6df: link becomes ready
Apr 02 12:16:56 managed-node2 NetworkManager[661]: <info>  [1743610616.1287] device (vethd991d6df): carrier: link connected
Apr 02 12:16:56 managed-node2 dnsmasq[28878]: read /run/containers/cni/dnsname/podman-default-kube-network/addnhosts - 2 addresses
Apr 02 12:16:56 managed-node2 systemd[1]: Started libcontainer container 1000fbfbe42abb5271f375e63fb07b71bc64b515b8966c07c5ec7c42ea29aaed.
-- Subject: Unit libpod-1000fbfbe42abb5271f375e63fb07b71bc64b515b8966c07c5ec7c42ea29aaed.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-1000fbfbe42abb5271f375e63fb07b71bc64b515b8966c07c5ec7c42ea29aaed.scope has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:56 managed-node2 systemd[1]: Started libcontainer container 5a7699d1314c4fde619f24520a9b089c82f977f54e68e495dfae20bd95501251.
-- Subject: Unit libpod-5a7699d1314c4fde619f24520a9b089c82f977f54e68e495dfae20bd95501251.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-5a7699d1314c4fde619f24520a9b089c82f977f54e68e495dfae20bd95501251.scope has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:56 managed-node2 podman[30789]: Pod:
Apr 02 12:16:56 managed-node2 podman[30789]: bbeccce731ae31691374be30fa1ba510e563a8c50918181290e3c4426d10ae58
Apr 02 12:16:56 managed-node2 podman[30789]: Container:
Apr 02 12:16:56 managed-node2 podman[30789]: 5a7699d1314c4fde619f24520a9b089c82f977f54e68e495dfae20bd95501251
Apr 02 12:16:56 managed-node2 systemd[1]: Started A template for running K8s workloads via podman-kube-play.
-- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished starting up.
--
-- The start-up result is done.
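The changing addnhosts counts in the dnsmasq lines track the dnsname CNI plugin updating per-network host records as containers join and leave podman-default-kube-network. Two ways to look at the same state on the host, shown here as illustration (the inspect output format depends on the podman version):

  $ podman network inspect podman-default-kube-network
  $ cat /run/containers/cni/dnsname/podman-default-kube-network/addnhosts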
Apr 02 12:16:57 managed-node2 sudo[31187]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ewcmhkcxpgjravihqxmrekwhrxbrcbje ; /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610617.1196704-14126-41130928665082/AnsiballZ_command.py'
Apr 02 12:16:57 managed-node2 sudo[31187]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Apr 02 12:16:57 managed-node2 platform-python[31190]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:16:57 managed-node2 systemd[24608]: Started podman-31198.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Apr 02 12:16:57 managed-node2 sudo[31187]: pam_unix(sudo:session): session closed for user podman_basic_user
Apr 02 12:16:57 managed-node2 platform-python[31328]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:16:58 managed-node2 platform-python[31467]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:16:58 managed-node2 sudo[31598]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vwzqlvcobxgxkuhuldzfybecbuqvflxc ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610618.43136-14174-156067799957526/AnsiballZ_command.py'
Apr 02 12:16:58 managed-node2 sudo[31598]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Apr 02 12:16:58 managed-node2 platform-python[31601]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:16:58 managed-node2 sudo[31598]: pam_unix(sudo:session): session closed for user podman_basic_user
Apr 02 12:16:59 managed-node2 platform-python[31727]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:16:59 managed-node2 platform-python[31853]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:17:00 managed-node2 platform-python[31979]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:17:00 managed-node2 platform-python[32106]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:17:00 managed-node2 rsyslogd[1020]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ]
Apr 02 12:17:01 managed-node2 platform-python[32233]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_sr1vi4ai_podman/httpd1-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:17:01 managed-node2 platform-python[32357]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_sr1vi4ai_podman/httpd2-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:17:01 managed-node2 platform-python[32481]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_sr1vi4ai_podman/httpd3-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:17:04 managed-node2 platform-python[32730]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:17:06 managed-node2 platform-python[32859]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:17:08 managed-node2 platform-python[32984]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:17:11 managed-node2 platform-python[33108]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Apr 02 12:17:12 managed-node2 platform-python[33235]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Apr 02 12:17:13 managed-node2 platform-python[33362]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Apr 02 12:17:15 managed-node2 platform-python[33485]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:17:18 managed-node2 platform-python[33609]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Apr 02 12:17:21 managed-node2 platform-python[33733]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Apr 02 12:17:23 managed-node2 platform-python[33898]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True
Apr 02 12:17:23 managed-node2 platform-python[34021]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
Apr 02 12:17:28 managed-node2 platform-python[34144]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None
Apr 02 12:17:29 managed-node2 platform-python[34268]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:17:29 managed-node2 platform-python[34393]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:17:29 managed-node2 platform-python[34517]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:17:31 managed-node2 platform-python[34641]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:17:31 managed-node2 platform-python[34765]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None
Apr 02 12:17:32 managed-node2 platform-python[34888]: ansible-file Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:17:32 managed-node2 platform-python[35011]: ansible-file Invoked with path=/tmp/lsr_sr1vi4ai_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Apr 02 12:17:33 managed-node2 sudo[35134]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rvdsoxjxdkseipgbhseiorophgqzprvc ; XDG_RUNTIME_DIR=/run/user/3001 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610652.964236-15240-119374353441959/AnsiballZ_podman_image.py'
Apr 02 12:17:33 managed-node2 sudo[35134]: pam_unix(sudo:session): session opened for user podman_basic_user by root(uid=0)
Apr 02 12:17:33 managed-node2 systemd[24608]: Started podman-35139.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Apr 02 12:17:33 managed-node2 systemd[24608]: Started podman-35147.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Apr 02 12:17:33 managed-node2 systemd[24608]: Started podman-35155.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Apr 02 12:17:33 managed-node2 systemd[24608]: Started podman-35163.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
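The firewall_lib and local_seport tasks above open the test ports 15001-15003/tcp and label them http_port_t, and enable-linger lets podman_basic_user's user manager (the systemd[24608] instance handling the podman-*.scope units here) keep running without an open session. Roughly equivalent manual commands, as a sketch only; the roles drive firewalld over D-Bus and use the semanage Python bindings rather than these CLIs:

  $ firewall-cmd --permanent --add-port=15001-15003/tcp
  $ firewall-cmd --reload
  $ semanage port -a -t http_port_t -p tcp 15001-15003
  $ loginctl enable-linger podman_basic_user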
Apr 02 12:17:33 managed-node2 systemd[24608]: Started podman-35171.scope.
-- Subject: Unit UNIT has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit UNIT has finished starting up.
--
-- The start-up result is done.
Apr 02 12:17:33 managed-node2 sudo[35134]: pam_unix(sudo:session): session closed for user podman_basic_user
Apr 02 12:17:33 managed-node2 platform-python[35301]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

TASK [Clean up storage.conf] ***************************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:456
Wednesday 02 April 2025  12:17:34 -0400 (0:00:00.461)       0:04:57.293 *******
changed: [managed-node2] => {
    "changed": true,
    "path": "/etc/containers/storage.conf",
    "state": "absent"
}

TASK [Clean up host directories] ***********************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:463
Wednesday 02 April 2025  12:17:34 -0400 (0:00:00.391)       0:04:57.685 *******
changed: [managed-node2] => {
    "changed": true,
    "path": "/tmp/lsr_sr1vi4ai_podman",
    "state": "absent"
}

TASK [Remove kube file src] ****************************************************
task path: /tmp/collections-qja/ansible_collections/fedora/linux_system_roles/tests/podman/tests_basic.yml:470
Wednesday 02 April 2025  12:17:34 -0400 (0:00:00.369)       0:04:58.054 *******
changed: [managed-node2 -> localhost] => {
    "changed": true,
    "path": "/tmp/lsr_podman_djkjebp5.yml",
    "state": "absent"
}

PLAY RECAP *********************************************************************
managed-node2              : ok=375  changed=43   unreachable=0    failed=1    skipped=608  rescued=2    ignored=0

TASKS RECAP ********************************************************************
Wednesday 02 April 2025  12:17:35 -0400 (0:00:00.264)       0:04:58.319 *******
===============================================================================
fedora.linux_system_roles.podman : Ensure required packages are installed -- 62.44s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 3.58s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58
fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 3.55s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112
fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 3.38s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112
fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 3.34s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112
fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 3.27s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112
fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 3.26s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112
fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 3.26s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112
fedora.linux_system_roles.selinux : Install SELinux python3 tools ------- 3.04s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35
fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 2.96s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58
fedora.linux_system_roles.firewall : Install firewalld ------------------ 2.92s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 2.92s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58
fedora.linux_system_roles.selinux : Install SELinux python3 tools ------- 2.91s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35
fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 2.90s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58
fedora.linux_system_roles.selinux : Install SELinux python3 tools ------- 2.88s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35
fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 2.88s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58
fedora.linux_system_roles.selinux : Install SELinux python3 tools ------- 2.88s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35
fedora.linux_system_roles.selinux : Install SELinux python3 tools ------- 2.87s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35
fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 2.86s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58
fedora.linux_system_roles.firewall : Install firewalld ------------------ 2.86s
/tmp/collections-qja/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31