Fix ci (#269)
committed by GitHub
parent a0fd566f2a
commit cc1fa209e2
.github/workflows/pre-commit.yml (vendored, 9 changes)
@@ -14,13 +14,4 @@ jobs:
       - uses: actions/checkout@v4
       - run: ./.github/workflows/run-pc.sh
         shell: bash
-  pre-commit-24:
-    container:
-      image: quay.io/ansible-product-demos/apd-ee-24
-      options: --user root
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - run: USE_PYTHON=python3.9 ./.github/workflows/run-pc.sh
-        shell: bash
.github/workflows/run-pc.sh (vendored, 3 changes)
@@ -1,6 +1,7 @@
 #!/bin/bash -x
 
-dnf install git-lfs -y
+# should no longer need this
+#dnf install git-lfs -y
 
 PYTHON_VARIANT="${USE_PYTHON:-python3.11}"
 PATH="$PATH:$HOME/.local/bin"
@@ -16,7 +16,7 @@ repos:
   - repo: https://github.com/ansible/ansible-lint.git
     # get latest release tag from https://github.com/ansible/ansible-lint/releases/
-    rev: v6.20.3
+    rev: v25.7.0
     hooks:
       - id: ansible-lint
         additional_dependencies:
@@ -17,12 +17,12 @@
     filters:
       name: "{{ aws_image_filter }}"
       architecture: "{{ aws_image_architecture | default(omit) }}"
-  register: amis
+  register: aws_amis
 
 - name: AWS| CREATE VM | save ami
   ansible.builtin.set_fact:
     aws_instance_ami: >
-      {{ (amis.images | selectattr('name', 'defined') | sort(attribute='creation_date'))[-2] }}
+      {{ (aws_amis.images | selectattr('name', 'defined') | sort(attribute='creation_date'))[-2] }}
 
 - name: AWS| CREATE VM | create instance
   amazon.aws.ec2_instance:
@@ -10,14 +10,14 @@
     wait: true
 
 - name: AWS | RESTORE VM | get volumes
-  register: r_vol_info
+  register: aws_r_vol_info
   amazon.aws.ec2_vol_info:
     region: "{{ aws_region }}"
     filters:
       attachment.instance-id: "{{ instance_id }}"
 
 - name: AWS | RESTORE VM | detach volumes
-  loop: "{{ r_vol_info.volumes }}"
+  loop: "{{ aws_r_vol_info.volumes }}"
   loop_control:
     loop_var: volume
     label: "{{ volume.id }}"
@@ -40,7 +40,7 @@
 
 - name: AWS | RESTORE VM | get all snapshots
   when: inventory_hostname not in aws_snapshots
-  register: r_snapshots
+  register: aws_r_snapshots
   amazon.aws.ec2_snapshot_info:
     region: "{{ aws_region }}"
     filters:
@@ -51,7 +51,7 @@
   amazon.aws.ec2_vol:
     region: "{{ aws_region }}"
     instance: "{{ instance_id }}"
-    snapshot: "{{ r_snapshots.snapshots[0].snapshot_id }}"
+    snapshot: "{{ aws_r_snapshots.snapshots[0].snapshot_id }}"
     device_name: "/dev/sda1"
 
 - name: AWS | RESTORE VM | start vm
@@ -12,18 +12,18 @@
     file: snapshot_vm.yml
 
 - name: AWS | SNAPSHOT VM | get volumes
-  register: r_vol_info
+  register: aws_r_vol_info
   amazon.aws.ec2_vol_info:
     region: "{{ aws_region }}"
     filters:
       attachment.instance-id: "{{ instance_id }}"
 
 - name: AWS | SNAPSHOT VM | take snapshots
-  loop: "{{ r_vol_info.volumes }}"
+  loop: "{{ aws_r_vol_info.volumes }}"
   loop_control:
     loop_var: volume
     label: "{{ volume.id }}"
-  register: r_snapshots
+  register: aws_r_snapshots
   amazon.aws.ec2_snapshot:
     region: "{{ aws_region }}"
     volume_id: "{{ volume.id }}"
@@ -32,11 +32,11 @@
 
 - name: AWS | SNAPSHOT VM | format snapshot stat
   ansible.builtin.set_fact:
-    snapshot_stat:
+    aws_snapshot_stat:
       - key: "{{ inventory_hostname }}"
-        value: "{{ r_snapshots.results | json_query(aws_ec2_snapshot_query) }}"
+        value: "{{ aws_r_snapshots.results | json_query(aws_ec2_snapshot_query) }}"
 
 - name: AWS | SNAPSHOT VM | record snapshot with host key
   ansible.builtin.set_stats:
     data:
-      aws_snapshots: "{{ snapshot_stat | items2dict }}"
+      aws_snapshots: "{{ aws_snapshot_stat | items2dict }}"
@@ -17,14 +17,14 @@
     kind: Route
     name: "{{ eda_controller_project_app_name }}"
     namespace: "{{ eda_controller_project }}"
-  register: r_eda_route
-  until: r_eda_route.resources[0].spec.host is defined
+  register: eda_controller_r_eda_route
+  until: eda_controller_r_eda_route.resources[0].spec.host is defined
   retries: 30
   delay: 45
 
 - name: Get eda-controller route hostname
   ansible.builtin.set_fact:
-    eda_controller_hostname: "{{ r_eda_route.resources[0].spec.host }}"
+    eda_controller_hostname: "{{ eda_controller_r_eda_route.resources[0].spec.host }}"
 
 - name: Wait for eda_controller to be running
   ansible.builtin.uri:
@@ -36,8 +36,8 @@
     validate_certs: false
     body_format: json
     status_code: 200
-  register: r_result
-  until: not r_result.failed
+  register: eda_controller_r_result
+  until: not eda_controller_r_result.failed
   retries: 60
   delay: 45
@@ -3,7 +3,7 @@
   redhat.openshift_virtualization.kubevirt_vm_info:
     name: "{{ item }}"
     namespace: "{{ vm_namespace }}"
-  register: state
+  register: snapshot_state
 
 - name: Stop VirtualMachine
   redhat.openshift_virtualization.kubevirt_vm:
@@ -11,7 +11,7 @@
     namespace: "{{ vm_namespace }}"
     running: false
     wait: true
-  when: state.resources.0.spec.running
+  when: snapshot_state.resources.0.spec.running
 
 - name: Create a VirtualMachineSnapshot
   kubernetes.core.k8s:
@@ -29,7 +29,7 @@
     wait: true
     wait_condition:
       type: Ready
-  register: snapshot
+  register: snapshot_snapshot
 
 - name: Start VirtualMachine
   redhat.openshift_virtualization.kubevirt_vm:
@@ -37,13 +37,13 @@
     namespace: "{{ vm_namespace }}"
     running: true
     wait: true
-  when: state.resources.0.spec.running
+  when: snapshot_state.resources.0.spec.running
 
 - name: Export snapshot name
   ansible.builtin.set_stats:
     data:
-      restore_snapshot_name: "{{ snapshot.result.metadata.name }}"
+      restore_snapshot_name: "{{ snapshot_snapshot.result.metadata.name }}"
 
 - name: Output snapshot name
   ansible.builtin.debug:
-    msg: "Successfully created snapshot {{ snapshot.result.metadata.name }}"
+    msg: "Successfully created snapshot {{ snapshot_snapshot.result.metadata.name }}"
@@ -3,18 +3,18 @@
   redhat.openshift_virtualization.kubevirt_vm_info:
     name: "{{ item }}"
     namespace: "{{ vm_namespace }}"
-  register: state
+  register: snapshot_state
 
 - name: List snapshots
   kubernetes.core.k8s_info:
     api_version: snapshot.kubevirt.io/v1alpha1
     kind: VirtualMachineSnapshot
     namespace: "{{ vm_namespace }}"
-  register: snapshot
+  register: snapshot_snapshot
 
 - name: Set snapshot name for {{ item }}
   ansible.builtin.set_fact:
-    latest_snapshot: "{{ snapshot.resources | selectattr('spec.source.name', 'equalto', item) | sort(attribute='metadata.creationTimestamp') | first }}"
+    snapshot_latest_snapshot: "{{ snapshot_snapshot.resources | selectattr('spec.source.name', 'equalto', item) | sort(attribute='metadata.creationTimestamp') | first }}"
 
 - name: Stop VirtualMachine
   redhat.openshift_virtualization.kubevirt_vm:
@@ -22,7 +22,7 @@
     namespace: "{{ vm_namespace }}"
     running: false
     wait: true
-  when: state.resources.0.spec.running
+  when: snapshot_state.resources.0.spec.running
 
 - name: Restore a VirtualMachineSnapshot
   kubernetes.core.k8s:
@@ -30,14 +30,14 @@
       apiVersion: snapshot.kubevirt.io/v1alpha1
       kind: VirtualMachineRestore
       metadata:
-        generateName: "{{ latest_snapshot.metadata.generateName }}"
+        generateName: "{{ snapshot_latest_snapshot.metadata.generateName }}"
        namespace: "{{ vm_namespace }}"
      spec:
        target:
          apiGroup: kubevirt.io
          kind: VirtualMachine
          name: "{{ item }}"
-        virtualMachineSnapshotName: "{{ latest_snapshot.metadata.name }}"
+        virtualMachineSnapshotName: "{{ snapshot_latest_snapshot.metadata.name }}"
     wait: true
     wait_condition:
       type: Ready
@@ -48,4 +48,4 @@
     namespace: "{{ vm_namespace }}"
     running: true
     wait: true
-  when: state.resources.0.spec.running
+  when: snapshot_state.resources.0.spec.running
@@ -8,12 +8,12 @@
   check_mode: false
 
 - name: Upgrade packages (yum)
-  ansible.builtin.yum:
+  ansible.legacy.dnf:
     name: '*'
     state: latest # noqa: package-latest - Intended to update packages to latest
     exclude: "{{ exclude_packages }}"
   when: ansible_pkg_mgr == "yum"
-  register: patchingresult_yum
+  register: patch_linux_patchingresult_yum
 
 - name: Upgrade packages (dnf)
   ansible.builtin.dnf:
@@ -21,17 +21,17 @@
     state: latest # noqa: package-latest - Intended to update packages to latest
     exclude: "{{ exclude_packages }}"
   when: ansible_pkg_mgr == "dnf"
-  register: patchingresult_dnf
+  register: patch_linux_patchingresult_dnf
 
 - name: Check to see if we need a reboot
   ansible.builtin.command: needs-restarting -r
-  register: result
-  changed_when: result.rc == 1
-  failed_when: result.rc > 1
+  register: patch_linux_result
+  changed_when: patch_linux_result.rc == 1
+  failed_when: patch_linux_result.rc > 1
   check_mode: false
 
 - name: Reboot Server if Necessary
   ansible.builtin.reboot:
   when:
-    - result.rc == 1
+    - patch_linux_result.rc == 1
     - allow_reboot
@@ -12,4 +12,4 @@
     category_names: "{{ win_update_categories | default(omit) }}"
     reboot: "{{ allow_reboot }}"
     state: installed
-  register: patchingresult
+  register: patch_windows_patchingresult
@@ -35,17 +35,17 @@
 <td>{{hostvars[linux_host]['ansible_distribution_version']|default("none")}}</td>
 <td>
 <ul>
-{% if hostvars[linux_host].patchingresult_yum.changed|default("false",true) == true %}
-{% for packagename in hostvars[linux_host].patchingresult_yum.changes.updated|sort %}
+{% if hostvars[linux_host].patch_linux_patchingresult_yum.changed|default("false",true) == true %}
+{% for packagename in hostvars[linux_host].patch_linux_patchingresult_yum.changes.updated|sort %}
 <li> {{ packagename[0] }} - {{ packagename[1] }} </li>
 {% endfor %}
-{% elif hostvars[linux_host].patchingresult_dnf.changed|default("false",true) == true %}
-{% for packagename in hostvars[linux_host].patchingresult_dnf.results|sort %}
+{% elif hostvars[linux_host].patch_linux_patchingresult_dnf.changed|default("false",true) == true %}
+{% for packagename in hostvars[linux_host].patch_linux_patchingresult_dnf.results|sort %}
 <li> {{ packagename }} </li>
 {% endfor %}
-{% elif hostvars[linux_host].patchingresult_dnf.changed is undefined %}
+{% elif hostvars[linux_host].patch_linux_patchingresult_dnf.changed is undefined %}
 <li> Patching Failed </li>
-{% elif hostvars[linux_host].patchingresult_yum.changed is undefined %}
+{% elif hostvars[linux_host].patch_linux_patchingresult_yum.changed is undefined %}
 <li> Patching Failed </li>
 {% else %}
 <li> Compliant </li>
@@ -13,10 +13,10 @@
     state: present
     namespace: patching-report
     definition: "{{ lookup('ansible.builtin.template', 'resources.yaml.j2') }}"
-  register: resources_output
+  register: report_ocp_patching_resources_output
 
 - name: Display link to patching report
   ansible.builtin.debug:
     msg:
       - "Patching report availbable at:"
-      - "{{ resources_output.result.results[3].result.spec.port.targetPort }}://{{ resources_output.result.results[3].result.spec.host }}"
+      - "{{ report_ocp_patching_resources_output.result.results[3].result.spec.port.targetPort }}://{{ report_ocp_patching_resources_output.result.results[3].result.spec.host }}"
@@ -35,17 +35,17 @@
 <td>{{hostvars[linux_host]['ansible_distribution_version']|default("none")}}</td>
 <td>
 <ul>
-{% if hostvars[linux_host].patchingresult_yum.changed|default("false",true) == true %}
-{% for packagename in hostvars[linux_host].patchingresult_yum.changes.updated|sort %}
+{% if hostvars[linux_host].patch_linux_patchingresult_yum.changed|default("false",true) == true %}
+{% for packagename in hostvars[linux_host].patch_linux_patchingresult_yum.changes.updated|sort %}
 <li> {{ packagename[0] }} - {{ packagename[1] }} </li>
 {% endfor %}
-{% elif hostvars[linux_host].patchingresult_dnf.changed|default("false",true) == true %}
-{% for packagename in hostvars[linux_host].patchingresult_dnf.results|sort %}
+{% elif hostvars[linux_host].patch_linux_patchingresult_dnf.changed|default("false",true) == true %}
+{% for packagename in hostvars[linux_host].patch_linux_patchingresult_dnf.results|sort %}
 <li> {{ packagename }} </li>
 {% endfor %}
-{% elif hostvars[linux_host].patchingresult_dnf.changed is undefined %}
+{% elif hostvars[linux_host].patch_linux_patchingresult_dnf.changed is undefined %}
 <li> Patching Failed </li>
-{% elif hostvars[linux_host].patchingresult_yum.changed is undefined %}
+{% elif hostvars[linux_host].patch_linux_patchingresult_yum.changed is undefined %}
 <li> Patching Failed </li>
 {% else %}
 <li> Compliant </li>
@@ -3,7 +3,7 @@
   ansible.builtin.include_vars: "{{ ansible_system }}.yml"
 
 - name: Install httpd package
-  ansible.builtin.yum:
+  ansible.builtin.dnf:
     name: httpd
     state: installed
   check_mode: false
@@ -6,7 +6,7 @@
   ansible.builtin.find:
     paths: "{{ doc_root }}/{{ reports_dir }}"
     patterns: '*.html'
-  register: reports
+  register: report_server_reports
   check_mode: false
 
 - name: Publish landing page
@@ -6,7 +6,7 @@
   ansible.windows.win_find:
     paths: "{{ doc_root }}/{{ reports_dir }}"
     patterns: '*.html'
-  register: reports
+  register: report_server_reports
   check_mode: false
 
 - name: Publish landing page
@@ -20,7 +20,7 @@
 </center>
 <table class="table table-striped mt32 main_net_table">
 <tbody>
-{% for report in reports.files %}
+{% for report in report_server_reports.files %}
 {% set page = report.path.split('/')[-1] %}
 <tr>
 <td class="summary_info">
@@ -20,7 +20,7 @@
 </center>
 <table class="table table-striped mt32 main_net_table">
 <tbody>
-{% for report in reports.files %}
+{% for report in report_server_reports.files %}
 {% set page = report.path.split('\\')[-1] %}
 <tr>
 <td class="summary_info">
@@ -10,7 +10,7 @@
     name: "{{ instance_name }}"
 
 - name: Remove rhui client packages
-  ansible.builtin.yum:
+  ansible.builtin.dnf:
     name:
       - google-rhui-client*
       - rh-amazon-rhui-client*
@@ -19,17 +19,17 @@
 - name: Get current repos
   ansible.builtin.command:
     cmd: ls /etc/yum.repos.d/
-  register: repos
+  register: register_host_repos
   changed_when: false
 
 - name: Remove existing rhui repos
   ansible.builtin.file:
     path: "/etc/yum.repos.d/{{ item }}"
     state: absent
-  loop: "{{ repos.stdout_lines }}"
+  loop: "{{ register_host_repos.stdout_lines }}"
 
 - name: Install satellite certificate
-  ansible.builtin.yum:
+  ansible.builtin.dnf:
     name: "{{ satellite_url }}/pub/katello-ca-consumer-latest.noarch.rpm"
     state: present
     validate_certs: false
@@ -53,7 +53,7 @@
     state: enabled
 
 - name: Install satellite client
-  ansible.builtin.yum:
+  ansible.builtin.dnf:
     name:
       - katello-host-tools
       - katello-host-tools-tracer
@@ -1,6 +1,6 @@
 ---
 - name: Install openscap client packages
-  ansible.builtin.yum:
+  ansible.builtin.dnf:
     name:
       - openscap-scanner
       - rubygem-foreman_scap_client
@@ -15,18 +15,18 @@
     force_basic_auth: true
     body_format: json
     validate_certs: false
-  register: policies
+  register: scap_client_policies
   no_log: "{{ foreman_operations_scap_client_secure_logging }}"
 
 - name: Build policy {{ policy_name }}
   ansible.builtin.set_fact:
-    policy: "{{ policy | default([]) }} + {{ [item] }}"
-  loop: "{{ policies.json.results }}"
+    scap_client_policy: "{{ scap_client_policy | default([]) }} + {{ [item] }}"
+  loop: "{{ scap_client_policies.json.results }}"
   when: item.name in policy_name or policy_name == 'all'
 
 - name: Fail if no policy found with required name
   ansible.builtin.fail:
-  when: policy is not defined
+  when: scap_client_policy is not defined
 
 - name: Get scap content information
   ansible.builtin.uri:
@@ -37,8 +37,8 @@
     force_basic_auth: false
     body_format: json
     validate_certs: false
-  register: scapcontents
-  loop: "{{ policy }}"
+  register: scap_client_scapcontents
+  loop: "{{ scap_client_policy }}"
   no_log: "{{ foreman_operations_scap_client_secure_logging }}"
 
 - name: Get tailoring content information
@@ -50,21 +50,21 @@
     force_basic_auth: false
     body_format: json
     validate_certs: false
-  register: tailoringfiles
+  register: scap_client_tailoringfiles
   when: item.tailoring_file_id | int > 0 | d(False)
-  loop: "{{ policy }}"
+  loop: "{{ scap_client_policy }}"
   no_log: "{{ foreman_operations_scap_client_secure_logging }}"
 
 - name: Build scap content parameters
   ansible.builtin.set_fact:
-    scap_content: "{{ scap_content | default({}) | combine({item.json.id: item.json}) }}"
-  loop: "{{ scapcontents.results }}"
+    scap_client_scap_content: "{{ scap_client_scap_content | default({}) | combine({item.json.id: item.json}) }}"
+  loop: "{{ scap_client_scapcontents.results }}"
 
 - name: Build tailoring content parameters
   ansible.builtin.set_fact:
-    tailoring_files: "{{ tailoring_files | default({}) | combine({item.json.id: item.json}) }}"
+    scap_client_tailoring_files: "{{ scap_client_tailoring_files | default({}) | combine({item.json.id: item.json}) }}"
   when: item.json is defined
-  loop: "{{ tailoringfiles.results }}"
+  loop: "{{ scap_client_tailoringfiles.results }}"
 
 - name: Apply openscap client configuration template
   ansible.builtin.template:
@@ -78,7 +78,7 @@
 # cron:
 # name: "Openscap Execution"
 # cron_file: 'foreman_openscap_client'
-# job: '/usr/bin/foreman_scap_client {{policy.id}} > /dev/null'
+# job: '/usr/bin/foreman_scap_client {{scap_client_policy.id}} > /dev/null'
 # weekday: "{{crontab_weekdays}}"
 # hour: "{{crontab_hour}}"
 # minute: "{{crontab_minute}}"
@@ -20,12 +20,12 @@
 
 # Install subscription-manager if it's not there
 - name: Install subscription-manager
-  ansible.builtin.yum:
+  ansible.builtin.dnf:
     name: subscription-manager
     state: present
 
 - name: Remove rhui client packages
-  ansible.builtin.yum:
+  ansible.builtin.dnf:
     name: rh-amazon-rhui-client*
     state: removed
@@ -43,7 +43,7 @@
   when: "'rhui' in item"
 
 - name: Install katello package
-  ansible.builtin.yum:
+  ansible.builtin.dnf:
     name: "https://{{ sat_url }}/pub/katello-ca-consumer-latest.noarch.rpm"
     state: present
     validate_certs: false
@@ -8,7 +8,7 @@
   tasks:
     # Install yum-utils if it's not there
    - name: Install yum-utils
-     ansible.builtin.yum:
+     ansible.builtin.dnf:
        name: yum-utils
        state: installed
      check_mode: false
@@ -16,7 +16,7 @@
     key: "{{ sudo_user }}"
 
 - name: Check Cleanup package
-  ansible.builtin.yum:
+  ansible.builtin.dnf:
     name: at
     state: present
@@ -5,7 +5,7 @@
   tasks:
     # Install yum-utils if it's not there
    - name: Install yum-utils
-     ansible.builtin.yum:
+     ansible.builtin.dnf:
        name: yum-utils
        state: installed