11 Commits

Author SHA1 Message Date
Matthew Fernandez
cc1fa209e2 Fix ci (#269) 2025-08-11 15:02:13 -06:00
Zach LeBlanc
a0fd566f2a Add Documentation for Connecting to Windows Hosts (#258) 2025-07-14 15:14:06 -04:00
Chris Edillon
a7b79faf34 Refer to bootstrap repo for initial APD setup (#257) 2025-07-09 13:07:17 -06:00
Chris Edillon
af7d93fcdb Improve compliance report firewalld conditional (#253)
Co-authored-by: Matthew Fernandez <l3acon@users.noreply.github.com>
2025-06-25 14:00:29 -06:00
Matthew Fernandez
0634643f21 Fix AWS groups (#255) 2025-06-25 13:06:49 -04:00
Todd Ruch
db97b38fbc Resolve parameter failure in Windows "Create some users" task (#250) 2025-06-20 14:38:08 -04:00
Chris Edillon
7468d14a98 support building multi-arch EE image (#249)
Co-authored-by: Matthew Fernandez <l3acon@users.noreply.github.com>
2025-06-18 16:49:04 -04:00
Matthew Fernandez
8a70edbfdc Attempt galaxy workaround (#252)
this will eventually be re-worked to put roles in our EE
2025-06-17 10:00:20 -06:00
Matthew Fernandez
9a93004e0a Fix mistake where the main README.md is overridden. (#243) 2025-05-13 12:08:50 -06:00
Matthew Fernandez
64f7c88114 Refactor pre commit (#237)
Wheee!
2025-05-06 14:24:25 -06:00
Chris Edillon
4285a68f3e Update DISA supplemental roles for RHEL STIG (#238) 2025-05-05 11:11:14 -06:00
47 changed files with 344 additions and 318 deletions

View File

@@ -1,10 +1,16 @@
--- ---
profile: production profile: production
offline: false offline: true
skip_list: skip_list:
- "galaxy[no-changelog]" - "galaxy[no-changelog]"
warn_list:
# seems to be a bug, see https://github.com/ansible/ansible-lint/issues/4172
- "fqcn[canonical]"
# @matferna: really not sure why lint thinks it can't find jmespath, it is installed and functional
- "jinja[invalid]"
exclude_paths: exclude_paths:
# would be better to move the roles here to the top-level roles directory # would be better to move the roles here to the top-level roles directory
- collections/ansible_collections/demo/compliance/roles/ - collections/ansible_collections/demo/compliance/roles/

BIN
.github/images/windows_vm_password.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 45 KiB

25
.github/workflows/README.md vendored Normal file
View File

@@ -0,0 +1,25 @@
# GitHub Actions
## Background
We want to make attempts to run our integration tests in the same manner whether using GitHub actions or on a developer's machine locally. For this reason, the tests are curated to run using container images. As of this writing, two images exist which we would like to test against:
- quay.io/ansible-product-demos/apd-ee-24:latest
- quay.io/ansible-product-demos/apd-ee-25:latest
These images are built given the structure defined in their respective EE [definitions](../execution_environments). Because they differ (mainly due to their python versions), each gets some special handling.
## Troubleshooting GitHub Actions
### Interactive
It is likely the most straight-forward approach to interactively debug issues. The following podman command can be run from the project root directory to replicate the GitHub action:
```
podman run \
--user root \
-v $(pwd):/runner:Z \
-it \
<image> \
/bin/bash
```
`<image>` is one of `quay.io/ansible-product-demos/apd-ee-25:latest`, `quay.io/ansible-product-demos/apd-ee-24:latest`
It is not exact because GitHub seems to run closer to a sidecar container paradigm, and uses docker instead of podman, but hopefully it's close enough.
For the 24 EE, the python interpreter version is set for our pre-commit script like so: `USE_PYTHON=python3.9 ./.github/workflows/run-pc.sh`
The 25 EE is similarly run but without the need for this variable: `./.github/workflows/run-pc.sh`

View File

@@ -4,17 +4,14 @@ on:
- push - push
- pull_request_target - pull_request_target
env:
ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN: ${{ secrets.ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN }}
ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN: ${{ secrets.ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN }}
jobs: jobs:
pre-commit: pre-commit-25:
name: pre-commit container:
image: quay.io/ansible-product-demos/apd-ee-25
options: --user root
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: actions/setup-python@v5 - run: ./.github/workflows/run-pc.sh
- uses: pre-commit/action@v3.0.1 shell: bash
...

25
.github/workflows/run-pc.sh vendored Executable file
View File

@@ -0,0 +1,25 @@
#!/bin/bash -x
# should no longer need this
#dnf install git-lfs -y
PYTHON_VARIANT="${USE_PYTHON:-python3.11}"
PATH="$PATH:$HOME/.local/bin"
# install pip
eval "${PYTHON_VARIANT} -m pip install --user --upgrade pip"
# try to fix 2.4 incompatibility
eval "${PYTHON_VARIANT} -m pip install --user --upgrade setuptools wheel twine check-wheel-contents"
# install pre-commit
eval "${PYTHON_VARIANT} -m pip install --user pre-commit"
# view pip packages
eval "${PYTHON_VARIANT} -m pip freeze --local"
# fix permissions on directory
git config --global --add safe.directory $(pwd)
# run pre-commit
pre-commit run --config $(pwd)/.pre-commit-gh.yml --show-diff-on-failure --color=always

1
.gitignore vendored
View File

@@ -13,3 +13,4 @@ roles/*
.cache/ .cache/
.ansible/ .ansible/
**/tmp/ **/tmp/
execution_environments/context/

View File

@@ -14,13 +14,12 @@ repos:
- id: check-json - id: check-json
- id: check-symlinks - id: check-symlinks
- repo: https://github.com/ansible/ansible-lint.git - repo: local
# get latest release tag from https://github.com/ansible/ansible-lint/releases/
rev: v6.20.3
hooks: hooks:
- id: ansible-lint - id: ansible-lint
additional_dependencies: name: ansible-navigator lint --eei quay.io/ansible-product-demos/apd-ee-25:latest --mode stdout
- jmespath language: python
entry: bash -c "ansible-navigator lint --eei quay.io/ansible-product-demos/apd-ee-25 -v --force-color --mode stdout"
- repo: https://github.com/psf/black-pre-commit-mirror - repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.11.0 rev: 23.11.0

30
.pre-commit-gh.yml Normal file
View File

@@ -0,0 +1,30 @@
---
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: trailing-whitespace
exclude: rhel[89]STIG/.*$
- id: check-yaml
exclude: \.j2.(yaml|yml)$|\.(yaml|yml).j2$
args: [--unsafe] # see https://github.com/pre-commit/pre-commit-hooks/issues/273
- id: check-toml
- id: check-json
- id: check-symlinks
- repo: https://github.com/ansible/ansible-lint.git
# get latest release tag from https://github.com/ansible/ansible-lint/releases/
rev: v25.7.0
hooks:
- id: ansible-lint
additional_dependencies:
- jmespath
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.11.0
hooks:
- id: black
exclude: rhel[89]STIG/.*$
...

View File

@@ -1,10 +1,9 @@
[![Lab](https://img.shields.io/badge/Try%20Me-EE0000?style=for-the-badge&logo=redhat&logoColor=white)](https://red.ht/aap-product-demos)
[![Dev Spaces](https://img.shields.io/badge/Customize%20Here-0078d7.svg?style=for-the-badge&logo=visual-studio-code&logoColor=white)](https://workspaces.openshift.com/f?url=https://github.com/ansible/product-demos)
[![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit) [![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit)
[![Dev Spaces](https://img.shields.io/badge/Customize%20Here-0078d7.svg?style=for-the-badge&logo=visual-studio-code&logoColor=white)](https://workspaces.openshift.com/f?url=https://github.com/ansible/product-demos)
# Official Ansible Product Demos # APD - Ansible Product Demos
This is a centralized location for Ansible Product Demos. This project is a collection of use cases implemented with Ansible for use with the [Ansible Automation Platform](https://www.redhat.com/en/technologies/management/ansible). The Ansible Product Demos (APD) project is a set of Ansible demos that are deployed using [Red Hat Ansible Automation Platform](https://www.redhat.com/en/technologies/management/ansible). It uses configuration-as-code to create AAP resources such as projects, templates, and credentials that form the basis for demonstrating automation use cases in several technology domains:
| Demo Name | Description | | Demo Name | Description |
|-----------|-------------| |-----------|-------------|
@@ -15,54 +14,21 @@ This is a centralized location for Ansible Product Demos. This project is a coll
| [OpenShift](openshift/README.md) | OpenShift automation demos | | [OpenShift](openshift/README.md) | OpenShift automation demos |
| [Satellite](satellite/README.md) | Demos of automation with Red Hat Satellite Server | | [Satellite](satellite/README.md) | Demos of automation with Red Hat Satellite Server |
## Contributions
If you would like to contribute to this project please refer to [contribution guide](CONTRIBUTING.md) for best practices.
## Using this project ## Using this project
This project is tested for compatibility with the [demo.redhat.com Ansible Product Demos](https://demo.redhat.com/catalog?search=product+demos&item=babylon-catalog-prod%2Fopenshift-cnv.aap-product-demos-cnv.prod) lab environment. To use with other Ansible Automation Platform installations, review the [prerequisite documentation](https://github.com/ansible/product-demos-bootstrap). Use the [APD bootstrap](https://github.com/ansible/product-demos-bootstrap) repo to add APD to an existing Ansible Automation Platform deployment. The bootstrap repo provides the initial manual prerequisite steps as well as a playbook for adding APD to the existing deployment.
> NOTE: demo.redhat.com is available to Red Hat Associates and Partners with a valid account. For Red Hat associates and partners, there is an Ansible Product Demos catalog item [available on demo.redhat.com](https://red.ht/apd-sandbox) (account required).
1. First you must create a credential for [Automation Hub](https://console.redhat.com/ansible/automation-hub/) to successfully sync collections used by this project.
1. In the Credentials section of the Controller UI, add a new Credential called `Automation Hub` with the type `Ansible Galaxy/Automation Hub API Token`
2. You can obtain a token [here](https://console.redhat.com/ansible/automation-hub/token). This page will also provide the Server URL and Auth Server URL.
3. Next, click on Organizations and edit the `Default` organization. Add your `Automation Hub` credential to the `Galaxy Credentials` section. Don't forget to click **Save**!!
> You can also use an execution environment for disconnected environments. To do this, you must disable collection downloads in the Controller. This can be done in `Settings` > `Job Settings`. This setting prevents the controller from downloading collections listed in the [collections/requirements.yml](collections/requirements.yml) file.
2. If it is not already created for you, add an Execution Environment called `product-demos`
- Name: product-demos
- Image: quay.io/acme_corp/product-demos-ee:latest
- Pull: Only pull the image if not present before running
3. If it is not already created for you, create a Project called `Ansible Product Demos` with this repo as a source. NOTE: if you are using a fork, be sure that you have the correct URL. Update the project.
4. Finally, Create a Job Template called `Setup` with the following configuration:
- Name: Setup
- Inventory: Demo Inventory
- Exec Env: product-demos
- Playbook: setup_demo.yml
- Credentials:
- Type: Red Hat Ansible Automation Platform
- Name: Controller Credential
- Extra vars:
demo: <linux or windows or cloud or network>
## Bring Your Own Demo ## Bring Your Own Demo
Can't find what you're looking for? Customize this repo to make it your own. Can't find what you're looking for? Customize this repo to make it your own.
1. Create a fork of this repo. 1. Create a fork of this repo.
2. Update the URL of the `Ansible Project Demos` in the Controller. 2. Update the URL of the `Ansible Project Demos` project in your Ansible Automation Platform controller.
3. Make changes as needed and run the **Product Demos | Single demo setup** job 3. Make changes to your fork as needed and run the **Product Demos | Single demo setup** job
See the [contribution guide](CONTRIBUTING.md) for more details on how to customize the project. See the [contributing guide](CONTRIBUTING.md) for more details on how to customize the project.
--- ---

View File

@@ -1,5 +1,5 @@
[defaults] [defaults]
collections_path=./collections collections_path=./collections:/usr/share/ansible/collections
roles_path=./roles roles_path=./roles
[galaxy] [galaxy]

View File

@@ -17,12 +17,12 @@
filters: filters:
name: "{{ aws_image_filter }}" name: "{{ aws_image_filter }}"
architecture: "{{ aws_image_architecture | default(omit) }}" architecture: "{{ aws_image_architecture | default(omit) }}"
register: amis register: aws_amis
- name: AWS| CREATE VM | save ami - name: AWS| CREATE VM | save ami
ansible.builtin.set_fact: ansible.builtin.set_fact:
aws_instance_ami: > aws_instance_ami: >
{{ (amis.images | selectattr('name', 'defined') | sort(attribute='creation_date'))[-2] }} {{ (aws_amis.images | selectattr('name', 'defined') | sort(attribute='creation_date'))[-2] }}
- name: AWS| CREATE VM | create instance - name: AWS| CREATE VM | create instance
amazon.aws.ec2_instance: amazon.aws.ec2_instance:

View File

@@ -10,14 +10,14 @@
wait: true wait: true
- name: AWS | RESTORE VM | get volumes - name: AWS | RESTORE VM | get volumes
register: r_vol_info register: aws_r_vol_info
amazon.aws.ec2_vol_info: amazon.aws.ec2_vol_info:
region: "{{ aws_region }}" region: "{{ aws_region }}"
filters: filters:
attachment.instance-id: "{{ instance_id }}" attachment.instance-id: "{{ instance_id }}"
- name: AWS | RESTORE VM | detach volumes - name: AWS | RESTORE VM | detach volumes
loop: "{{ r_vol_info.volumes }}" loop: "{{ aws_r_vol_info.volumes }}"
loop_control: loop_control:
loop_var: volume loop_var: volume
label: "{{ volume.id }}" label: "{{ volume.id }}"
@@ -40,7 +40,7 @@
- name: AWS | RESTORE VM | get all snapshots - name: AWS | RESTORE VM | get all snapshots
when: inventory_hostname not in aws_snapshots when: inventory_hostname not in aws_snapshots
register: r_snapshots register: aws_r_snapshots
amazon.aws.ec2_snapshot_info: amazon.aws.ec2_snapshot_info:
region: "{{ aws_region }}" region: "{{ aws_region }}"
filters: filters:
@@ -51,7 +51,7 @@
amazon.aws.ec2_vol: amazon.aws.ec2_vol:
region: "{{ aws_region }}" region: "{{ aws_region }}"
instance: "{{ instance_id }}" instance: "{{ instance_id }}"
snapshot: "{{ r_snapshots.snapshots[0].snapshot_id }}" snapshot: "{{ aws_r_snapshots.snapshots[0].snapshot_id }}"
device_name: "/dev/sda1" device_name: "/dev/sda1"
- name: AWS | RESTORE VM | start vm - name: AWS | RESTORE VM | start vm

View File

@@ -12,18 +12,18 @@
file: snapshot_vm.yml file: snapshot_vm.yml
- name: AWS | SNAPSHOT VM | get volumes - name: AWS | SNAPSHOT VM | get volumes
register: r_vol_info register: aws_r_vol_info
amazon.aws.ec2_vol_info: amazon.aws.ec2_vol_info:
region: "{{ aws_region }}" region: "{{ aws_region }}"
filters: filters:
attachment.instance-id: "{{ instance_id }}" attachment.instance-id: "{{ instance_id }}"
- name: AWS | SNAPSHOT VM | take snapshots - name: AWS | SNAPSHOT VM | take snapshots
loop: "{{ r_vol_info.volumes }}" loop: "{{ aws_r_vol_info.volumes }}"
loop_control: loop_control:
loop_var: volume loop_var: volume
label: "{{ volume.id }}" label: "{{ volume.id }}"
register: r_snapshots register: aws_r_snapshots
amazon.aws.ec2_snapshot: amazon.aws.ec2_snapshot:
region: "{{ aws_region }}" region: "{{ aws_region }}"
volume_id: "{{ volume.id }}" volume_id: "{{ volume.id }}"
@@ -32,11 +32,11 @@
- name: AWS | SNAPSHOT VM | format snapshot stat - name: AWS | SNAPSHOT VM | format snapshot stat
ansible.builtin.set_fact: ansible.builtin.set_fact:
snapshot_stat: aws_snapshot_stat:
- key: "{{ inventory_hostname }}" - key: "{{ inventory_hostname }}"
value: "{{ r_snapshots.results | json_query(aws_ec2_snapshot_query) }}" value: "{{ aws_r_snapshots.results | json_query(aws_ec2_snapshot_query) }}"
- name: AWS | SNAPSHOT VM | record snapshot with host key - name: AWS | SNAPSHOT VM | record snapshot with host key
ansible.builtin.set_stats: ansible.builtin.set_stats:
data: data:
aws_snapshots: "{{ snapshot_stat | items2dict }}" aws_snapshots: "{{ aws_snapshot_stat | items2dict }}"

View File

@@ -17,14 +17,14 @@
kind: Route kind: Route
name: "{{ eda_controller_project_app_name }}" name: "{{ eda_controller_project_app_name }}"
namespace: "{{ eda_controller_project }}" namespace: "{{ eda_controller_project }}"
register: r_eda_route register: eda_controller_r_eda_route
until: r_eda_route.resources[0].spec.host is defined until: eda_controller_r_eda_route.resources[0].spec.host is defined
retries: 30 retries: 30
delay: 45 delay: 45
- name: Get eda-controller route hostname - name: Get eda-controller route hostname
ansible.builtin.set_fact: ansible.builtin.set_fact:
eda_controller_hostname: "{{ r_eda_route.resources[0].spec.host }}" eda_controller_hostname: "{{ eda_controller_r_eda_route.resources[0].spec.host }}"
- name: Wait for eda_controller to be running - name: Wait for eda_controller to be running
ansible.builtin.uri: ansible.builtin.uri:
@@ -36,8 +36,8 @@
validate_certs: false validate_certs: false
body_format: json body_format: json
status_code: 200 status_code: 200
register: r_result register: eda_controller_r_result
until: not r_result.failed until: not eda_controller_r_result.failed
retries: 60 retries: 60
delay: 45 delay: 45

View File

@@ -3,7 +3,7 @@
redhat.openshift_virtualization.kubevirt_vm_info: redhat.openshift_virtualization.kubevirt_vm_info:
name: "{{ item }}" name: "{{ item }}"
namespace: "{{ vm_namespace }}" namespace: "{{ vm_namespace }}"
register: state register: snapshot_state
- name: Stop VirtualMachine - name: Stop VirtualMachine
redhat.openshift_virtualization.kubevirt_vm: redhat.openshift_virtualization.kubevirt_vm:
@@ -11,7 +11,7 @@
namespace: "{{ vm_namespace }}" namespace: "{{ vm_namespace }}"
running: false running: false
wait: true wait: true
when: state.resources.0.spec.running when: snapshot_state.resources.0.spec.running
- name: Create a VirtualMachineSnapshot - name: Create a VirtualMachineSnapshot
kubernetes.core.k8s: kubernetes.core.k8s:
@@ -29,7 +29,7 @@
wait: true wait: true
wait_condition: wait_condition:
type: Ready type: Ready
register: snapshot register: snapshot_snapshot
- name: Start VirtualMachine - name: Start VirtualMachine
redhat.openshift_virtualization.kubevirt_vm: redhat.openshift_virtualization.kubevirt_vm:
@@ -37,13 +37,13 @@
namespace: "{{ vm_namespace }}" namespace: "{{ vm_namespace }}"
running: true running: true
wait: true wait: true
when: state.resources.0.spec.running when: snapshot_state.resources.0.spec.running
- name: Export snapshot name - name: Export snapshot name
ansible.builtin.set_stats: ansible.builtin.set_stats:
data: data:
restore_snapshot_name: "{{ snapshot.result.metadata.name }}" restore_snapshot_name: "{{ snapshot_snapshot.result.metadata.name }}"
- name: Output snapshot name - name: Output snapshot name
ansible.builtin.debug: ansible.builtin.debug:
msg: "Successfully created snapshot {{ snapshot.result.metadata.name }}" msg: "Successfully created snapshot {{ snapshot_snapshot.result.metadata.name }}"

View File

@@ -3,18 +3,18 @@
redhat.openshift_virtualization.kubevirt_vm_info: redhat.openshift_virtualization.kubevirt_vm_info:
name: "{{ item }}" name: "{{ item }}"
namespace: "{{ vm_namespace }}" namespace: "{{ vm_namespace }}"
register: state register: snapshot_state
- name: List snapshots - name: List snapshots
kubernetes.core.k8s_info: kubernetes.core.k8s_info:
api_version: snapshot.kubevirt.io/v1alpha1 api_version: snapshot.kubevirt.io/v1alpha1
kind: VirtualMachineSnapshot kind: VirtualMachineSnapshot
namespace: "{{ vm_namespace }}" namespace: "{{ vm_namespace }}"
register: snapshot register: snapshot_snapshot
- name: Set snapshot name for {{ item }} - name: Set snapshot name for {{ item }}
ansible.builtin.set_fact: ansible.builtin.set_fact:
latest_snapshot: "{{ snapshot.resources | selectattr('spec.source.name', 'equalto', item) | sort(attribute='metadata.creationTimestamp') | first }}" snapshot_latest_snapshot: "{{ snapshot_snapshot.resources | selectattr('spec.source.name', 'equalto', item) | sort(attribute='metadata.creationTimestamp') | first }}"
- name: Stop VirtualMachine - name: Stop VirtualMachine
redhat.openshift_virtualization.kubevirt_vm: redhat.openshift_virtualization.kubevirt_vm:
@@ -22,7 +22,7 @@
namespace: "{{ vm_namespace }}" namespace: "{{ vm_namespace }}"
running: false running: false
wait: true wait: true
when: state.resources.0.spec.running when: snapshot_state.resources.0.spec.running
- name: Restore a VirtualMachineSnapshot - name: Restore a VirtualMachineSnapshot
kubernetes.core.k8s: kubernetes.core.k8s:
@@ -30,14 +30,14 @@
apiVersion: snapshot.kubevirt.io/v1alpha1 apiVersion: snapshot.kubevirt.io/v1alpha1
kind: VirtualMachineRestore kind: VirtualMachineRestore
metadata: metadata:
generateName: "{{ latest_snapshot.metadata.generateName }}" generateName: "{{ snapshot_latest_snapshot.metadata.generateName }}"
namespace: "{{ vm_namespace }}" namespace: "{{ vm_namespace }}"
spec: spec:
target: target:
apiGroup: kubevirt.io apiGroup: kubevirt.io
kind: VirtualMachine kind: VirtualMachine
name: "{{ item }}" name: "{{ item }}"
virtualMachineSnapshotName: "{{ latest_snapshot.metadata.name }}" virtualMachineSnapshotName: "{{ snapshot_latest_snapshot.metadata.name }}"
wait: true wait: true
wait_condition: wait_condition:
type: Ready type: Ready
@@ -48,4 +48,4 @@
namespace: "{{ vm_namespace }}" namespace: "{{ vm_namespace }}"
running: true running: true
wait: true wait: true
when: state.resources.0.spec.running when: snapshot_state.resources.0.spec.running

View File

@@ -8,12 +8,12 @@
check_mode: false check_mode: false
- name: Upgrade packages (yum) - name: Upgrade packages (yum)
ansible.builtin.yum: ansible.legacy.dnf:
name: '*' name: '*'
state: latest # noqa: package-latest - Intended to update packages to latest state: latest # noqa: package-latest - Intended to update packages to latest
exclude: "{{ exclude_packages }}" exclude: "{{ exclude_packages }}"
when: ansible_pkg_mgr == "yum" when: ansible_pkg_mgr == "yum"
register: patchingresult_yum register: patch_linux_patchingresult_yum
- name: Upgrade packages (dnf) - name: Upgrade packages (dnf)
ansible.builtin.dnf: ansible.builtin.dnf:
@@ -21,17 +21,17 @@
state: latest # noqa: package-latest - Intended to update packages to latest state: latest # noqa: package-latest - Intended to update packages to latest
exclude: "{{ exclude_packages }}" exclude: "{{ exclude_packages }}"
when: ansible_pkg_mgr == "dnf" when: ansible_pkg_mgr == "dnf"
register: patchingresult_dnf register: patch_linux_patchingresult_dnf
- name: Check to see if we need a reboot - name: Check to see if we need a reboot
ansible.builtin.command: needs-restarting -r ansible.builtin.command: needs-restarting -r
register: result register: patch_linux_result
changed_when: result.rc == 1 changed_when: patch_linux_result.rc == 1
failed_when: result.rc > 1 failed_when: patch_linux_result.rc > 1
check_mode: false check_mode: false
- name: Reboot Server if Necessary - name: Reboot Server if Necessary
ansible.builtin.reboot: ansible.builtin.reboot:
when: when:
- result.rc == 1 - patch_linux_result.rc == 1
- allow_reboot - allow_reboot

View File

@@ -12,4 +12,4 @@
category_names: "{{ win_update_categories | default(omit) }}" category_names: "{{ win_update_categories | default(omit) }}"
reboot: "{{ allow_reboot }}" reboot: "{{ allow_reboot }}"
state: installed state: installed
register: patchingresult register: patch_windows_patchingresult

View File

@@ -35,17 +35,17 @@
<td>{{hostvars[linux_host]['ansible_distribution_version']|default("none")}}</td> <td>{{hostvars[linux_host]['ansible_distribution_version']|default("none")}}</td>
<td> <td>
<ul> <ul>
{% if hostvars[linux_host].patchingresult_yum.changed|default("false",true) == true %} {% if hostvars[linux_host].patch_linux_patchingresult_yum.changed|default("false",true) == true %}
{% for packagename in hostvars[linux_host].patchingresult_yum.changes.updated|sort %} {% for packagename in hostvars[linux_host].patch_linux_patchingresult_yum.changes.updated|sort %}
<li> {{ packagename[0] }} - {{ packagename[1] }} </li> <li> {{ packagename[0] }} - {{ packagename[1] }} </li>
{% endfor %} {% endfor %}
{% elif hostvars[linux_host].patchingresult_dnf.changed|default("false",true) == true %} {% elif hostvars[linux_host].patch_linux_patchingresult_dnf.changed|default("false",true) == true %}
{% for packagename in hostvars[linux_host].patchingresult_dnf.results|sort %} {% for packagename in hostvars[linux_host].patch_linux_patchingresult_dnf.results|sort %}
<li> {{ packagename }} </li> <li> {{ packagename }} </li>
{% endfor %} {% endfor %}
{% elif hostvars[linux_host].patchingresult_dnf.changed is undefined %} {% elif hostvars[linux_host].patch_linux_patchingresult_dnf.changed is undefined %}
<li> Patching Failed </li> <li> Patching Failed </li>
{% elif hostvars[linux_host].patchingresult_yum.changed is undefined %} {% elif hostvars[linux_host].patch_linux_patchingresult_yum.changed is undefined %}
<li> Patching Failed </li> <li> Patching Failed </li>
{% else %} {% else %}
<li> Compliant </li> <li> Compliant </li>

View File

@@ -13,10 +13,10 @@
state: present state: present
namespace: patching-report namespace: patching-report
definition: "{{ lookup('ansible.builtin.template', 'resources.yaml.j2') }}" definition: "{{ lookup('ansible.builtin.template', 'resources.yaml.j2') }}"
register: resources_output register: report_ocp_patching_resources_output
- name: Display link to patching report - name: Display link to patching report
ansible.builtin.debug: ansible.builtin.debug:
msg: msg:
- "Patching report available at:" - "Patching report available at:"
- "{{ resources_output.result.results[3].result.spec.port.targetPort }}://{{ resources_output.result.results[3].result.spec.host }}" - "{{ report_ocp_patching_resources_output.result.results[3].result.spec.port.targetPort }}://{{ report_ocp_patching_resources_output.result.results[3].result.spec.host }}"

View File

@@ -35,17 +35,17 @@
<td>{{hostvars[linux_host]['ansible_distribution_version']|default("none")}}</td> <td>{{hostvars[linux_host]['ansible_distribution_version']|default("none")}}</td>
<td> <td>
<ul> <ul>
{% if hostvars[linux_host].patchingresult_yum.changed|default("false",true) == true %} {% if hostvars[linux_host].patch_linux_patchingresult_yum.changed|default("false",true) == true %}
{% for packagename in hostvars[linux_host].patchingresult_yum.changes.updated|sort %} {% for packagename in hostvars[linux_host].patch_linux_patchingresult_yum.changes.updated|sort %}
<li> {{ packagename[0] }} - {{ packagename[1] }} </li> <li> {{ packagename[0] }} - {{ packagename[1] }} </li>
{% endfor %} {% endfor %}
{% elif hostvars[linux_host].patchingresult_dnf.changed|default("false",true) == true %} {% elif hostvars[linux_host].patch_linux_patchingresult_dnf.changed|default("false",true) == true %}
{% for packagename in hostvars[linux_host].patchingresult_dnf.results|sort %} {% for packagename in hostvars[linux_host].patch_linux_patchingresult_dnf.results|sort %}
<li> {{ packagename }} </li> <li> {{ packagename }} </li>
{% endfor %} {% endfor %}
{% elif hostvars[linux_host].patchingresult_dnf.changed is undefined %} {% elif hostvars[linux_host].patch_linux_patchingresult_dnf.changed is undefined %}
<li> Patching Failed </li> <li> Patching Failed </li>
{% elif hostvars[linux_host].patchingresult_yum.changed is undefined %} {% elif hostvars[linux_host].patch_linux_patchingresult_yum.changed is undefined %}
<li> Patching Failed </li> <li> Patching Failed </li>
{% else %} {% else %}
<li> Compliant </li> <li> Compliant </li>

View File

@@ -3,7 +3,7 @@
ansible.builtin.include_vars: "{{ ansible_system }}.yml" ansible.builtin.include_vars: "{{ ansible_system }}.yml"
- name: Install httpd package - name: Install httpd package
ansible.builtin.yum: ansible.builtin.dnf:
name: httpd name: httpd
state: installed state: installed
check_mode: false check_mode: false

View File

@@ -6,7 +6,7 @@
ansible.builtin.find: ansible.builtin.find:
paths: "{{ doc_root }}/{{ reports_dir }}" paths: "{{ doc_root }}/{{ reports_dir }}"
patterns: '*.html' patterns: '*.html'
register: reports register: report_server_reports
check_mode: false check_mode: false
- name: Publish landing page - name: Publish landing page

View File

@@ -6,7 +6,7 @@
ansible.windows.win_find: ansible.windows.win_find:
paths: "{{ doc_root }}/{{ reports_dir }}" paths: "{{ doc_root }}/{{ reports_dir }}"
patterns: '*.html' patterns: '*.html'
register: reports register: report_server_reports
check_mode: false check_mode: false
- name: Publish landing page - name: Publish landing page

View File

@@ -20,7 +20,7 @@
</center> </center>
<table class="table table-striped mt32 main_net_table"> <table class="table table-striped mt32 main_net_table">
<tbody> <tbody>
{% for report in reports.files %} {% for report in report_server_reports.files %}
{% set page = report.path.split('/')[-1] %} {% set page = report.path.split('/')[-1] %}
<tr> <tr>
<td class="summary_info"> <td class="summary_info">

View File

@@ -20,7 +20,7 @@
</center> </center>
<table class="table table-striped mt32 main_net_table"> <table class="table table-striped mt32 main_net_table">
<tbody> <tbody>
{% for report in reports.files %} {% for report in report_server_reports.files %}
{% set page = report.path.split('\\')[-1] %} {% set page = report.path.split('\\')[-1] %}
<tr> <tr>
<td class="summary_info"> <td class="summary_info">

View File

@@ -10,7 +10,7 @@
name: "{{ instance_name }}" name: "{{ instance_name }}"
- name: Remove rhui client packages - name: Remove rhui client packages
ansible.builtin.yum: ansible.builtin.dnf:
name: name:
- google-rhui-client* - google-rhui-client*
- rh-amazon-rhui-client* - rh-amazon-rhui-client*
@@ -19,17 +19,17 @@
- name: Get current repos - name: Get current repos
ansible.builtin.command: ansible.builtin.command:
cmd: ls /etc/yum.repos.d/ cmd: ls /etc/yum.repos.d/
register: repos register: register_host_repos
changed_when: false changed_when: false
- name: Remove existing rhui repos - name: Remove existing rhui repos
ansible.builtin.file: ansible.builtin.file:
path: "/etc/yum.repos.d/{{ item }}" path: "/etc/yum.repos.d/{{ item }}"
state: absent state: absent
loop: "{{ repos.stdout_lines }}" loop: "{{ register_host_repos.stdout_lines }}"
- name: Install satellite certificate - name: Install satellite certificate
ansible.builtin.yum: ansible.builtin.dnf:
name: "{{ satellite_url }}/pub/katello-ca-consumer-latest.noarch.rpm" name: "{{ satellite_url }}/pub/katello-ca-consumer-latest.noarch.rpm"
state: present state: present
validate_certs: false validate_certs: false
@@ -53,7 +53,7 @@
state: enabled state: enabled
- name: Install satellite client - name: Install satellite client
ansible.builtin.yum: ansible.builtin.dnf:
name: name:
- katello-host-tools - katello-host-tools
- katello-host-tools-tracer - katello-host-tools-tracer

View File

@@ -1,6 +1,6 @@
--- ---
- name: Install openscap client packages - name: Install openscap client packages
ansible.builtin.yum: ansible.builtin.dnf:
name: name:
- openscap-scanner - openscap-scanner
- rubygem-foreman_scap_client - rubygem-foreman_scap_client
@@ -15,18 +15,18 @@
force_basic_auth: true force_basic_auth: true
body_format: json body_format: json
validate_certs: false validate_certs: false
register: policies register: scap_client_policies
no_log: "{{ foreman_operations_scap_client_secure_logging }}" no_log: "{{ foreman_operations_scap_client_secure_logging }}"
- name: Build policy {{ policy_name }} - name: Build policy {{ policy_name }}
ansible.builtin.set_fact: ansible.builtin.set_fact:
policy: "{{ policy | default([]) }} + {{ [item] }}" scap_client_policy: "{{ scap_client_policy | default([]) }} + {{ [item] }}"
loop: "{{ policies.json.results }}" loop: "{{ scap_client_policies.json.results }}"
when: item.name in policy_name or policy_name == 'all' when: item.name in policy_name or policy_name == 'all'
- name: Fail if no policy found with required name - name: Fail if no policy found with required name
ansible.builtin.fail: ansible.builtin.fail:
when: policy is not defined when: scap_client_policy is not defined
- name: Get scap content information - name: Get scap content information
ansible.builtin.uri: ansible.builtin.uri:
@@ -37,8 +37,8 @@
force_basic_auth: false force_basic_auth: false
body_format: json body_format: json
validate_certs: false validate_certs: false
register: scapcontents register: scap_client_scapcontents
loop: "{{ policy }}" loop: "{{ scap_client_policy }}"
no_log: "{{ foreman_operations_scap_client_secure_logging }}" no_log: "{{ foreman_operations_scap_client_secure_logging }}"
- name: Get tailoring content information - name: Get tailoring content information
@@ -50,21 +50,21 @@
force_basic_auth: false force_basic_auth: false
body_format: json body_format: json
validate_certs: false validate_certs: false
register: tailoringfiles register: scap_client_tailoringfiles
when: item.tailoring_file_id | int > 0 | d(False) when: item.tailoring_file_id | int > 0 | d(False)
loop: "{{ policy }}" loop: "{{ scap_client_policy }}"
no_log: "{{ foreman_operations_scap_client_secure_logging }}" no_log: "{{ foreman_operations_scap_client_secure_logging }}"
- name: Build scap content parameters - name: Build scap content parameters
ansible.builtin.set_fact: ansible.builtin.set_fact:
scap_content: "{{ scap_content | default({}) | combine({item.json.id: item.json}) }}" scap_client_scap_content: "{{ scap_client_scap_content | default({}) | combine({item.json.id: item.json}) }}"
loop: "{{ scapcontents.results }}" loop: "{{ scap_client_scapcontents.results }}"
- name: Build tailoring content parameters - name: Build tailoring content parameters
ansible.builtin.set_fact: ansible.builtin.set_fact:
tailoring_files: "{{ tailoring_files | default({}) | combine({item.json.id: item.json}) }}" scap_client_tailoring_files: "{{ scap_client_tailoring_files | default({}) | combine({item.json.id: item.json}) }}"
when: item.json is defined when: item.json is defined
loop: "{{ tailoringfiles.results }}" loop: "{{ scap_client_tailoringfiles.results }}"
- name: Apply openscap client configuration template - name: Apply openscap client configuration template
ansible.builtin.template: ansible.builtin.template:
@@ -78,7 +78,7 @@
# cron: # cron:
# name: "Openscap Execution" # name: "Openscap Execution"
# cron_file: 'foreman_openscap_client' # cron_file: 'foreman_openscap_client'
# job: '/usr/bin/foreman_scap_client {{policy.id}} > /dev/null' # job: '/usr/bin/foreman_scap_client {{scap_client_policy.id}} > /dev/null'
# weekday: "{{crontab_weekdays}}" # weekday: "{{crontab_weekdays}}"
# hour: "{{crontab_hour}}" # hour: "{{crontab_hour}}"
# minute: "{{crontab_minute}}" # minute: "{{crontab_minute}}"

View File

@@ -44,14 +44,13 @@ controller_inventory_sources:
- tag:Name - tag:Name
compose: compose:
ansible_host: public_ip_address ansible_host: public_ip_address
ansible_user: 'ec2-user' ansible_user: ec2-user
groups: groups:
cloud_aws: true cloud_aws: true
os_linux: tags.blueprint.startswith('rhel') os_linux: "platform_details == 'Red Hat Enterprise Linux'"
os_windows: tags.blueprint.startswith('win') os_windows: "platform_details == 'Windows'"
keyed_groups: keyed_groups:
- key: platform
prefix: os
- key: tags.blueprint - key: tags.blueprint
prefix: blueprint prefix: blueprint
- key: tags.owner - key: tags.owner
@@ -62,6 +61,7 @@ controller_inventory_sources:
prefix: deployment prefix: deployment
- key: tags.Compliance - key: tags.Compliance
separator: '' separator: ''
controller_groups: controller_groups:
- name: cloud_aws - name: cloud_aws
inventory: Demo Inventory inventory: Demo Inventory

View File

@@ -1 +0,0 @@
openshift-clients-4.16.0-202408021139.p0.ge8fb3c0.assembly.stream.el9.x86_64.rpm filter=lfs diff=lfs merge=lfs -text

View File

@@ -1,17 +1,16 @@
# Execution Environment Images for Ansible Product Demos # Execution Environment Images for Ansible Product Demos
When the Ansible Product Demos setup job template is run, it creates a number of execution environment definitions on the automation controller. The content of this directory is used to create and update the default execution environment images defined during the setup process. When the Ansible Product Demos setup job template is run, it creates a number of execution environment definitions on the automation controller. The content of this directory is used to create and update the default APD execution environment images defined during the setup process, [quay.io/ansible-product-demos/apd-ee-25](quay.io/ansible-product-demos/apd-ee-25).
Currently these execution environment images are created manually using the `build.sh` script, with a future goal of building in a CI pipeline when any EE definitions or requirements are updated. Currently the execution environment image is created manually using the `build.sh` script, with a future goal of building in a CI pipeline when the EE definition or requirements are updated.
## Building the execution environment images ## Building the execution environment images
1. `podman login registry.redhat.io` in order to pull the base EE images 1. `podman login registry.redhat.io` in order to pull the base EE images
2. `export ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN="<token>"` obtained from [Automation Hub](https://console.redhat.com/ansible/automation-hub/token) 2. `export ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN="<token>"` obtained from [Automation Hub](https://console.redhat.com/ansible/automation-hub/token)
3. `export ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN="<token>"` (same as above) 3. `export ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN="<token>"` (same token as above)
4. `./build.sh` to build the EE images and add them to your local podman image cache 4. `./build.sh` to build the EE image
The `build.sh` script creates multiple EE images, each based on the ee-minimal image that comes with a different minor version of AAP. These images are created in the "quay.io/ansible-product-demos" namespace. Currently the script builds the following images: The `build.sh` script creates a multi-architecture EE image for the amd64 (x86_64) and arm64 (aarch64) platforms. It does so by creating the build context using `ansible-builder create`, then creating a podman manifest definition and building an EE image for each supported platform.
* quay.io/ansible-product-demos/apd-ee-24 NOTE: Podman will use qemu to emulate the non-native architecture at build time, so the build must be performed on a system which includes the qemu-user-static package. Builds have only been tested on MacOS using podman-desktop with the native Fedora-based podman machine.
* quay.io/ansible-product-demos/apd-ee-25

View File

@@ -1,32 +0,0 @@
---
version: 3
images:
base_image:
name: registry.redhat.io/ansible-automation-platform-24/ee-minimal-rhel9:latest
dependencies:
galaxy: requirements.yml
additional_build_files:
# https://access.redhat.com/solutions/7024259
# download from access.redhat.com -> Downloads -> OpenShift Container Platform -> Packages
- src: openshift-clients-4.16.0-202408021139.p0.ge8fb3c0.assembly.stream.el9.x86_64.rpm
dest: rpms
- src: ansible.cfg
dest: configs
options:
package_manager_path: /usr/bin/microdnf
additional_build_steps:
prepend_base:
- RUN $PYCMD -m pip install --upgrade pip setuptools
- COPY _build/rpms/openshift-clients*.rpm /tmp/openshift-clients.rpm
- RUN $PKGMGR -y update && $PKGMGR -y install bash-completion && $PKGMGR clean all
- RUN rpm -ivh /tmp/openshift-clients.rpm && rm /tmp/openshift-clients.rpm
prepend_galaxy:
- ADD _build/configs/ansible.cfg /etc/ansible/ansible.cfg
- ARG ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN
- ARG ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN
...

View File

@@ -4,7 +4,7 @@ images:
base_image: base_image:
name: registry.redhat.io/ansible-automation-platform-25/ee-minimal-rhel9:latest name: registry.redhat.io/ansible-automation-platform-25/ee-minimal-rhel9:latest
dependencies: dependencies:
galaxy: requirements-25.yml galaxy: requirements.yml
system: system:
- python3.11-devel [platform:rpm] - python3.11-devel [platform:rpm]
python: python:
@@ -13,10 +13,6 @@ dependencies:
python_path: /usr/bin/python3.11 python_path: /usr/bin/python3.11
additional_build_files: additional_build_files:
# https://access.redhat.com/solutions/7024259
# download from access.redhat.com -> Downloads -> OpenShift Container Platform -> Packages
- src: openshift-clients-4.16.0-202408021139.p0.ge8fb3c0.assembly.stream.el9.x86_64.rpm
dest: rpms
- src: ansible.cfg - src: ansible.cfg
dest: configs dest: configs
@@ -25,16 +21,17 @@ options:
additional_build_steps: additional_build_steps:
prepend_base: prepend_base:
# AgnosticD can use this to deterine it is running from an EE - ARG OPENSHIFT_CLIENT_RPM
# see https://github.com/redhat-cop/agnosticd/blob/development/ansible/install_galaxy_roles.yml
- ENV LAUNCHED_BY_RUNNER=1
- RUN $PYCMD -m pip install --upgrade pip setuptools - RUN $PYCMD -m pip install --upgrade pip setuptools
- COPY _build/rpms/openshift-clients*.rpm /tmp/openshift-clients.rpm
- RUN $PKGMGR -y update && $PKGMGR -y install bash-completion && $PKGMGR clean all - RUN $PKGMGR -y update && $PKGMGR -y install bash-completion && $PKGMGR clean all
- RUN rpm -ivh /tmp/openshift-clients.rpm && rm /tmp/openshift-clients.rpm # microdnf doesn't support URL or local file paths to RPMs, use rpm as a workaround
- RUN curl -o /tmp/openshift-clients.rpm $OPENSHIFT_CLIENT_RPM && rpm -Uvh /tmp/openshift-clients.rpm && rm -f /tmp/openshift-clients.rpm
prepend_galaxy: prepend_galaxy:
- ADD _build/configs/ansible.cfg /etc/ansible/ansible.cfg - ADD _build/configs/ansible.cfg /etc/ansible/ansible.cfg
- ARG ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN - ARG ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN
- ARG ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN - ARG ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN
append_final:
- RUN curl -o /etc/yum.repos.d/hasicorp.repo https://rpm.releases.hashicorp.com/RHEL/hashicorp.repo &&
microdnf install -y terraform
... ...

View File

@@ -1,29 +1,61 @@
#!/bin/bash #!/bin/bash
# array of images to build if [[ -z $ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN || -z $ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN ]]
ee_images=( then
"apd-ee-24" echo "A valid Automation Hub token is required, Set the following environment variables before continuing"
"apd-ee-25" echo "export ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN=<token>"
) echo "export ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN=<token>"
exit 1
fi
for ee in "${ee_images[@]}" # log in to pull the base EE image
if ! podman login --get-login registry.redhat.io > /dev/null
then
echo "Run 'podman login registry.redhat.io' before continuing"
exit 1
fi
# create EE definition
rm -rf ./context/*
ansible-builder create \
--file apd-ee-25.yml \
--context ./context \
-v 3 | tee ansible-builder.log
# remove existing manifest if present
_tag=$(date +%Y%m%d)
podman manifest rm quay.io/ansible-product-demos/apd-ee-25:${_tag}
# create manifest for EE image
podman manifest create quay.io/ansible-product-demos/apd-ee-25:${_tag}
# for the openshift-clients RPM, microdnf doesn't support URL-based installs
# and HTTP doesn't support file globs for GETs, use multiple steps to determine
# the correct RPM URL for each machine architecture
for arch in amd64 arm64
do do
echo "Building EE image ${ee}" _baseurl=https://mirror.openshift.com/pub/openshift-v4/${arch}/dependencies/rpms/4.18-el9-beta/
_rpm=$(curl -s ${_baseurl} | grep openshift-clients-4 | grep href | cut -d\" -f2)
# build EE image # build EE for multiple architectures from the EE context
ansible-builder build \ pushd ./context/ > /dev/null
--file ${ee}.yml \ podman build --platform linux/${arch} \
--context ./ee_contexts/${ee} \ --build-arg ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN \
--build-arg ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN \ --build-arg ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN \
--build-arg ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN \ --build-arg OPENSHIFT_CLIENT_RPM="${_baseurl}${_rpm}" \
-v 3 \ --manifest quay.io/ansible-product-demos/apd-ee-25:${_tag} . \
-t quay.io/ansible-product-demos/${ee}:$(date +%Y%m%d) | tee podman-build-${arch}.log
popd > /dev/null
if [[ $? == 0 ]]
then
# tag EE image as latest
podman tag \
quay.io/ansible-product-demos/${ee}:$(date +%Y%m%d) \
quay.io/ansible-product-demos/${ee}:latest
fi
done done
# inspect manifest content
#podman manifest inspect quay.io/ansible-product-demos/apd-ee-25:${_tag}
# tag manifest as latest
#podman tag quay.io/ansible-product-demos/apd-ee-25:${_tag} quay.io/ansible-product-demos/apd-ee-25:latest
# push all manifest content to repository
# using --all is important here, it pushes all content and not
# just the native platform content
#podman manifest push --all quay.io/ansible-product-demos/apd-ee-25:${_tag}
#podman manifest push --all quay.io/ansible-product-demos/apd-ee-25:latest

View File

@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f637eb0440f14f1458800c7a9012adcb9b58eb2131c02f64dfa4ca515e182093
size 54960859

View File

@@ -1,77 +0,0 @@
---
collections:
# AAP config as code
- name: ansible.controller
version: ">=4.6.0"
# TODO this fails trying to install a different version of
# the python-systemd package
# - name: ansible.eda # fails trying to install systemd-python package
# version: ">=2.1.0"
- name: ansible.hub
version: ">=1.0.0"
- name: ansible.platform
version: ">=2.5.0"
- name: infra.ah_configuration
version: ">=2.0.6"
- name: infra.controller_configuration
version: ">=2.11.0"
# linux demos
- name: ansible.posix
version: ">=1.5.4"
- name: community.general
version: ">=8.0.0"
- name: containers.podman
version: ">=1.12.1"
- name: redhat.insights
version: ">=1.2.2"
- name: redhat.rhel_system_roles
version: ">=1.23.0"
# windows demos
- name: microsoft.ad
version: "1.9"
- name: ansible.windows
version: ">=2.3.0"
- name: chocolatey.chocolatey
version: ">=1.5.1"
- name: community.windows
version: ">=2.2.0"
# cloud demos
- name: amazon.aws
version: ">=7.5.0"
# satellite demos
- name: redhat.satellite
version: ">=4.0.0"
# network demos
- name: ansible.netcommon
version: ">=6.0.0"
- name: cisco.ios
version: ">=7.0.0"
- name: cisco.iosxr
version: ">=8.0.0"
- name: cisco.nxos
version: ">=7.0.0"
- name: network.backup
version: ">=3.0.0"
# TODO on 2.5 ee-minimal-rhel9 this tries to build and install
# a different version of python netifaces, which fails
# - name: infoblox.nios_modules
# version: ">=1.6.1"
# openshift demos
- name: kubernetes.core
version: ">=4.0.0"
- name: redhat.openshift
version: ">=3.0.1"
- name: redhat.openshift_virtualization
version: ">=1.4.0"
# for RHDP
- name: ansible.utils
version: ">=5.1.0"
- name: kubevirt.core
version: ">=2.1.0"
- name: community.okd
version: ">=4.0.0"
- name: https://github.com/rhpds/assisted_installer.git
type: git
version: "v0.0.1"
...

View File

@@ -1,14 +1,21 @@
--- ---
collections: collections:
# AAP config as code
- name: ansible.controller - name: ansible.controller
version: "<4.6.0" version: ">=4.6.0"
# TODO this fails trying to install a different version of
# the python-systemd package
# - name: ansible.eda # fails trying to install systemd-python package
# version: ">=2.1.0"
- name: ansible.hub
version: ">=1.0.0"
- name: ansible.platform
version: ">=2.5.0"
- name: infra.ah_configuration - name: infra.ah_configuration
version: ">=2.0.6" version: ">=2.0.6"
- name: infra.controller_configuration - name: infra.controller_configuration
version: ">=2.9.0" version: ">=2.11.0"
- name: redhat_cop.controller_configuration # linux demos
version: ">=2.3.1"
# linux
- name: ansible.posix - name: ansible.posix
version: ">=1.5.4" version: ">=1.5.4"
- name: community.general - name: community.general
@@ -19,7 +26,7 @@ collections:
version: ">=1.2.2" version: ">=1.2.2"
- name: redhat.rhel_system_roles - name: redhat.rhel_system_roles
version: ">=1.23.0" version: ">=1.23.0"
# windows # windows demos
- name: microsoft.ad - name: microsoft.ad
version: "1.9" version: "1.9"
- name: ansible.windows - name: ansible.windows
@@ -28,13 +35,13 @@ collections:
version: ">=1.5.1" version: ">=1.5.1"
- name: community.windows - name: community.windows
version: ">=2.2.0" version: ">=2.2.0"
# cloud # cloud demos
- name: amazon.aws - name: amazon.aws
version: ">=7.5.0" version: ">=7.5.0"
# satellite # satellite demos
- name: redhat.satellite - name: redhat.satellite
version: ">=4.0.0" version: ">=4.0.0"
# network # network demos
- name: ansible.netcommon - name: ansible.netcommon
version: ">=6.0.0" version: ">=6.0.0"
- name: cisco.ios - name: cisco.ios
@@ -43,12 +50,20 @@ collections:
version: ">=8.0.0" version: ">=8.0.0"
- name: cisco.nxos - name: cisco.nxos
version: ">=7.0.0" version: ">=7.0.0"
- name: infoblox.nios_modules - name: network.backup
version: ">=1.6.1" version: ">=3.0.0"
# openshift # TODO on 2.5 ee-minimal-rhel9 this tries to build and install
# a different version of python netifaces, which fails
# - name: infoblox.nios_modules
# version: ">=1.6.1"
# openshift demos
- name: ansible.utils
version: ">=6.0.0"
- name: kubernetes.core - name: kubernetes.core
version: ">=4.0.0" version: ">=4.0.0"
- name: redhat.openshift - name: redhat.openshift
version: ">=3.0.1" version: ">=3.0.1"
- name: redhat.openshift_virtualization - name: redhat.openshift_virtualization
version: ">=1.4.0" version: ">=1.4.0"
...

View File

@@ -20,12 +20,12 @@
# Install subscription-manager if it's not there # Install subscription-manager if it's not there
- name: Install subscription-manager - name: Install subscription-manager
ansible.builtin.yum: ansible.builtin.dnf:
name: subscription-manager name: subscription-manager
state: present state: present
- name: Remove rhui client packages - name: Remove rhui client packages
ansible.builtin.yum: ansible.builtin.dnf:
name: rh-amazon-rhui-client* name: rh-amazon-rhui-client*
state: removed state: removed
@@ -43,7 +43,7 @@
when: "'rhui' in item" when: "'rhui' in item"
- name: Install katello package - name: Install katello package
ansible.builtin.yum: ansible.builtin.dnf:
name: "https://{{ sat_url }}/pub/katello-ca-consumer-latest.noarch.rpm" name: "https://{{ sat_url }}/pub/katello-ca-consumer-latest.noarch.rpm"
state: present state: present
validate_certs: false validate_certs: false

View File

@@ -52,7 +52,9 @@
state: enabled state: enabled
immediate: true immediate: true
permanent: true permanent: true
when: "'firewalld.service' in ansible_facts.services" when:
- "'firewalld.service' in ansible_facts.services"
- ansible_facts.services["firewalld.service"].state == "running"
- name: Disable httpd welcome page - name: Disable httpd welcome page
ansible.builtin.file: ansible.builtin.file:

View File

@@ -8,7 +8,7 @@
tasks: tasks:
# Install yum-utils if it's not there # Install yum-utils if it's not there
- name: Install yum-utils - name: Install yum-utils
ansible.builtin.yum: ansible.builtin.dnf:
name: yum-utils name: yum-utils
state: installed state: installed
check_mode: false check_mode: false

View File

@@ -16,7 +16,7 @@
key: "{{ sudo_user }}" key: "{{ sudo_user }}"
- name: Check Cleanup package - name: Check Cleanup package
ansible.builtin.yum: ansible.builtin.dnf:
name: at name: at
state: present state: present

View File

@@ -5,7 +5,7 @@
tasks: tasks:
# Install yum-utils if it's not there # Install yum-utils if it's not there
- name: Install yum-utils - name: Install yum-utils
ansible.builtin.yum: ansible.builtin.dnf:
name: yum-utils name: yum-utils
state: installed state: installed

View File

@@ -2,45 +2,65 @@
roles: roles:
# RHEL 7 compliance roles from ComplianceAsCode # RHEL 7 compliance roles from ComplianceAsCode
- name: redhatofficial.rhel7-cis - name: redhatofficial.rhel7-cis
src: https://github.com/RedHatOfficial/ansible-role-rhel7-cis
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel7-cjis - name: redhatofficial.rhel7-cjis
src: https://github.com/RedHatOfficial/ansible-role-rhel7-cjis
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel7-cui - name: redhatofficial.rhel7-cui
src: https://github.com/RedHatOfficial/ansible-role-rhel7-cui
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel7-hipaa - name: redhatofficial.rhel7-hipaa
src: https://github.com/RedHatOfficial/ansible-role-rhel7-hipaa
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel7-ospp - name: redhatofficial.rhel7-ospp
src: https://github.com/RedHatOfficial/ansible-role-rhel7-ospp
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel7-pci-dss - name: redhatofficial.rhel7-pci-dss
src: https://github.com/RedHatOfficial/ansible-role-rhel7-pci-dss
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel7-stig - name: redhatofficial.rhel7-stig
src: https://github.com/RedHatOfficial/ansible-role-rhel7-stig
version: 0.1.72 version: 0.1.72
# RHEL 8 compliance roles from ComplianceAsCode # RHEL 8 compliance roles from ComplianceAsCode
- name: redhatofficial.rhel8-cis - name: redhatofficial.rhel8-cis
src: https://github.com/RedHatOfficial/ansible-role-rhel8-cis
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel8-cjis - name: redhatofficial.rhel8-cjis
src: https://github.com/RedHatOfficial/ansible-role-rhel8-cjis
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel8-cui - name: redhatofficial.rhel8-cui
src: https://github.com/RedHatOfficial/ansible-role-rhel8-cui
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel8-hipaa - name: redhatofficial.rhel8-hipaa
src: https://github.com/RedHatOfficial/ansible-role-rhel8-hipaa
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel8-ospp - name: redhatofficial.rhel8-ospp
src: https://github.com/RedHatOfficial/ansible-role-rhel8-ospp
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel8-pci-dss - name: redhatofficial.rhel8-pci-dss
src: https://github.com/RedHatOfficial/ansible-role-rhel8-pci-dss
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel8-stig - name: redhatofficial.rhel8-stig
src: https://github.com/RedHatOfficial/ansible-role-rhel8-stig
version: 0.1.72 version: 0.1.72
# RHEL 9 compliance roles from ComplianceAsCode # RHEL 9 compliance roles from ComplianceAsCode
- name: redhatofficial.rhel9-cis - name: redhatofficial.rhel9-cis
src: https://github.com/RedHatOfficial/ansible-role-rhel9-cis
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel9-cui - name: redhatofficial.rhel9-cui
src: https://github.com/RedHatOfficial/ansible-role-rhel9-cui
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel9-hipaa - name: redhatofficial.rhel9-hipaa
src: https://github.com/RedHatOfficial/ansible-role-rhel9-hipaa
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel9-ospp - name: redhatofficial.rhel9-ospp
src: https://github.com/RedHatOfficial/ansible-role-rhel9-ospp
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel9-pci-dss - name: redhatofficial.rhel9-pci-dss
src: https://github.com/RedHatOfficial/ansible-role-rhel9-pci-dss
version: 0.1.72 version: 0.1.72
- name: redhatofficial.rhel9-stig - name: redhatofficial.rhel9-stig
src: https://github.com/RedHatOfficial/ansible-role-rhel9-stig
version: 0.1.72 version: 0.1.72
... ...

View File

@@ -8,6 +8,8 @@
- [Jobs](#jobs) - [Jobs](#jobs)
- [Workflows](#workflows) - [Workflows](#workflows)
- [Suggested Usage](#suggested-usage) - [Suggested Usage](#suggested-usage)
- [Connecting to Windows Hosts](#connecting-to-windows-hosts)
- [Testing with RDP](#testing-with-rdp)
## About These Demos ## About These Demos
This category of demos shows examples of Windows Server operations and management with Ansible Automation Platform. The list of demos can be found below. See the [Suggested Usage](#suggested-usage) section of this document for recommendations on how to best use these demos. This category of demos shows examples of Windows Server operations and management with Ansible Automation Platform. The list of demos can be found below. See the [Suggested Usage](#suggested-usage) section of this document for recommendations on how to best use these demos.
@@ -40,3 +42,24 @@ We are currently investigating an intermittent connectivity issue related to the
**WINDOWS / Helpdesk new user portal** - This job is dependant on the Create Active Directory Domain completing before users can be created. **WINDOWS / Helpdesk new user portal** - This job is dependant on the Create Active Directory Domain completing before users can be created.
**WINDOWS / Join Active Directory Domain** - This job is dependant on the Create Active Directory Domain completing before computers can be joined. **WINDOWS / Join Active Directory Domain** - This job is dependant on the Create Active Directory Domain completing before computers can be joined.
## Connecting to Windows Hosts
The provided template for provisioning VMs in AWS supports a few blueprints, notably [windows_core](../cloud/blueprints/windows_core.yml) and [windows_full](../cloud/blueprints/windows_full.yml). The windows blueprints both rely on the [aws_windows_userdata](../collections/ansible_collections/demo/cloud/roles/aws/templates/aws_windows_userdata.j2) script which configures a user with Administrator privileges. By default, the Demo Credential is used to inject a password for `ec2-user`.
⚠️ When using Ansible Product Demos on demo.redhat.com,<br>
the image below demonstrates where you can locate the Demo Credential password:<br>
![Windows VM Password](../.github/images/windows_vm_password.png)
### Testing with RDP
In the AWS Console, you can follow the steps below to download an RDP configuration for your Windows host:
1. Navigate to the EC2 Dashboard
2. Navigate to Instances
3. Click on the desired Instance ID
4. Click the button to **Connect**
5. Select the **RDP client** tab
6. Click the button to **Download remote desktop file**
7. Use a local RDP client to open the file and connect<br>
_Note: the configuration will default to using Administrator as the username, replace with ec2-user_

View File

@@ -46,15 +46,17 @@
- name: Create some users - name: Create some users
microsoft.ad.user: microsoft.ad.user:
name: "{{ item.name }}" name: "{{ item.name }}"
groups: "{{ item.groups }}" groups:
set:
- "{{ item.group }}"
password: "{{ lookup('community.general.random_string', min_lower=1, min_upper=1, min_special=1, min_numeric=1) }}" password: "{{ lookup('community.general.random_string', min_lower=1, min_upper=1, min_special=1, min_numeric=1) }}"
update_password: on_create update_password: on_create
loop: loop:
- name: "UserA" - name: "UserA"
groups: "GroupA" group: "GroupA"
- name: "UserB" - name: "UserB"
groups: "GroupB" group: "GroupB"
- name: "UserC" - name: "UserC"
groups: "GroupC" group: "GroupC"
retries: 5 retries: 5
delay: 10 delay: 10

View File

@@ -10,7 +10,7 @@
# Example result: ['&Qw2|E[-'] # Example result: ['&Qw2|E[-']
- name: Create new user - name: Create new user
community.windows.win_domain_user: microsoft.ad.user:
name: "{{ firstname }} {{ surname }}" name: "{{ firstname }} {{ surname }}"
firstname: "{{ firstname }}" firstname: "{{ firstname }}"
surname: "{{ surname }}" surname: "{{ surname }}"

View File

@@ -16,7 +16,7 @@
- name: Ensure Demo OU exists - name: Ensure Demo OU exists
run_once: true run_once: true
delegate_to: "{{ domain_controller }}" delegate_to: "{{ domain_controller }}"
community.windows.win_domain_ou: microsoft.ad.ou:
name: Demo name: Demo
state: present state: present
@@ -26,7 +26,7 @@
- name: Join ansible.local domain - name: Join ansible.local domain
register: r_domain_membership register: r_domain_membership
ansible.windows.win_domain_membership: microsoft.ad.membership:
dns_domain_name: ansible.local dns_domain_name: ansible.local
hostname: "{{ inventory_hostname.split('.')[0] }}" hostname: "{{ inventory_hostname.split('.')[0] }}"
domain_admin_user: "{{ ansible_user }}@ansible.local" domain_admin_user: "{{ ansible_user }}@ansible.local"