11 Commits

Author SHA1 Message Date
Chris Edillon
7cdffbd265 Merge branch 'main' into jce/cloud-cleanup 2025-07-01 09:21:30 -04:00
Chris Edillon
af7d93fcdb Improve compliance report firewalld conditional (#253)
Co-authored-by: Matthew Fernandez <l3acon@users.noreply.github.com>
2025-06-25 14:00:29 -06:00
Chris Edillon
2ffb9f5f7c added inventory sync 2025-06-25 15:08:40 -04:00
Chris Edillon
605aa91080 Added workflow to delete cloud stack 2025-06-25 13:45:23 -04:00
Matthew Fernandez
0634643f21 Fix AWS groups (#255) 2025-06-25 13:06:49 -04:00
Todd Ruch
db97b38fbc Resolve parameter failure in Windows "Create some users" task (#250) 2025-06-20 14:38:08 -04:00
Chris Edillon
7468d14a98 support building multi-arch EE image (#249)
Co-authored-by: Matthew Fernandez <l3acon@users.noreply.github.com>
2025-06-18 16:49:04 -04:00
Matthew Fernandez
8a70edbfdc Attempt galaxy workaround (#252)
this will eventually be re-worked to put roles in our EE
2025-06-17 10:00:20 -06:00
Matthew Fernandez
9a93004e0a Fix mistake where the main README.md is overridden. (#243) 2025-05-13 12:08:50 -06:00
Matthew Fernandez
64f7c88114 Refactor pre commit (#237)
Wheee!
2025-05-06 14:24:25 -06:00
Chris Edillon
4285a68f3e Update DISA supplemental roles for RHEL STIG (#238) 2025-05-05 11:11:14 -06:00
25 changed files with 472 additions and 195 deletions

View File

@@ -1,10 +1,16 @@
---
profile: production
offline: false
offline: true
skip_list:
- "galaxy[no-changelog]"
warn_list:
# seems to be a bug, see https://github.com/ansible/ansible-lint/issues/4172
- "fqcn[canonical]"
# @matferna: really not sure why lint thinks it can't find jmespath, it is installed and functional
- "jinja[invalid]"
exclude_paths:
# would be better to move the roles here to the top-level roles directory
- collections/ansible_collections/demo/compliance/roles/

25
.github/workflows/README.md vendored Normal file
View File

@@ -0,0 +1,25 @@
# GitHub Actions
## Background
We want to run our integration tests the same way whether using GitHub Actions or locally on a developer's machine. For this reason, the tests are curated to run using container images. As of this writing, two images exist which we would like to test against:
- quay.io/ansible-product-demos/apd-ee-24:latest
- quay.io/ansible-product-demos/apd-ee-25:latest
These images are built from the structure defined in their respective EE [definitions](../execution_environments). Because they differ (mainly due to their Python versions), each gets some special handling.
## Troubleshooting GitHub Actions
### Interactive
Interactive debugging is likely the most straightforward approach. The following podman command can be run from the project root directory to replicate the GitHub action:
```
podman run \
--user root \
-v $(pwd):/runner:Z \
-it \
<image> \
/bin/bash
```
`<image>` is one of `quay.io/ansible-product-demos/apd-ee-25:latest` or `quay.io/ansible-product-demos/apd-ee-24:latest`.
The replication is not exact because GitHub seems to run closer to a sidecar container paradigm and uses Docker instead of podman, but hopefully it is close enough.
For the 24 EE, the Python interpreter version is set for our pre-commit script like so: `USE_PYTHON=python3.9 ./.github/workflows/run-pc.sh`
The 25 EE is run similarly but without the need for this variable: `./.github/workflows/run-pc.sh`
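For a one-shot, non-interactive run, the container invocation and the script can be combined (a sketch, assuming the project is mounted at `/runner` as in the command above):
```
podman run \
  --user root \
  -v $(pwd):/runner:Z \
  -w /runner \
  quay.io/ansible-product-demos/apd-ee-24:latest \
  /bin/bash -c "USE_PYTHON=python3.9 ./.github/workflows/run-pc.sh"
```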

View File

@@ -4,17 +4,23 @@ on:
- push
- pull_request_target
env:
ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN: ${{ secrets.ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN }}
ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN: ${{ secrets.ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN }}
jobs:
pre-commit:
name: pre-commit
pre-commit-25:
container:
image: quay.io/ansible-product-demos/apd-ee-25
options: --user root
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: pre-commit/action@v3.0.1
- run: ./.github/workflows/run-pc.sh
shell: bash
pre-commit-24:
container:
image: quay.io/ansible-product-demos/apd-ee-24
options: --user root
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- run: USE_PYTHON=python3.9 ./.github/workflows/run-pc.sh
shell: bash
...

24
.github/workflows/run-pc.sh vendored Executable file
View File

@@ -0,0 +1,24 @@
#!/bin/bash -x
dnf install git-lfs -y
PYTHON_VARIANT="${USE_PYTHON:-python3.11}"
PATH="$PATH:$HOME/.local/bin"
# install pip
eval "${PYTHON_VARIANT} -m pip install --user --upgrade pip"
# try to fix 2.4 incompatibility
eval "${PYTHON_VARIANT} -m pip install --user --upgrade setuptools wheel twine check-wheel-contents"
# install pre-commit
eval "${PYTHON_VARIANT} -m pip install --user pre-commit"
# view pip packages
eval "${PYTHON_VARIANT} -m pip freeze --local"
# mark the mounted directory as safe for git (avoids dubious-ownership errors)
git config --global --add safe.directory $(pwd)
# run pre-commit
pre-commit run --config $(pwd)/.pre-commit-gh.yml --show-diff-on-failure --color=always

View File

@@ -14,13 +14,12 @@ repos:
- id: check-json
- id: check-symlinks
- repo: https://github.com/ansible/ansible-lint.git
# get latest release tag from https://github.com/ansible/ansible-lint/releases/
rev: v6.20.3
- repo: local
hooks:
- id: ansible-lint
additional_dependencies:
- jmespath
name: ansible-navigator lint --eei quay.io/ansible-product-demos/apd-ee-25:latest --mode stdout
language: python
entry: bash -c "ansible-navigator lint --eei quay.io/ansible-product-demos/apd-ee-25 -v --force-color --mode stdout"
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.11.0

30
.pre-commit-gh.yml Normal file
View File

@@ -0,0 +1,30 @@
---
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: trailing-whitespace
exclude: rhel[89]STIG/.*$
- id: check-yaml
exclude: \.j2.(yaml|yml)$|\.(yaml|yml).j2$
args: [--unsafe] # see https://github.com/pre-commit/pre-commit-hooks/issues/273
- id: check-toml
- id: check-json
- id: check-symlinks
- repo: https://github.com/ansible/ansible-lint.git
# get latest release tag from https://github.com/ansible/ansible-lint/releases/
rev: v6.20.3
hooks:
- id: ansible-lint
additional_dependencies:
- jmespath
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.11.0
hooks:
- id: black
exclude: rhel[89]STIG/.*$
...

View File

@@ -1,5 +1,5 @@
[defaults]
collections_path=./collections
collections_path=./collections:/usr/share/ansible/collections
roles_path=./roles
[galaxy]

View File

@@ -27,7 +27,7 @@
- us-east-2b
- us-east-2c
us-west-1:
# us-west-1a not available when last checked 20250218
# us-west-1a not available when last checked 20250618
- us-west-1b
- us-west-1c
us-west-2:

22
cloud/delete_aws_key.yml Normal file
View File

@@ -0,0 +1,22 @@
---
- name: Delete AWS keypair
hosts: localhost
vars:
aws_key_name: aws-test-key
tasks:
- name: Fail if variables not defined
ansible.builtin.assert:
that:
- aws_key_name is defined
- create_vm_aws_region is defined
fail_msg: "Required variables not set"
- name: Delete AWS keypair
amazon.aws.ec2_key:
name: "{{ aws_key_name }}"
region: "{{ create_vm_aws_region }}"
state: absent
...
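Outside the controller, a local run might look like the following (a sketch; it assumes AWS credentials are already present in the environment, and `us-east-2` is only an example region):
```
ansible-playbook cloud/delete_aws_key.yml \
  -e create_vm_aws_region=us-east-2 \
  -e aws_key_name=aws-test-key
```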

83
cloud/delete_vpc.yml Normal file
View File

@@ -0,0 +1,83 @@
---
- name: Delete cloud stack VPC
hosts: localhost
gather_facts: false
vars:
aws_vpc_name: aws-test-vpc
aws_sg_name: aws-test-sg
aws_subnet_name: aws-test-subnet
aws_subnet_cidr: 10.0.1.0/24
aws_rt_name: aws-test-rt
aws_purpose_tag: ansible_demo
tasks:
- name: Verify the VPC name
ansible.builtin.assert:
that:
- aws_vpc_name == "aws-test-vpc"
fail_msg: 'Only the VPC "aws-test-vpc" can be deleted with this playbook'
- name: Retrieve VPC info
amazon.aws.ec2_vpc_net_info:
region: "{{ create_vm_aws_region }}"
filters:
"tag:Name": "{{ aws_vpc_name }}"
"tag:purpose": "{{ aws_purpose_tag }}"
register: _vpc
- name: Retrieve internet gateway info
amazon.aws.ec2_vpc_igw_info:
region: "{{ create_vm_aws_region }}"
filters:
"tag:Name": "{{ aws_vpc_name }}"
"tag:purpose": "{{ aws_purpose_tag }}"
register: _igw
- name: Retrieve route table info
amazon.aws.ec2_vpc_route_table_info:
region: "{{ create_vm_aws_region }}"
filters:
"tag:Name": "{{ aws_rt_name }}"
"tag:purpose": "{{ aws_purpose_tag }}"
register: _rt
- name: Delete demo security group
amazon.aws.ec2_security_group:
name: "{{ aws_sg_name }}"
region: "{{ create_vm_aws_region }}"
vpc_id: "{{ _vpc.vpcs.0.id }}"
state: absent
when: _vpc.vpcs
- name: Delete subnet in the VPC
amazon.aws.ec2_vpc_subnet:
vpc_id: "{{ _vpc.vpcs.0.id }}"
cidr: "{{ aws_subnet_cidr }}"
region: "{{ create_vm_aws_region }}"
state: absent
when: _vpc.vpcs
- name: Delete the subnet route table
amazon.aws.ec2_vpc_route_table:
route_table_id: "{{ _rt.route_tables.0.route_table_id }}"
region: "{{ create_vm_aws_region }}"
lookup: id
state: absent
when: _rt.route_tables
- name: Delete internet gateway
amazon.aws.ec2_vpc_igw:
internet_gateway_id: "{{ _igw.internet_gateways.0.internet_gateway_id }}"
region: "{{ create_vm_aws_region }}"
state: absent
when: _igw.internet_gateways
- name: Delete VPC
amazon.aws.ec2_vpc_net:
vpc_id: "{{ _vpc.vpcs.0.id }}"
region: "{{ create_vm_aws_region }}"
state: absent
when: _vpc.vpcs
...
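Similarly, a local invocation of the VPC cleanup needs only the region (a sketch; the leading assert intentionally stops the run unless `aws_vpc_name` is left at its `aws-test-vpc` default):
```
ansible-playbook cloud/delete_vpc.yml -e create_vm_aws_region=us-east-2
```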

View File

@@ -171,6 +171,63 @@ controller_templates:
variable: _hosts
required: false
- name: Cloud / AWS / Delete Keypair
job_type: run
organization: Default
credentials:
- AWS
project: Ansible Product Demos
playbook: cloud/delete_aws_key.yml
inventory: Demo Inventory
notification_templates_started: Telemetry
notification_templates_success: Telemetry
notification_templates_error: Telemetry
survey_enabled: true
survey:
name: ''
description: ''
spec:
- question_name: AWS Region
type: multiplechoice
variable: create_vm_aws_region
required: true
choices:
- us-east-1
- us-east-2
- us-west-1
- us-west-2
- question_name: Keypair Name
type: text
variable: aws_key_name
required: true
default: aws-test-key
- name: Cloud / AWS / Delete VPC
job_type: run
organization: Default
credentials:
- AWS
project: Ansible Product Demos
playbook: cloud/delete_vpc.yml
inventory: Demo Inventory
notification_templates_started: Telemetry
notification_templates_success: Telemetry
notification_templates_error: Telemetry
survey_enabled: true
survey:
name: ''
description: ''
spec:
- question_name: AWS Region
type: multiplechoice
variable: create_vm_aws_region
required: true
choices:
- us-east-1
- us-east-2
- us-west-1
- us-west-2
- name: Cloud / AWS / Display EC2 Stats
job_type: run
organization: Default
@@ -392,3 +449,74 @@ controller_workflows:
unified_job_template: 'SUBMIT FEEDBACK'
extra_data:
feedback: Cloud / AWS / Patch EC2 Workflow | Failed to restore ec2 from snapshot
- name: Delete AWS Cloud Stack
description: >
Delete the AWS cloud stack created by the "Deploy Cloud Stack in AWS" workflow
organization: Default
notification_templates_started: Telemetry
notification_templates_success: Telemetry
notification_templates_error: Telemetry
survey_enabled: true
survey:
name: ''
description: ''
spec:
- question_name: AWS Region
type: multiplechoice
variable: create_vm_aws_region
required: true
choices:
- us-east-1
- us-east-2
- us-west-1
- us-west-2
- question_name: Instances to delete
type: text
variable: _hosts
required: true
default: aws-dc,aws_win1,aws_rhel8,aws_rhel9,reports
- question_name: Keypair to delete
type: text
variable: aws_key_name
required: true
default: aws-test-key
- question_name: VPC to delete
type: text
variable: aws_vpc_name
required: true
default: aws-test-vpc
simplified_workflow_nodes:
- identifier: Delete AWS Instances
unified_job_template: Cloud / AWS / Delete VM
success_nodes:
- Inventory Sync
failure_nodes:
- Ticket - Delete AWS Instances Failed
- identifier: Inventory Sync
unified_job_template: AWS Inventory
success_nodes:
- Delete AWS Keypair
- Delete AWS VPC
- identifier: Delete AWS Keypair
unified_job_template: Cloud / AWS / Delete Keypair
failure_nodes:
- Ticket - Delete AWS Keypair Failed
- identifier: Delete AWS VPC
unified_job_template: Cloud / AWS / Delete VPC
failure_nodes:
- Ticket - Delete AWS VPC Failed
- identifier: Ticket - Delete AWS Instances Failed
unified_job_template: 'SUBMIT FEEDBACK'
extra_data:
feedback: Failed to delete one or more AWS instances
- identifier: Ticket - Delete AWS Keypair Failed
unified_job_template: 'SUBMIT FEEDBACK'
extra_data:
feedback: Failed to delete AWS keypair
- identifier: Ticket - Delete AWS VPC Failed
unified_job_template: 'SUBMIT FEEDBACK'
extra_data:
feedback: Failed to delete AWS VPC
...

View File

@@ -44,14 +44,13 @@ controller_inventory_sources:
- tag:Name
compose:
ansible_host: public_ip_address
ansible_user: 'ec2-user'
ansible_user: ec2-user
groups:
cloud_aws: true
os_linux: tags.blueprint.startswith('rhel')
os_windows: tags.blueprint.startswith('win')
os_linux: "platform_details == 'Red Hat Enterprise Linux'"
os_windows: "platform_details == 'Windows'"
keyed_groups:
- key: platform
prefix: os
- key: tags.blueprint
prefix: blueprint
- key: tags.owner
@@ -62,6 +61,7 @@ controller_inventory_sources:
prefix: deployment
- key: tags.Compliance
separator: ''
controller_groups:
- name: cloud_aws
inventory: Demo Inventory

View File

@@ -1 +0,0 @@
openshift-clients-4.16.0-202408021139.p0.ge8fb3c0.assembly.stream.el9.x86_64.rpm filter=lfs diff=lfs merge=lfs -text

View File

@@ -1,17 +1,16 @@
# Execution Environment Images for Ansible Product Demos
When the Ansible Product Demos setup job template is run, it creates a number of execution environment definitions on the automation controller. The content of this directory is used to create and update the default execution environment images defined during the setup process.
When the Ansible Product Demos setup job template is run, it creates a number of execution environment definitions on the automation controller. The content of this directory is used to create and update the default APD execution environment image defined during the setup process, [quay.io/ansible-product-demos/apd-ee-25](https://quay.io/ansible-product-demos/apd-ee-25).
Currently these execution environment images are created manually using the `build.sh` script, with a future goal of building in a CI pipeline when any EE definitions or requirements are updated.
Currently the execution environment image is created manually using the `build.sh` script, with a future goal of building in a CI pipeline when the EE definition or requirements are updated.
## Building the execution environment images
1. `podman login registry.redhat.io` in order to pull the base EE images
2. `export ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN="<token>"` obtained from [Automation Hub](https://console.redhat.com/ansible/automation-hub/token)
3. `export ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN="<token>"` (same as above)
4. `./build.sh` to build the EE images and add them to your local podman image cache
3. `export ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN="<token>"` (same token as above)
4. `./build.sh` to build the EE image
The `build.sh` script creates multiple EE images, each based on the ee-minimal image that comes with a different minor version of AAP. These images are created in the "quay.io/ansible-product-demos" namespace. Currently the script builds the following images:
The `build.sh` script creates a multi-architecture EE image for the amd64 (x86_64) and arm64 (aarch64) platforms. It does so by creating the build context using `ansible-builder create`, then creating a podman manifest definition and building an EE image for each supported platform.
* quay.io/ansible-product-demos/apd-ee-24
* quay.io/ansible-product-demos/apd-ee-25
NOTE: Podman will use qemu to emulate the non-native architecture at build time, so the build must be performed on a system that includes the qemu-user-static package. Builds have only been tested on macOS using podman-desktop with the native Fedora-based podman machine.
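A quick way to confirm emulation works before running `build.sh` (a sketch, assuming a Fedora-based podman machine where dnf is available — on macOS, run the install inside the machine via `podman machine ssh`; the test image is arbitrary):
```
sudo dnf install -y qemu-user-static
# expect "aarch64" when run on an x86_64 host
podman run --rm --platform linux/arm64 registry.access.redhat.com/ubi9/ubi-minimal uname -m
```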

View File

@@ -1,32 +0,0 @@
---
version: 3
images:
base_image:
name: registry.redhat.io/ansible-automation-platform-24/ee-minimal-rhel9:latest
dependencies:
galaxy: requirements.yml
additional_build_files:
# https://access.redhat.com/solutions/7024259
# download from access.redhat.com -> Downloads -> OpenShift Container Platform -> Packages
- src: openshift-clients-4.16.0-202408021139.p0.ge8fb3c0.assembly.stream.el9.x86_64.rpm
dest: rpms
- src: ansible.cfg
dest: configs
options:
package_manager_path: /usr/bin/microdnf
additional_build_steps:
prepend_base:
- RUN $PYCMD -m pip install --upgrade pip setuptools
- COPY _build/rpms/openshift-clients*.rpm /tmp/openshift-clients.rpm
- RUN $PKGMGR -y update && $PKGMGR -y install bash-completion && $PKGMGR clean all
- RUN rpm -ivh /tmp/openshift-clients.rpm && rm /tmp/openshift-clients.rpm
prepend_galaxy:
- ADD _build/configs/ansible.cfg /etc/ansible/ansible.cfg
- ARG ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN
- ARG ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN
...

View File

@@ -4,7 +4,7 @@ images:
base_image:
name: registry.redhat.io/ansible-automation-platform-25/ee-minimal-rhel9:latest
dependencies:
galaxy: requirements-25.yml
galaxy: requirements.yml
system:
- python3.11-devel [platform:rpm]
python:
@@ -13,10 +13,6 @@ dependencies:
python_path: /usr/bin/python3.11
additional_build_files:
# https://access.redhat.com/solutions/7024259
# download from access.redhat.com -> Downloads -> OpenShift Container Platform -> Packages
- src: openshift-clients-4.16.0-202408021139.p0.ge8fb3c0.assembly.stream.el9.x86_64.rpm
dest: rpms
- src: ansible.cfg
dest: configs
@@ -25,16 +21,17 @@ options:
additional_build_steps:
prepend_base:
# AgnosticD can use this to determine it is running from an EE
# see https://github.com/redhat-cop/agnosticd/blob/development/ansible/install_galaxy_roles.yml
- ENV LAUNCHED_BY_RUNNER=1
- ARG OPENSHIFT_CLIENT_RPM
- RUN $PYCMD -m pip install --upgrade pip setuptools
- COPY _build/rpms/openshift-clients*.rpm /tmp/openshift-clients.rpm
- RUN $PKGMGR -y update && $PKGMGR -y install bash-completion && $PKGMGR clean all
- RUN rpm -ivh /tmp/openshift-clients.rpm && rm /tmp/openshift-clients.rpm
# microdnf doesn't support URL or local file paths to RPMs, so use rpm as a workaround
- RUN curl -o /tmp/openshift-clients.rpm $OPENSHIFT_CLIENT_RPM && rpm -Uvh /tmp/openshift-clients.rpm && rm -f /tmp/openshift-clients.rpm
prepend_galaxy:
- ADD _build/configs/ansible.cfg /etc/ansible/ansible.cfg
- ARG ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN
- ARG ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN
append_final:
- RUN curl -o /etc/yum.repos.d/hashicorp.repo https://rpm.releases.hashicorp.com/RHEL/hashicorp.repo &&
microdnf install -y terraform
...

View File

@@ -1,29 +1,61 @@
#!/bin/bash
# array of images to build
ee_images=(
"apd-ee-24"
"apd-ee-25"
)
if [[ -z $ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN || -z $ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN ]]
then
echo "A valid Automation Hub token is required, Set the following environment variables before continuing"
echo "export ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN=<token>"
echo "export ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN=<token>"
exit 1
fi
for ee in "${ee_images[@]}"
# check for a registry.redhat.io login (needed to pull the base EE image)
if ! podman login --get-login registry.redhat.io > /dev/null
then
echo "Run 'podman login registry.redhat.io' before continuing"
exit 1
fi
# create the EE build context from the definition
rm -rf ./context/*
ansible-builder create \
--file apd-ee-25.yml \
--context ./context \
-v 3 | tee ansible-builder.log
# remove existing manifest if present
_tag=$(date +%Y%m%d)
podman manifest rm quay.io/ansible-product-demos/apd-ee-25:${_tag}
# create manifest for EE image
podman manifest create quay.io/ansible-product-demos/apd-ee-25:${_tag}
# for the openshift-clients RPM, microdnf doesn't support URL-based installs,
# and an HTTP GET can't glob file names, so use multiple steps to determine
# the correct RPM URL for each machine architecture
for arch in amd64 arm64
do
echo "Building EE image ${ee}"
_baseurl=https://mirror.openshift.com/pub/openshift-v4/${arch}/dependencies/rpms/4.18-el9-beta/
_rpm=$(curl -s ${_baseurl} | grep openshift-clients-4 | grep href | cut -d\" -f2)
# build EE image
ansible-builder build \
--file ${ee}.yml \
--context ./ee_contexts/${ee} \
--build-arg ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN \
--build-arg ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN \
-v 3 \
-t quay.io/ansible-product-demos/${ee}:$(date +%Y%m%d)
if [[ $? == 0 ]]
then
# tag EE image as latest
podman tag \
quay.io/ansible-product-demos/${ee}:$(date +%Y%m%d) \
quay.io/ansible-product-demos/${ee}:latest
fi
# build EE for multiple architectures from the EE context
pushd ./context/ > /dev/null
podman build --platform linux/${arch} \
--build-arg ANSIBLE_GALAXY_SERVER_CERTIFIED_TOKEN \
--build-arg ANSIBLE_GALAXY_SERVER_VALIDATED_TOKEN \
--build-arg OPENSHIFT_CLIENT_RPM="${_baseurl}${_rpm}" \
--manifest quay.io/ansible-product-demos/apd-ee-25:${_tag} . \
| tee podman-build-${arch}.log
popd > /dev/null
done
# inspect manifest content
#podman manifest inspect quay.io/ansible-product-demos/apd-ee-25:${_tag}
# tag manifest as latest
#podman tag quay.io/ansible-product-demos/apd-ee-25:${_tag} quay.io/ansible-product-demos/apd-ee-25:latest
# push all manifest content to repository
# using --all is important here: it pushes all content, not
# just the native platform content
#podman manifest push --all quay.io/ansible-product-demos/apd-ee-25:${_tag}
#podman manifest push --all quay.io/ansible-product-demos/apd-ee-25:latest

View File

@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f637eb0440f14f1458800c7a9012adcb9b58eb2131c02f64dfa4ca515e182093
size 54960859

View File

@@ -1,77 +0,0 @@
---
collections:
# AAP config as code
- name: ansible.controller
version: ">=4.6.0"
# TODO this fails trying to install a different version of
# the python-systemd package
# - name: ansible.eda # fails trying to install systemd-python package
# version: ">=2.1.0"
- name: ansible.hub
version: ">=1.0.0"
- name: ansible.platform
version: ">=2.5.0"
- name: infra.ah_configuration
version: ">=2.0.6"
- name: infra.controller_configuration
version: ">=2.11.0"
# linux demos
- name: ansible.posix
version: ">=1.5.4"
- name: community.general
version: ">=8.0.0"
- name: containers.podman
version: ">=1.12.1"
- name: redhat.insights
version: ">=1.2.2"
- name: redhat.rhel_system_roles
version: ">=1.23.0"
# windows demos
- name: microsoft.ad
version: "1.9"
- name: ansible.windows
version: ">=2.3.0"
- name: chocolatey.chocolatey
version: ">=1.5.1"
- name: community.windows
version: ">=2.2.0"
# cloud demos
- name: amazon.aws
version: ">=7.5.0"
# satellite demos
- name: redhat.satellite
version: ">=4.0.0"
# network demos
- name: ansible.netcommon
version: ">=6.0.0"
- name: cisco.ios
version: ">=7.0.0"
- name: cisco.iosxr
version: ">=8.0.0"
- name: cisco.nxos
version: ">=7.0.0"
- name: network.backup
version: ">=3.0.0"
# TODO on 2.5 ee-minimal-rhel9 this tries to build and install
# a different version of python netifaces, which fails
# - name: infoblox.nios_modules
# version: ">=1.6.1"
# openshift demos
- name: kubernetes.core
version: ">=4.0.0"
- name: redhat.openshift
version: ">=3.0.1"
- name: redhat.openshift_virtualization
version: ">=1.4.0"
# for RHDP
- name: ansible.utils
version: ">=5.1.0"
- name: kubevirt.core
version: ">=2.1.0"
- name: community.okd
version: ">=4.0.0"
- name: https://github.com/rhpds/assisted_installer.git
type: git
version: "v0.0.1"
...

View File

@@ -1,14 +1,21 @@
---
collections:
# AAP config as code
- name: ansible.controller
version: "<4.6.0"
version: ">=4.6.0"
# TODO this fails trying to install a different version of
# the python-systemd package
# - name: ansible.eda # fails trying to install systemd-python package
# version: ">=2.1.0"
- name: ansible.hub
version: ">=1.0.0"
- name: ansible.platform
version: ">=2.5.0"
- name: infra.ah_configuration
version: ">=2.0.6"
- name: infra.controller_configuration
version: ">=2.9.0"
- name: redhat_cop.controller_configuration
version: ">=2.3.1"
# linux
version: ">=2.11.0"
# linux demos
- name: ansible.posix
version: ">=1.5.4"
- name: community.general
@@ -19,7 +26,7 @@ collections:
version: ">=1.2.2"
- name: redhat.rhel_system_roles
version: ">=1.23.0"
# windows
# windows demos
- name: microsoft.ad
version: "1.9"
- name: ansible.windows
@@ -28,13 +35,13 @@ collections:
version: ">=1.5.1"
- name: community.windows
version: ">=2.2.0"
# cloud
# cloud demos
- name: amazon.aws
version: ">=7.5.0"
# satellite
# satellite demos
- name: redhat.satellite
version: ">=4.0.0"
# network
# network demos
- name: ansible.netcommon
version: ">=6.0.0"
- name: cisco.ios
@@ -43,12 +50,20 @@ collections:
version: ">=8.0.0"
- name: cisco.nxos
version: ">=7.0.0"
- name: infoblox.nios_modules
version: ">=1.6.1"
# openshift
- name: network.backup
version: ">=3.0.0"
# TODO on 2.5 ee-minimal-rhel9 this tries to build and install
# a different version of python netifaces, which fails
# - name: infoblox.nios_modules
# version: ">=1.6.1"
# openshift demos
- name: ansible.utils
version: ">=6.0.0"
- name: kubernetes.core
version: ">=4.0.0"
- name: redhat.openshift
version: ">=3.0.1"
- name: redhat.openshift_virtualization
version: ">=1.4.0"
...

View File

@@ -52,7 +52,9 @@
state: enabled
immediate: true
permanent: true
when: "'firewalld.service' in ansible_facts.services"
when:
- "'firewalld.service' in ansible_facts.services"
- ansible_facts.services["firewalld.service"].state == "running"
- name: Disable httpd welcome page
ansible.builtin.file:

View File

@@ -2,45 +2,65 @@
roles:
# RHEL 7 compliance roles from ComplianceAsCode
- name: redhatofficial.rhel7-cis
src: https://github.com/RedHatOfficial/ansible-role-rhel7-cis
version: 0.1.72
- name: redhatofficial.rhel7-cjis
src: https://github.com/RedHatOfficial/ansible-role-rhel7-cjis
version: 0.1.72
- name: redhatofficial.rhel7-cui
src: https://github.com/RedHatOfficial/ansible-role-rhel7-cui
version: 0.1.72
- name: redhatofficial.rhel7-hipaa
src: https://github.com/RedHatOfficial/ansible-role-rhel7-hipaa
version: 0.1.72
- name: redhatofficial.rhel7-ospp
src: https://github.com/RedHatOfficial/ansible-role-rhel7-ospp
version: 0.1.72
- name: redhatofficial.rhel7-pci-dss
src: https://github.com/RedHatOfficial/ansible-role-rhel7-pci-dss
version: 0.1.72
- name: redhatofficial.rhel7-stig
src: https://github.com/RedHatOfficial/ansible-role-rhel7-stig
version: 0.1.72
# RHEL 8 compliance roles from ComplianceAsCode
- name: redhatofficial.rhel8-cis
src: https://github.com/RedHatOfficial/ansible-role-rhel8-cis
version: 0.1.72
- name: redhatofficial.rhel8-cjis
src: https://github.com/RedHatOfficial/ansible-role-rhel8-cjis
version: 0.1.72
- name: redhatofficial.rhel8-cui
src: https://github.com/RedHatOfficial/ansible-role-rhel8-cui
version: 0.1.72
- name: redhatofficial.rhel8-hipaa
src: https://github.com/RedHatOfficial/ansible-role-rhel8-hipaa
version: 0.1.72
- name: redhatofficial.rhel8-ospp
src: https://github.com/RedHatOfficial/ansible-role-rhel8-ospp
version: 0.1.72
- name: redhatofficial.rhel8-pci-dss
src: https://github.com/RedHatOfficial/ansible-role-rhel8-pci-dss
version: 0.1.72
- name: redhatofficial.rhel8-stig
src: https://github.com/RedHatOfficial/ansible-role-rhel8-stig
version: 0.1.72
# RHEL 9 compliance roles from ComplianceAsCode
- name: redhatofficial.rhel9-cis
src: https://github.com/RedHatOfficial/ansible-role-rhel9-cis
version: 0.1.72
- name: redhatofficial.rhel9-cui
src: https://github.com/RedHatOfficial/ansible-role-rhel9-cui
version: 0.1.72
- name: redhatofficial.rhel9-hipaa
src: https://github.com/RedHatOfficial/ansible-role-rhel9-hipaa
version: 0.1.72
- name: redhatofficial.rhel9-ospp
src: https://github.com/RedHatOfficial/ansible-role-rhel9-ospp
version: 0.1.72
- name: redhatofficial.rhel9-pci-dss
src: https://github.com/RedHatOfficial/ansible-role-rhel9-pci-dss
version: 0.1.72
- name: redhatofficial.rhel9-stig
src: https://github.com/RedHatOfficial/ansible-role-rhel9-stig
version: 0.1.72
...

View File

@@ -46,15 +46,17 @@
- name: Create some users
microsoft.ad.user:
name: "{{ item.name }}"
groups: "{{ item.groups }}"
groups:
set:
- "{{ item.group }}"
password: "{{ lookup('community.general.random_string', min_lower=1, min_upper=1, min_special=1, min_numeric=1) }}"
update_password: on_create
loop:
- name: "UserA"
groups: "GroupA"
group: "GroupA"
- name: "UserB"
groups: "GroupB"
group: "GroupB"
- name: "UserC"
groups: "GroupC"
group: "GroupC"
retries: 5
delay: 10

View File

@@ -10,7 +10,7 @@
# Example result: ['&Qw2|E[-']
- name: Create new user
community.windows.win_domain_user:
microsoft.ad.user:
name: "{{ firstname }} {{ surname }}"
firstname: "{{ firstname }}"
surname: "{{ surname }}"

View File

@@ -16,7 +16,7 @@
- name: Ensure Demo OU exists
run_once: true
delegate_to: "{{ domain_controller }}"
community.windows.win_domain_ou:
microsoft.ad.ou:
name: Demo
state: present
@@ -26,7 +26,7 @@
- name: Join ansible.local domain
register: r_domain_membership
ansible.windows.win_domain_membership:
microsoft.ad.membership:
dns_domain_name: ansible.local
hostname: "{{ inventory_hostname.split('.')[0] }}"
domain_admin_user: "{{ ansible_user }}@ansible.local"