Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 43 additions & 0 deletions ansible/rhacm-downstream-deploy-421.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
---
# Playbook to install RHACM Downstream Build
#
# Example Usage:
#
# time ansible-playbook -i ansible/inventory/cloud30.local ansible/rhacm-downstream-deploy-421.yml
#

- name: Setup downstream RHACM Mirror
  hosts: bastion
  vars_files:
    - vars/all.yml
  vars:
    wait_for_machineconfigpool_update: false
  roles:
    - role: rhacm-hub-mirror

# NOTE(review): the original comment says "Konflux builds no longer require this
# workaround", yet the play is still active — confirm whether it can be removed
- name: Fix ipv6/disconnected nodes /etc/containers/registries.conf for tag mirroring
  hosts: fix_icsp_nodes
  vars_files:
    - vars/all.yml
  roles:
    - role: icsp-enable-tag-mirror

- name: Install downstream RHACM
  hosts: bastion
  vars_files:
    - vars/all.yml
  roles:
    # Consistent `role:` dict form throughout; conditional roles gated by vars
    - role: rhacm-downstream-deploy
    - role: rhacm-observability
      when: setup_rhacm_observability
    - role: talm-deploy
      when: setup_talm_operator or setup_talm_repo
    - role: rhacm-ztp-patches-421
      when: setup_rhacm_ztp_patches
    - role: rhacm-siteconfig-operator
      when: acm_enable_siteconfig
    - role: mce-assisted-installer
    - role: mce-image-based-install
      when: mce_enable_ibio
    - role: mce-add-clusterimagesets
18 changes: 18 additions & 0 deletions ansible/rhacm-ztp-setup-421.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
---
# Playbook to setup ZTP for RHACM
#
# Example Usage:
#
# time ansible-playbook -i ansible/inventory/cloud30.local ansible/rhacm-ztp-setup-421.yml
#

- name: Setup RHACM ZTP
  hosts: bastion
  vars_files:
    - vars/all.yml
  roles:
    # Consistent `role:` dict form throughout; conditional roles gated by vars
    - role: rhacm-ztp-setup-421
    - role: telco-core-ztp
      when: setup_core_ztp
    - role: telco-ran-du-ztp-421
      when: setup_ran_du_ztp
39 changes: 39 additions & 0 deletions ansible/roles/rhacm-ztp-patches-421/tasks/main.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
---
# rhacm-ztp-patches tasks
#
# Applies the telco-reference ArgoCD deployment kustomization to the hub
# cluster, patches the openshift-gitops ArgoCD instance, and then applies the
# cluster/policy ArgoCD Applications.

- name: Apply the ArgoCD Deployment kustomization
  shell: |
    KUBECONFIG={{ hub_cluster_kubeconfig }} oc apply -k {{ install_directory }}/rhacm-ztp/telco-reference/telco-ran/configuration/argocd/deployment

- name: Apply ArgoCD telco-reference argocd-openshift-gitops-patch.json
  shell: |
    KUBECONFIG={{ hub_cluster_kubeconfig }} oc patch argocd openshift-gitops -n openshift-gitops --patch-file {{ install_directory }}/rhacm-ztp/telco-reference/telco-ran/configuration/argocd/deployment/argocd-openshift-gitops-patch.json --type=merge

# Poll (up to 60 tries x 2s) until the repo-server pod restarted by the patch
# above reports phase Running
- name: Wait for openshift-gitops-repo-server pod running
  shell: |
    KUBECONFIG={{ hub_cluster_kubeconfig }} oc get pods -n openshift-gitops -l app.kubernetes.io/name=openshift-gitops-repo-server -o jsonpath='{.items[0].status.phase}'
  changed_when: false  # read-only query; never mutates the cluster
  retries: 60
  delay: 2
  register: as_pod
  until: as_pod.stdout == "Running"

# Although we wait for the new repo-server pod to be running, we can still apply
# the cluster and policy applications too quickly
- name: Pause for 15s
  pause:
    seconds: 15

# View the resources with oc get argocd -n openshift-gitops openshift-gitops -o json | jq '.spec.redis.resources'
- name: Patch openshift-gitops redis memory requests/limits
  when: gitops_redis_mem_patch
  shell: |
    KUBECONFIG={{ hub_cluster_kubeconfig }} oc patch argocd -n openshift-gitops openshift-gitops --type json -p '[{"op": "replace", "path": "/spec/redis/resources/limits/memory", "value": "8Gi"}, {"op": "replace", "path": "/spec/redis/resources/requests/memory", "value": "256Mi"}]'

- name: Apply ArgoCD Cluster Applications
  when: setup_ztp_cluster_applications
  shell: |
    KUBECONFIG={{ hub_cluster_kubeconfig }} oc apply -f {{ install_directory }}/rhacm-ztp/telco-reference/telco-ran/configuration/argocd/cluster-applications/

- name: Apply the ArgoCD Policy Applications
  when: setup_ztp_policy_application
  shell: |
    KUBECONFIG={{ hub_cluster_kubeconfig }} oc apply -f {{ install_directory }}/rhacm-ztp/telco-reference/telco-ran/configuration/argocd/policy-applications/
18 changes: 18 additions & 0 deletions ansible/roles/rhacm-ztp-setup-421/defaults/main.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
---
# rhacm-ztp-setup default vars

# Upstream repo (and branch) that provides the cluster/policy applications
# and data
telco_reference_repo: 'https://github.com/openshift-kni/telco-reference.git'
telco_reference_branch: 'release-4.21'

# Gogs git service the repo is committed/pushed to
# (IPv6 literal must remain bracketed for URL construction)
gogs_host: '[fc00:1000::1]'
gogs_port: 10880
# NOTE(review): test-only credentials — presumably fine for lab use; confirm
gogs_username: 'testadmin'
gogs_password: 'testadmin'

# Tag of the ztp-site-generator container image
ztp_site_generator_image_tag: 'v4.21.0-1'

# Tag of the RHACM Policy Generator image
rhacm_policy_generator_image_tag: 'v2.13.4-2'
105 changes: 105 additions & 0 deletions ansible/roles/rhacm-ztp-setup-421/tasks/main.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
---
# rhacm-ztp-setup tasks
#
# Clones the telco-reference repo, force-pushes it into the local gogs
# mirror, rewrites image/tag references for disconnected use, removes the
# cluster/policy app manifests from the argocd deployment, and pushes the
# initial ZTP configuration.

- name: Clear old telco-reference clone
  file:
    path: "{{ item }}"
    state: absent
  loop:
    - "{{ install_directory }}/rhacm-ztp/telco-reference"

- name: Create directories for rhacm-ztp
  file:
    path: "{{ item }}"
    state: directory
  loop:
    - "{{ install_directory }}/rhacm-ztp"
    - "{{ install_directory }}/rhacm-ztp/telco-reference"

- name: Clone telco-reference
  git:
    repo: "{{ telco_reference_repo }}"
    dest: "{{ install_directory }}/rhacm-ztp/telco-reference"
    force: true
    version: "{{ telco_reference_branch }}"

# The repo may already exist in gogs, so all errors are ignored.
# TODO(review): prefer `status_code: [201, 409]` without ignore_errors so that
# genuine failures (auth, network) still abort the play.
- name: Create telco-reference repo in gogs
  uri:
    url: "http://{{ gogs_host }}:{{ gogs_port }}/api/v1/admin/users/{{ gogs_username }}/repos"
    user: "{{ gogs_username }}"
    password: "{{ gogs_password }}"
    force_basic_auth: true
    method: POST
    headers:
      content-type: application/json
      Accept: application/json
    body: {"name": "telco-reference", "description": "test repo", "private": false}
    body_format: json
    validate_certs: false
    status_code: 201
    return_content: true
  ignore_errors: true

# The --force flag on the git push forces the gogs fork of the repo to be
# reset to the fresh clone above
- name: Push telco-reference into gogs
  shell: |
    cd {{ install_directory }}/rhacm-ztp/telco-reference
    git remote add origin-gogs http://{{ gogs_username }}:{{ gogs_password }}@{{ gogs_host }}:{{ gogs_port }}/{{ gogs_username }}/telco-reference.git
    git push -u origin-gogs --all --force

# Rewrite registries and image tags inside the gitops patch file; items with
# enabled=false are skipped (default is enabled)
- name: Patches for telco-reference argocd-openshift-gitops-patch.json
  replace:
    path: "{{ install_directory }}/rhacm-ztp/telco-reference/telco-ran/configuration/argocd/deployment/argocd-openshift-gitops-patch.json"
    replace: "{{ item.replace }}"
    regexp: "{{ item.regexp }}"
  when: item.enabled | default(true)
  loop:
    # Point image references at the disconnected registry when one is set
    - replace: "{{ rhacm_disconnected_registry }}:{{ rhacm_disconnected_registry_port }}"
      regexp: "quay.io"
      enabled: "{{ rhacm_disconnected_registry and rhacm_disconnected_registry|length > 1 }}"
    # - replace: "openshift-kni"
    #   regexp: "redhat_emp1"
    # For rhacm policy generator release-4.14 branch
    - replace: "{{ rhacm_disconnected_registry }}:{{ rhacm_disconnected_registry_port }}"
      regexp: "registry.redhat.io"
      enabled: "{{ rhacm_disconnected_registry and rhacm_disconnected_registry|length > 1 }}"
    # - replace: "acm-d"
    #   regexp: "rhacm2"
    #   enabled: "{{ rhacm_disconnected_registry and rhacm_disconnected_registry|length > 1 }}"
    # release-4.17 telco-reference
    - replace: "{{ rhacm_policy_generator_image_tag }}"
      regexp: "v2.11"
    - replace: "{{ rhacm_policy_generator_image_tag }}"
      regexp: "v2.7"
    # release-4.11 telco-reference
    - replace: "{{ ztp_site_generator_image_tag }}"
      regexp: "latest"
    # release-4.10 telco-reference
    - replace: "{{ ztp_site_generator_image_tag }}"
      regexp: "4.10.0"

- name: Remove the cluster/policies app from telco-reference argocd deployment
  file:
    state: absent
    path: "{{ install_directory }}/rhacm-ztp/telco-reference/telco-ran/configuration/argocd/deployment/{{ item }}"
  loop:
    - clusters-app.yaml
    - policies-app.yaml

- name: Remove clusters-app.yaml and policies-app.yaml from telco-reference argocd deployment kustomization.yaml
  replace:
    path: "{{ install_directory }}/rhacm-ztp/telco-reference/telco-ran/configuration/argocd/deployment/kustomization.yaml"
    replace: ""
    regexp: "{{ item }}"
  loop:
    - ".*- clusters-app.yaml"
    - ".*- policies-app.yaml"

- name: Commit and push ZTP initial configuration to telco-reference
  shell: |
    cd {{ install_directory }}/rhacm-ztp/telco-reference
    git commit -a -m "Add common ZTP initial configuration"
    git push origin-gogs
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ spec:
holdInstallation: false
{% if sno_du_profile == "4.15" %}
installConfigOverrides: "{\"capabilities\":{\"baselineCapabilitySet\": \"None\", \"additionalEnabledCapabilities\": [ \"OperatorLifecycleManager\", \"NodeTuning\" ] }}"
{% elif sno_du_profile in ["4.16", "4.17", "4.18", "4.19", "4.20"] %}
{% elif sno_du_profile in ["4.16", "4.17", "4.18", "4.19", "4.20", "4.21"] %}
installConfigOverrides: "{\"capabilities\":{\"baselineCapabilitySet\": \"None\", \"additionalEnabledCapabilities\": [ \"OperatorLifecycleManager\", \"Ingress\", \"NodeTuning\" ] }}"
ignitionConfigOverride: "{\"ignition\":{\"version\":\"3.2.0\"},\"storage\":{\"files\":[{\"path\":\"/etc/containers/policy.json\",\"mode\":420,\"overwrite\":true,\"contents\":{\"source\":\"data:text/plain;charset=utf-8;base64,ewogICAgImRlZmF1bHQiOiBbCiAgICAgICAgewogICAgICAgICAgICAidHlwZSI6ICJpbnNlY3VyZUFjY2VwdEFueXRoaW5nIgogICAgICAgIH0KICAgIF0sCiAgICAidHJhbnNwb3J0cyI6CiAgICAgICAgewogICAgICAgICAgICAiZG9ja2VyLWRhZW1vbiI6CiAgICAgICAgICAgICAgICB7CiAgICAgICAgICAgICAgICAgICAgIiI6IFt7InR5cGUiOiJpbnNlY3VyZUFjY2VwdEFueXRoaW5nIn1dCiAgICAgICAgICAgICAgICB9CiAgICAgICAgfQp9\"}}]}}"
{% endif %}
Expand Down
74 changes: 74 additions & 0 deletions ansible/roles/telco-ran-du-ztp-421/defaults/main.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
---
# telco-ran-du-ztp default vars

# Pre-creates manifests for the desired number of argocd cluster applications
cluster_applications_count: 40

siteconfigs_per_application: 100

siteconfigs_directories:
  - /root/hv-vm/sno/ai-siteconfig
  - /root/hv-vm/compact/ai-siteconfig
  - /root/hv-vm/standard/ai-siteconfig

#####
# Siteconfig extra-manifests to include:
#####
# Include the crun container runtime manifest with day-0 install via siteconfig extra-manifests directory
include_crun_extra_manifests: true
# Include a modified sync-time-once chronyd manifest with day-0 install instead of the ztp generator included version
# Used in conjunction with "siteconfig_exclude_sync_time_once" to prevent chronyd start and time drift issue
# See https://issues.redhat.com/browse/OCPBUGS-21740
include_synctimeonce_extra_manifests: false
# Include a manifest to partition /var/lib/containers for IBU
include_varlibcontainers_partitioned_extra_manifests: false

#####
# DU Profile Options
#####
# Currently can choose between 4.21, 4.20, 4.19, 4.18, 4.17, 4.16, 4.15, 4.14, 4.13, and 4.12
# Quoted so YAML keeps it a string — the du profile templates compare this
# against string lists like ["4.16", ..., "4.21"]; an unquoted 4.21 parses
# as a float and would never match
du_profile_version: "4.21"

disconnected_operator_index_name: redhat/redhat-operator-index
operator_index_tag: v4.21

# Initialize the siteconfig list so it can be sorted later
siteconfig_list: []

# Adjust the name of the du profile catalog source so it does not conflict with default names
# https://bugzilla.redhat.com/show_bug.cgi?id=2074612
common_catalogsource_name: rh-du-operators

# These policy names were adjusted to increase the number of ztp generated policies to match as if performanceprofile
# was enabled. The original names are commented below.
group_policy_logforwarder_name: "config-log-policy"
group_policy_storage_name: "config-storage-policy"
# group_policy_logforwarder_name: "config-policy"
# group_policy_storage_name: "config-policy"

# Image Based Upgrades requires the oadp operator to be installed on SNOs
include_oadp_operator: false
# Adjust URL to match your cluster's minio route
oadp_s3Url: http://minio-minio.apps.bm.example.com
s3_access_key_id: minio
s3_secret_access_key: minio123

# Image Based Upgrades requires the Lifecycle-agent operator to be installed on SNOs
include_lca_operator: false
# alpha for 4.15, stable for 4.16 and brew
lifecycle_agent_channel: stable
# v1alpha1 (alpha installed operator), v1 for brew (stable)
ibu_source_crs_apiversion: v1

# Only implemented for SNOs, organizes the CRs into 13-18 policies instead of the default of 5 policies
manyPolicies: false

# When enabled, creates extra annotations in common PGT which use hub side templating
extraHubCommonTemplates: false
# When enabled, creates extra annotations in group PGT which use hub side templating
extraHubGroupTemplates: false
# When enabled, creates extra annotations from site specific ConfigMaps on hub in group PGT
extraHubSiteTemplates: false

# When enabled use ACM PolicyGenerator instead of RAN PolicyGenTemplate to generate ACM policies
acm_policygenerator: false
1 change: 1 addition & 0 deletions ansible/roles/telco-ran-du-ztp-421/tasks/Untitled
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
source-crs
Loading