-
Notifications
You must be signed in to change notification settings - Fork 39
Expand file tree
/
Copy pathcluster.yml
More file actions
29 lines (24 loc) · 2.04 KB
/
cluster.yml
File metadata and controls
29 lines (24 loc) · 2.04 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
---
# Account for the fact we are running outside of the expected environment system:
caas_inventory: "{{ ansible_inventory_sources | last }}" # ansible_inventory_sources is absolute
appliances_environment_root: "{{ caas_inventory | dirname }}"
appliances_repository_root: "{{ appliances_environment_root | dirname | dirname }}"
# Read the secrets from the Ansible local facts on the control host.
# The control-host fact path is factored into a single variable so it is
# defined in exactly one place; Ansible resolves variables lazily, so the
# per-secret vars below evaluate identically to spelling the path out each time.
caas_openhpc_secrets: "{{ hostvars[groups['control'][0]].ansible_local.openhpc_secrets }}"
vault_azimuth_user_password: "{{ caas_openhpc_secrets.vault_azimuth_user_password }}"
vault_grafana_admin_password: "{{ caas_openhpc_secrets.vault_grafana_admin_password }}"
vault_elasticsearch_admin_password: "{{ caas_openhpc_secrets.vault_elasticsearch_admin_password }}"
vault_elasticsearch_kibana_password: "{{ caas_openhpc_secrets.vault_elasticsearch_kibana_password }}"
vault_mysql_root_password: "{{ caas_openhpc_secrets.vault_mysql_root_password }}"
vault_mysql_slurm_password: "{{ caas_openhpc_secrets.vault_mysql_slurm_password }}"
vault_openhpc_mungekey: "{{ caas_openhpc_secrets.vault_openhpc_mungekey }}"
vault_alertmanager_admin_password: "{{ caas_openhpc_secrets.vault_alertmanager_admin_password }}"
# Override this to cope with the case where the podman group just doesn't exist
appliances_local_users_podman_enable: "{{ groups.get('podman', []) | length > 0 }}"
# The server name for Open OnDemand depends on whether Zenith is enabled or not:
# fall back to a sslip.io name derived from the gateway IP (dots -> dashes).
openondemand_servername_default: "{{ hostvars[groups['openstack'][0]].cluster_gateway_ip | replace('.', '-') ~ '.sslip.io' }}"
openondemand_servername: "{{ zenith_fqdn_ood | default(openondemand_servername_default) }}"
appliances_state_dir: /var/lib/state
caas_ood_zenith_dir: /var/lib/ood_zenith
# Defaults for caas-provided extravars:
cluster_project_manila_share: false
cluster_home_manila_share: false