---
# Defines deployment design and assigns role to server groups

- hosts: localhost
  connection: local
  tasks:
    - name: Warn about ceph-ansible current status
      fail:
        msg: "cephadm is the new official installer. Please, consider migrating.
              See https://docs.ceph.com/en/latest/cephadm/install for new deployments
              or https://docs.ceph.com/en/latest/cephadm/adoption for migrating existing deployments."
      when: not yes_i_know | default(false) | bool
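
# The task above stops the run until the cephadm migration notice has been
# acknowledged. A typical way to acknowledge it is to pass the variable on the
# command line (illustrative invocation; adjust the inventory path as needed):
#
#   ansible-playbook -i hosts site.yml -e yes_i_know=true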

- hosts:
    - mons
    - osds
    - mdss
    - rgws
    - nfss
    - rbdmirrors
    - clients
    - mgrs
    - iscsigws
    - monitoring
    - rgwloadbalancers

  gather_facts: false
  any_errors_fatal: true
  become: true

  tags: always

  vars:
    delegate_facts_host: True

  pre_tasks:
    - import_role:
        name: ceph-defaults

    # If we can't get python2 installed before any module is used we will fail
    # so just try what we can to get it installed
    - import_tasks: raw_install_python.yml
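
    # raw_install_python.yml is expected to bootstrap a Python interpreter with
    # the raw module before any regular module runs. A minimal sketch of that
    # kind of bootstrap task (illustrative only, not the actual ceph-ansible
    # implementation):
    #
    #   - name: install python if missing
    #     raw: command -v python3 || (apt-get -y install python3 || yum -y install python3)
    #     changed_when: false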

    - name: gather facts
      setup:
        gather_subset:
          - 'all'
          - '!facter'
          - '!ohai'
      when:
        - not delegate_facts_host | bool or inventory_hostname in groups.get(client_group_name, [])

    - name: gather and delegate facts
      setup:
        gather_subset:
          - 'all'
          - '!facter'
          - '!ohai'
      delegate_to: "{{ item }}"
      delegate_facts: True
      with_items: "{{ groups['all'] | difference(groups.get('clients', [])) }}"
      run_once: true
      when: delegate_facts_host | bool
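
    # Fact gathering is centralised here so the later plays can keep
    # gather_facts: false. On very large inventories the delegation loop above
    # can be skipped by overriding the variable defined in vars, e.g.
    # (illustrative invocation):
    #
    #   ansible-playbook site.yml -e delegate_facts_host=false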

  tasks:
    # dummy container setup is only supported on x86_64
    # when running with containerized_deployment: true this task
    # creates a group that contains only x86_64 hosts.
    # when running with containerized_deployment: false this task
    # will add all client hosts to the group (and not filter).
    - name: create filtered clients group
      add_host:
        name: "{{ item }}"
        groups: _filtered_clients
      with_items: "{{ groups.get(client_group_name, []) | intersect(ansible_play_batch) }}"
      when: (hostvars[item]['ansible_facts']['architecture'] == 'x86_64') or (not containerized_deployment | bool)
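
    # The in-memory _filtered_clients group built above only lasts for this
    # playbook run; it exists so client-related work can target the filtered
    # set of hosts. An illustrative pattern (not necessarily the exact play
    # used in this repository):
    #
    #   - hosts: _filtered_clients
    #     gather_facts: false
    #     tasks:
    #       - import_role:
    #           name: ceph-client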

    - import_role:
        name: ceph-facts
    - import_role:
        name: ceph-handler
    - import_role:
        name: ceph-validate
    - import_role:
        name: ceph-infra
    - import_role:
        name: ceph-common

- hosts: mons
  gather_facts: false
  become: True
  any_errors_fatal: true
  pre_tasks:
    - name: set ceph monitor install 'In Progress'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_mon:
            status: "In Progress"
            start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"
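
  # Each play records its phase with set_stats so overall progress can be
  # reviewed at the end of the run. With the default callback these custom
  # stats are only printed when the option is enabled, e.g. in ansible.cfg
  # (standard Ansible setting):
  #
  #   [defaults]
  #   show_custom_stats = True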

  tasks:
    - import_role:
        name: ceph-defaults
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-facts
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-handler
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-config
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-mon
    - import_role:
        name: ceph-mgr
      when: groups.get(mgr_group_name, []) | length == 0

  post_tasks:
    - name: set ceph monitor install 'Complete'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_mon:
            status: "Complete"
            end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

- hosts: mgrs
  gather_facts: false
  become: True
  any_errors_fatal: true
  pre_tasks:
    - name: set ceph manager install 'In Progress'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_mgr:
            status: "In Progress"
            start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

  tasks:
    - import_role:
        name: ceph-defaults
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-facts
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-handler
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-config
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-mgr

  post_tasks:
    - name: set ceph manager install 'Complete'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_mgr:
            status: "Complete"
            end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

- hosts: mons[0]
  become: True
  gather_facts: false
  any_errors_fatal: true
  tasks:
    - import_role:
        name: ceph-defaults

    - name: set global config
      ceph_config:
        action: set
        who: "global"
        option: "{{ item.key }}"
        value: "{{ item.value }}"
      with_dict:
        "{{ {
              'public_network': public_network | default(False),
              'cluster_network': cluster_network | default(False),
              'osd pool default crush rule': osd_pool_default_crush_rule,
              'ms bind ipv6': 'true' if ip_version == 'ipv6' else 'false',
              'ms bind ipv4': 'false' if ip_version == 'ipv6' else 'true',
              'osd crush chooseleaf type': '0' if common_single_host_mode | default(False) | bool else False,
            } }}"
      when:
        - inventory_hostname == ansible_play_hosts_all | last
        - item.value
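
    # ceph_config is ceph-ansible's wrapper around the monitor's centralized
    # configuration database. Each item above is roughly equivalent to running
    # (illustrative CLI equivalent, example subnet):
    #
    #   ceph config set global public_network 192.168.0.0/24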

    - name: set global config overrides
      ceph_config:
        action: set
        who: "global"
        option: "{{ item.key }}"
        value: "{{ item.value }}"
      when: inventory_hostname == ansible_play_hosts_all | last
      with_dict: "{{ ceph_conf_overrides['global'] }}"
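
    # ceph_conf_overrides is the usual place to feed extra [global] options
    # into the cluster. A minimal sketch of what that variable can look like
    # in group_vars (the key shown is illustrative, not required):
    #
    #   ceph_conf_overrides:
    #     global:
    #       mon_max_pg_per_osd: 400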

    - name: set osd_memory_target
      ceph_config:
        action: set
        who: "osd.*/{{ item }}:host"
        option: "osd_memory_target"
        value: "{{ _osd_memory_target | default(osd_memory_target) }}"
      when: inventory_hostname == ansible_play_hosts_all | last
      loop: "{{ groups[osd_group_name] | default([]) }}"
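
    # The who value above scopes osd_memory_target to the OSD daemons of one
    # host at a time, which matters when hosts have different amounts of RAM.
    # A rough CLI analogue for a single host (illustrative hostname, value in
    # bytes):
    #
    #   ceph config set osd/host:osd-node-1 osd_memory_target 4294967296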

- hosts: osds
  gather_facts: false
  become: True
  any_errors_fatal: true
  pre_tasks:
    - name: set ceph osd install 'In Progress'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_osd:
            status: "In Progress"
            start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

  tasks:
    - import_role:
        name: ceph-defaults
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-facts
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-handler
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-config
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-osd

  post_tasks:
    - name: set ceph osd install 'Complete'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_osd:
            status: "Complete"
            end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

- hosts: mdss
  gather_facts: false
  become: True
  any_errors_fatal: true
  pre_tasks:
    - name: set ceph mds install 'In Progress'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_mds:
            status: "In Progress"
            start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

  tasks:
    - import_role:
        name: ceph-defaults
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-facts
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-handler
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-config
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-mds

  post_tasks:
    - name: set ceph mds install 'Complete'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_mds:
            status: "Complete"
            end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

- hosts: rgws
  gather_facts: false
  become: True
  any_errors_fatal: true
  pre_tasks:
    - name: set ceph rgw install 'In Progress'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_rgw:
            status: "In Progress"
            start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

  tasks:
    - import_role:
        name: ceph-defaults
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-facts
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-handler
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-config
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-rgw

  post_tasks:
    - name: set ceph rgw install 'Complete'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_rgw:
            status: "Complete"
            end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

- hosts: clients
  gather_facts: false
  become: True
  any_errors_fatal: true
  tags: 'ceph_client'
  pre_tasks:
    - name: set ceph client install 'In Progress'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_client:
            status: "In Progress"
            start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

  tasks:
    - import_role:
        name: ceph-defaults
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-facts
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-handler
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-config
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-client

  post_tasks:
    - name: set ceph client install 'Complete'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_client:
            status: "Complete"
            end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

- hosts: nfss
  gather_facts: false
  become: True
  any_errors_fatal: true
  pre_tasks:
    - name: set ceph nfs install 'In Progress'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_nfs:
            status: "In Progress"
            start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

  tasks:
    - import_role:
        name: ceph-defaults
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-facts
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-handler
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-config
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-nfs

  post_tasks:
    - name: set ceph nfs install 'Complete'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_nfs:
            status: "Complete"
            end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

- hosts: rbdmirrors
  gather_facts: false
  become: True
  any_errors_fatal: true
  pre_tasks:
    - name: set ceph rbd mirror install 'In Progress'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_rbdmirror:
            status: "In Progress"
            start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

  tasks:
    - import_role:
        name: ceph-defaults
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-facts
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-handler
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-config
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-rbd-mirror

  post_tasks:
    - name: set ceph rbd mirror install 'Complete'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_rbdmirror:
            status: "Complete"
            end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

- hosts:
    - iscsigws

  gather_facts: false
  become: True
  any_errors_fatal: true
  pre_tasks:
    - name: set ceph iscsi gateway install 'In Progress'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_iscsi_gw:
            status: "In Progress"
            start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

  tasks:
    - import_role:
        name: ceph-defaults
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-facts
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-handler
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-config
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-iscsi-gw

  post_tasks:
    - name: set ceph iscsi gw install 'Complete'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_iscsi_gw:
            status: "Complete"
            end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

- hosts:
    - rgwloadbalancers

  gather_facts: false
  become: True
  any_errors_fatal: true
  pre_tasks:
    - name: set ceph rgw loadbalancer install 'In Progress'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_rgw_loadbalancer:
            status: "In Progress"
            start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

  tasks:
    - import_role:
        name: ceph-defaults
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-facts
      tags: ['ceph_update_config']
    - import_role:
        name: ceph-rgw-loadbalancer

  post_tasks:
    - name: set ceph rgw loadbalancer install 'Complete'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_rgw_loadbalancer:
            status: "Complete"
            end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

- import_playbook: dashboard.yml
  when:
    - dashboard_enabled | bool
    - groups.get(monitoring_group_name, []) | length > 0

- hosts:
    - mons
    - osds
    - mdss
    - rgws
    - rbdmirrors
    - mgrs

  gather_facts: false
  become: True
  any_errors_fatal: true
  pre_tasks:
    - name: set ceph crash install 'In Progress'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_crash:
            status: "In Progress"
            start: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

  tasks:
    - import_role:
        name: ceph-defaults
    - import_role:
        name: ceph-facts
        tasks_from: container_binary.yml
    - import_role:
        name: ceph-handler
    - import_role:
        name: ceph-crash

  post_tasks:
    - name: set ceph crash install 'Complete'
      run_once: true
      set_stats:
        data:
          installer_phase_ceph_crash:
            status: "Complete"
            end: "{{ lookup('pipe', 'date +%Y%m%d%H%M%SZ') }}"

- hosts: mons
  gather_facts: false
  become: True
  any_errors_fatal: true
  tasks:
    - import_role:
        name: ceph-defaults

    - name: get ceph status from the first monitor
      command: ceph --cluster {{ cluster }} -s
      register: ceph_status
      changed_when: false
      delegate_to: "{{ groups[mon_group_name][0] }}"
      run_once: true

    - name: "show ceph status for cluster {{ cluster }}"
      debug:
        msg: "{{ ceph_status.stdout_lines }}"
      delegate_to: "{{ groups[mon_group_name][0] }}"
      run_once: true
      when:
        - ceph_status is not skipped
        - ceph_status is successful
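
# Example of the kind of summary printed by the task above, taken from a small
# single-node test cluster (IDs and sizes will differ per deployment):
#
#   cluster:
#     id:     753212df-f32a-4cc9-a097-2db6fe89a251
#     health: HEALTH_OK
#
#   services:
#     mon: 1 daemons, quorum ceph-nano-lul-faa32aebf00b
#     mgr: ceph-nano-lul-faa32aebf00b(active)
#     osd: 1 osds: 1 up, 1 in
#
#   data:
#     pools:   4 pools, 32 pgs
#     objects: 224 objects, 2546 bytes
#     usage:   1027 MB used, 9212 MB / 10240 MB avail
#     pgs:     32 active+clean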