Merge pull request #490 from ceph/ansible-2

Necessary changes to work with Ansible v2
pull/482/merge
Leseb 2016-01-25 14:09:26 +01:00
commit 2f0c853d32
5 changed files with 569 additions and 229 deletions


@@ -23,43 +23,42 @@
       - ceph-mds
       - ceph-rgw
       - ceph-restapi
+    basedir: "{{ lookup('pipe', 'git rev-parse --show-toplevel') }}"

   tasks:
     - name: check for github prefix option on commandline
       tags: split
-      fail: >
-        msg='github prefix missing! e.g: (--extra-vars github=ceph/ansible).'
+      fail:
+        msg: 'github prefix missing! e.g: (--extra-vars github=ceph/ansible).'
       when: github == False

     - name: split the repo in seperate branches
       tags: split
-      command: >
-        git subtree split --prefix=roles/{{ item }} -b {{ item }} --squash
-        chdir=../
+      command: git subtree split --prefix=roles/{{ item }} -b {{ item }} --squash
+      args:
+        chdir: "{{ basedir }}"
       with_items: roles

     - name: adds remote github repos for the splits
       tags: split
-      command: >
-        git remote add {{ item }} git@github.com:{{ github }}-{{ item }}.git
-        chdir=../
+      command: git remote add {{ item }} git@github.com:{{ github }}-{{ item }}.git
+      args:
+        chdir: "{{ basedir }}"
       with_items: roles

     - name: adds upstream remote
       tags: update
-      command: >
-        git remote add upstream git@github.com:ceph/ceph-ansible.git
+      command: git remote add upstream git@github.com:ceph/ceph-ansible.git
       failed_when: false

     - name: pulles the newest changes from upstream
       tags: update
-      command: >
-        git pull upstream master:master
+      command: git pull upstream master:master

     - name: update the split repos from master
       tags: update
-      shell: >
-        git push {{ item }} $(git subtree split --prefix roles/{{ item }} master):master --force
-        chdir=../
+      shell: git push {{ item }} $(git subtree split --prefix roles/{{ item }} master):master --force
+      args:
+        chdir: "{{ basedir }}"
       with_items: roles
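The split/update tasks above now run from the repository top level ("{{ basedir }}") instead of a relative chdir=../. As a rough illustration (not part of the commit), the following Python sketch shows what the new pipe lookup evaluates to, assuming the playbook is run from inside a git checkout:

import subprocess

# Equivalent of "{{ lookup('pipe', 'git rev-parse --show-toplevel') }}":
# the absolute path of the top-level directory of the git work tree.
basedir = subprocess.check_output(
    ['git', 'rev-parse', '--show-toplevel']
).strip()
print(basedir)  # e.g. /home/user/ceph-ansible (path is illustrative)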


@@ -1,240 +1,581 @@
# (c) 2015, Kevin Carter <kevin.carter@rackspace.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import ConfigParser
import datetime
import io
import json
import os
import pwd
import time
import yaml

# Ansible v2
try:
    from ansible.plugins.action import ActionBase
    from ansible.utils.unicode import to_bytes, to_unicode
    from ansible import constants as C
    from ansible import errors

    CONFIG_TYPES = {
        'ini': 'return_config_overrides_ini',
        'json': 'return_config_overrides_json',
        'yaml': 'return_config_overrides_yaml'
    }

    def _convert_2_string(item):
        """Return byte strings for all items.

        This will convert everything within a dict, list or unicode string such
        that the values will be encode('utf-8') where applicable.
        """
        if isinstance(item, dict):
            # Old style dict comprehension for legacy python support
            return dict(
                (_convert_2_string(key), _convert_2_string(value))
                for key, value in item.iteritems()
            )
        elif isinstance(item, list):
            return [_convert_2_string(i) for i in item]
        elif isinstance(item, unicode):
            return item.encode('utf-8')
        else:
            return item

    class ActionModule(ActionBase):
        TRANSFERS_FILES = True

        @staticmethod
        def return_config_overrides_ini(config_overrides, resultant):
            """Returns string value from a modified config file.

            :param config_overrides: ``dict``
            :param resultant: ``str`` || ``unicode``
            :returns: ``str``
            """
            # If there is an exception loading the RawConfigParser The config obj
            # is loaded again without the extra option. This is being done to
            # support older python.
            try:
                config = ConfigParser.RawConfigParser(allow_no_value=True)
            except Exception:
                config = ConfigParser.RawConfigParser()

            config_object = io.BytesIO(str(resultant))
            config.readfp(config_object)
            for section, items in config_overrides.items():
                # If the items value is not a dictionary it is assumed that the
                # value is a default item for this config type.
                if not isinstance(items, dict):
                    config.set(
                        'DEFAULT',
                        section.encode('utf-8'),
                        _convert_2_string(items)
                    )
                else:
                    # Attempt to add a section to the config file passing if
                    # an error is raised that is related to the section
                    # already existing.
                    try:
                        config.add_section(section.encode('utf-8'))
                    except (ConfigParser.DuplicateSectionError, ValueError):
                        pass
                    for key, value in items.items():
                        value = _convert_2_string(value)
                        try:
                            config.set(
                                section.encode('utf-8'),
                                key.encode('utf-8'),
                                value
                            )
                        except ConfigParser.NoSectionError as exp:
                            error_msg = str(exp)
                            error_msg += (
                                ' Try being more explicit with your override'
                                ' data. Sections are case sensitive.'
                            )
                            raise errors.AnsibleModuleError(error_msg)
            else:
                config_object.close()

            resultant_bytesio = io.BytesIO()
            try:
                config.write(resultant_bytesio)
                return resultant_bytesio.getvalue()
            finally:
                resultant_bytesio.close()

        def return_config_overrides_json(self, config_overrides, resultant):
            """Returns config json

            Its important to note that file ordering will not be preserved as the
            information within the json file will be sorted by keys.

            :param config_overrides: ``dict``
            :param resultant: ``str`` || ``unicode``
            :returns: ``str``
            """
            original_resultant = json.loads(resultant)
            merged_resultant = self._merge_dict(
                base_items=original_resultant,
                new_items=config_overrides
            )
            return json.dumps(
                merged_resultant,
                indent=4,
                sort_keys=True
            )

        def return_config_overrides_yaml(self, config_overrides, resultant):
            """Return config yaml.

            :param config_overrides: ``dict``
            :param resultant: ``str`` || ``unicode``
            :returns: ``str``
            """
            original_resultant = yaml.safe_load(resultant)
            merged_resultant = self._merge_dict(
                base_items=original_resultant,
                new_items=config_overrides
            )
            return yaml.safe_dump(
                merged_resultant,
                default_flow_style=False,
                width=1000,
            )

        def _merge_dict(self, base_items, new_items):
            """Recursively merge new_items into base_items.

            :param base_items: ``dict``
            :param new_items: ``dict``
            :returns: ``dict``
            """
            for key, value in new_items.iteritems():
                if isinstance(value, dict):
                    base_items[key] = self._merge_dict(
                        base_items.get(key, {}),
                        value
                    )
                elif isinstance(value, list):
                    if key in base_items and isinstance(base_items[key], list):
                        base_items[key].extend(value)
                    else:
                        base_items[key] = value
                else:
                    base_items[key] = new_items[key]
            return base_items

        def _load_options_and_status(self, task_vars):
            """Return options and status from module load."""

            config_type = self._task.args.get('config_type')
            if config_type not in ['ini', 'yaml', 'json']:
                return False, dict(
                    failed=True,
                    msg="No valid [ config_type ] was provided. Valid options are"
                        " ini, yaml, or json."
                )

            # Access to protected method is unavoidable in Ansible
            searchpath = [self._loader._basedir]

            faf = self._task.first_available_file
            if faf:
                task_file = task_vars.get('_original_file', None, 'templates')
                source = self._get_first_available_file(faf, task_file)
                if not source:
                    return False, dict(
                        failed=True,
                        msg="could not find src in first_available_file list"
                    )
            else:
                # Access to protected method is unavoidable in Ansible
                if self._task._role:
                    file_path = self._task._role._role_path
                    searchpath.insert(1, C.DEFAULT_ROLES_PATH)
                    searchpath.insert(1, self._task._role._role_path)
                else:
                    file_path = self._loader.get_basedir()

                user_source = self._task.args.get('src')
                if not user_source:
                    return False, dict(
                        failed=True,
                        msg="No user provided [ src ] was provided"
                    )
                source = self._loader.path_dwim_relative(
                    file_path,
                    'templates',
                    user_source
                )
                searchpath.insert(1, os.path.dirname(source))

            _dest = self._task.args.get('dest')
            if not _dest:
                return False, dict(
                    failed=True,
                    msg="No [ dest ] was provided"
                )
            else:
                # Expand any user home dir specification
                user_dest = self._remote_expand_user(_dest)
                if user_dest.endswith(os.sep):
                    user_dest = os.path.join(user_dest, os.path.basename(source))

            return True, dict(
                source=source,
                dest=user_dest,
                config_overrides=self._task.args.get('config_overrides', dict()),
                config_type=config_type,
                searchpath=searchpath
            )

        def run(self, tmp=None, task_vars=None):
            """Run the method"""
            if not tmp:
                tmp = self._make_tmp_path()

            _status, _vars = self._load_options_and_status(task_vars=task_vars)
            if not _status:
                return _vars

            temp_vars = task_vars.copy()
            template_host = temp_vars['template_host'] = os.uname()[1]
            source = temp_vars['template_path'] = _vars['source']
            temp_vars['template_mtime'] = datetime.datetime.fromtimestamp(
                os.path.getmtime(source)
            )

            try:
                template_uid = temp_vars['template_uid'] = pwd.getpwuid(
                    os.stat(source).st_uid
                ).pw_name
            except Exception:
                template_uid = temp_vars['template_uid'] = os.stat(source).st_uid

            managed_default = C.DEFAULT_MANAGED_STR
            managed_str = managed_default.format(
                host=template_host,
                uid=template_uid,
                file=to_bytes(source)
            )
            temp_vars['ansible_managed'] = time.strftime(
                managed_str,
                time.localtime(os.path.getmtime(source))
            )
            temp_vars['template_fullpath'] = os.path.abspath(source)
            temp_vars['template_run_date'] = datetime.datetime.now()

            with open(source, 'r') as f:
                template_data = to_unicode(f.read())

            self._templar.environment.loader.searchpath = _vars['searchpath']
            self._templar.set_available_variables(temp_vars)
            resultant = self._templar.template(
                template_data,
                preserve_trailing_newlines=True,
                escape_backslashes=False,
                convert_data=False
            )

            # Access to protected method is unavoidable in Ansible
            self._templar.set_available_variables(
                self._templar._available_variables
            )

            if _vars['config_overrides']:
                type_merger = getattr(self, CONFIG_TYPES.get(_vars['config_type']))
                resultant = type_merger(
                    config_overrides=_vars['config_overrides'],
                    resultant=resultant
                )

            # Re-template the resultant object as it may have new data within it
            # as provided by an override variable.
            resultant = self._templar.template(
                resultant,
                preserve_trailing_newlines=True,
                escape_backslashes=False,
                convert_data=False
            )

            # run the copy module
            new_module_args = self._task.args.copy()
            # Access to protected method is unavoidable in Ansible
            transferred_data = self._transfer_data(
                self._connection._shell.join_path(tmp, 'source'),
                resultant
            )
            new_module_args.update(
                dict(
                    src=transferred_data,
                    dest=_vars['dest'],
                    original_basename=os.path.basename(source),
                    follow=True,
                ),
            )

            # Remove data types that are not available to the copy module
            new_module_args.pop('config_overrides', None)
            new_module_args.pop('config_type', None)

            # Run the copy module
            return self._execute_module(
                module_name='copy',
                module_args=new_module_args,
                task_vars=task_vars
            )
# Ansible v1
except ImportError:
import ConfigParser
import io
import json
import os
import yaml
from ansible import errors
from ansible.runner.return_data import ReturnData
from ansible import utils
from ansible.utils import template
CONFIG_TYPES = {
'ini': 'return_config_overrides_ini',
'json': 'return_config_overrides_json',
'yaml': 'return_config_overrides_yaml'
}
class ActionModule(object):
TRANSFERS_FILES = True
def __init__(self, runner):
self.runner = runner
def grab_options(self, complex_args, module_args):
"""Grab passed options from Ansible complex and module args.
:param complex_args: ``dict``
:param module_args: ``dict``
:returns: ``dict``
"""
options = dict()
if complex_args:
options.update(complex_args)
options.update(utils.parse_kv(module_args))
return options
@staticmethod
def return_config_overrides_ini(config_overrides, resultant):
"""Returns string value from a modified config file.
:param config_overrides: ``dict``
:param resultant: ``str`` || ``unicode``
:returns: ``str``
"""
config = ConfigParser.RawConfigParser(allow_no_value=True)
config_object = io.BytesIO(resultant.encode('utf-8'))
config.readfp(config_object)
for section, items in config_overrides.items():
# If the items value is not a dictionary it is assumed that the
# value is a default item for this config type.
if not isinstance(items, dict):
config.set('DEFAULT', section, str(items))
else:
# Attempt to add a section to the config file passing if
# an error is raised that is related to the section
# already existing.
try:
config.add_section(section)
except (ConfigParser.DuplicateSectionError, ValueError):
pass
for key, value in items.items():
config.set(section, key, str(value))
else:
config_object.close()
resultant_bytesio = io.BytesIO()
try:
config.write(resultant_bytesio)
return resultant_bytesio.getvalue()
finally:
resultant_bytesio.close()
def return_config_overrides_json(self, config_overrides, resultant):
"""Returns config json
Its important to note that file ordering will not be preserved as the
information within the json file will be sorted by keys.
:param config_overrides: ``dict``
:param resultant: ``str`` || ``unicode``
:returns: ``str``
"""
original_resultant = json.loads(resultant)
merged_resultant = self._merge_dict(
base_items=original_resultant,
new_items=config_overrides
)
return json.dumps(
merged_resultant,
indent=4,
sort_keys=True
)
def return_config_overrides_yaml(self, config_overrides, resultant):
"""Return config yaml.
:param config_overrides: ``dict``
:param resultant: ``str`` || ``unicode``
:returns: ``str``
"""
original_resultant = yaml.safe_load(resultant)
merged_resultant = self._merge_dict(
base_items=original_resultant,
new_items=config_overrides
)
return yaml.safe_dump(
merged_resultant,
default_flow_style=False,
width=1000,
)
def _merge_dict(self, base_items, new_items):
"""Recursively merge new_items into base_items.
:param base_items: ``dict``
:param new_items: ``dict``
:returns: ``dict``
"""
for key, value in new_items.iteritems():
if isinstance(value, dict):
base_items[key] = self._merge_dict(
base_items.get(key, {}),
value
)
elif isinstance(value, list):
if key in base_items and isinstance(base_items[key], list):
base_items[key].extend(value)
else:
base_items[key] = value
else:
base_items[key] = new_items[key]
return base_items
def run(self, conn, tmp, module_name, module_args, inject,
complex_args=None, **kwargs):
"""Run the method"""
if not self.runner.is_playbook:
raise errors.AnsibleError(
'FAILED: `config_templates` are only available in playbooks'
)
options = self.grab_options(complex_args, module_args)
try:
source = options['src']
dest = options['dest']
config_overrides = options.get('config_overrides', dict())
config_type = options['config_type']
assert config_type.lower() in ['ini', 'json', 'yaml']
except KeyError as exp:
result = dict(failed=True, msg=exp)
return ReturnData(conn=conn, comm_ok=False, result=result)
source_template = template.template(
self.runner.basedir,
source,
inject
)
if '_original_file' in inject:
source_file = utils.path_dwim_relative(
inject['_original_file'],
'templates',
source_template,
self.runner.basedir
)
else:
source_file = utils.path_dwim(self.runner.basedir, source_template)
# Open the template file and return the data as a string. This is
# being done here so that the file can be a vault encrypted file.
resultant = template.template_from_file(
self.runner.basedir,
source_file,
inject,
vault_password=self.runner.vault_pass
)
if config_overrides:
type_merger = getattr(self, CONFIG_TYPES.get(config_type))
resultant = type_merger(
config_overrides=config_overrides,
resultant=resultant
)
# Retemplate the resultant object as it may have new data within it
# as provided by an override variable.
template.template_from_string(
basedir=self.runner.basedir,
data=resultant,
vars=inject,
fail_on_undefined=True
)
# Access to protected method is unavoidable in Ansible 1.x.
new_module_args = dict(
src=self.runner._transfer_str(conn, tmp, 'source', resultant),
dest=dest,
original_basename=os.path.basename(source),
follow=True,
)
module_args_tmp = utils.merge_module_args(
module_args,
new_module_args
)
# Remove data types that are not available to the copy module
complex_args.pop('config_overrides')
complex_args.pop('config_type')
# Return the copy module status. Access to protected method is
# unavoidable in Ansible 1.x.
return self.runner._execute_module(
conn,
tmp,
'copy',
module_args_tmp,
inject=inject,
complex_args=complex_args
)
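For reference, here is a minimal standalone sketch (not part of the commit) of the deep-merge that config_template applies when config_overrides is supplied; the ceph.conf-style template and the override values below are hypothetical:

import yaml

def merge_dict(base_items, new_items):
    # Mirrors ActionModule._merge_dict above: nested dicts are merged
    # recursively, lists are extended, scalar values are overwritten.
    for key, value in new_items.items():
        if isinstance(value, dict):
            base_items[key] = merge_dict(base_items.get(key, {}), value)
        elif isinstance(value, list):
            if key in base_items and isinstance(base_items[key], list):
                base_items[key].extend(value)
            else:
                base_items[key] = value
        else:
            base_items[key] = value
    return base_items

# 'resultant' stands in for the template after the first render pass
# (config_type: yaml).
resultant = """
global:
  fsid: 00000000-0000-0000-0000-000000000000
  mon_initial_members: mon1
"""

config_overrides = {
    'global': {'osd_pool_default_size': 3},
    'client': {'rbd_cache': 'true'},
}

merged = merge_dict(yaml.safe_load(resultant), config_overrides)
print(yaml.safe_dump(merged, default_flow_style=False))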


@@ -25,9 +25,9 @@
   local_action: shell ansible --version | awk '/[0-9].[0-9].[0-9]/ {print $2}'
   changed_when: false
   sudo: false
-  register: ansible_version
+  register: ansible__version

 - name: fail on unsupported ansible version
   fail:
     msg: "Ansible version must be >= 1.9, please update!"
-  when: "{{ ansible_version.stdout | version_compare('1.9', '<') }}"
+  when: "{{ ansible__version.stdout | version_compare('1.9', '<') }}"


@@ -2,19 +2,19 @@
 - name: install the ceph repository stable key
   apt_key:
-    data: "{{ lookup('file', '../../files/cephstable.asc') }}"
+    data: "{{ lookup('file', role_path+'/files/cephstable.asc') }}"
     state: present
   when: ceph_stable

 - name: install the ceph development repository key
   apt_key:
-    data: "{{ lookup('file', '../../files/cephdev.asc') }}"
+    data: "{{ lookup('file', role_path+'/files/cephdev.asc') }}"
     state: present
   when: ceph_dev

 - name: install intank ceph enterprise repository key
   apt_key:
-    data: "{{ lookup('file', '../../files/cephstableice.asc') }}"
+    data: "{{ lookup('file', role_path+'/files/cephstableice.asc') }}"
     state: present
   when: ceph_stable_ice
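The key lookups now build an absolute path from role_path instead of a '../../' path relative to the task file, presumably because relative lookup paths resolve differently under Ansible v2. Roughly what the new expression evaluates to (the role path below is illustrative):

import os

role_path = '/etc/ansible/roles/ceph-common'  # set by Ansible at run time
key_file = os.path.join(role_path, 'files', 'cephstable.asc')
print(key_file)  # -> /etc/ansible/roles/ceph-common/files/cephstable.asc
# lookup('file', ...) then returns that file's contents for apt_key's data parameter.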


@@ -80,11 +80,11 @@
 - name: create ceph conf directory
   file:
     path: /etc/ceph
     state: directory
     owner: "{{ conf_directory_owner }}"
     group: "{{ conf_directory_group }}"
     mode: "{{ conf_directory_mode }}"

 - name: generate ceph configuration file
   config_template: