mirror of
https://github.com/ansible-collections/community.general.git
synced 2025-07-22 21:00:22 -07:00
Initial commit
This commit is contained in:
commit
aebc1b03fd
4861 changed files with 812621 additions and 0 deletions
576
plugins/modules/files/archive.py
Normal file
576
plugins/modules/files/archive.py
Normal file
|
@ -0,0 +1,576 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2016, Ben Doherty <bendohmv@gmail.com>
|
||||
# Sponsored by Oomph, Inc. http://www.oomphinc.com
|
||||
# Copyright: (c) 2017, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: archive
|
||||
short_description: Creates a compressed archive of one or more files or trees
|
||||
extends_documentation_fragment: files
|
||||
description:
|
||||
- Creates or extends an archive.
|
||||
- The source and archive are on the remote host, and the archive I(is not) copied to the local host.
|
||||
- Source files can be deleted after archival by specifying I(remove=True).
|
||||
options:
|
||||
path:
|
||||
description:
|
||||
- Remote absolute path, glob, or list of paths or globs for the file or files to compress or archive.
|
||||
type: list
|
||||
required: true
|
||||
format:
|
||||
description:
|
||||
- The type of compression to use.
|
||||
- Support for xz was added in Ansible 2.5.
|
||||
type: str
|
||||
choices: [ bz2, gz, tar, xz, zip ]
|
||||
default: gz
|
||||
dest:
|
||||
description:
|
||||
- The file name of the destination archive. The parent directory must exist on the remote host.
|
||||
- This is required when C(path) refers to multiple files by either specifying a glob, a directory or multiple paths in a list.
|
||||
type: path
|
||||
exclude_path:
|
||||
description:
|
||||
- Remote absolute path, glob, or list of paths or globs for the file or files to exclude from the archive.
|
||||
type: list
|
||||
force_archive:
|
||||
description:
|
||||
- Allow you to force the module to treat this as an archive even if only a single file is specified.
|
||||
- By default behaviour is maintained, i.e. when a single file is specified it is compressed only (not archived).
|
||||
type: bool
|
||||
default: false
|
||||
remove:
|
||||
description:
|
||||
- Remove any added source files and trees after adding to archive.
|
||||
type: bool
|
||||
default: no
|
||||
notes:
|
||||
- Requires tarfile, zipfile, gzip and bzip2 packages on target host.
|
||||
- Requires lzma or backports.lzma if using xz format.
|
||||
- Can produce I(gzip), I(bzip2), I(lzma) and I(zip) compressed files or archives.
|
||||
seealso:
|
||||
- module: unarchive
|
||||
author:
|
||||
- Ben Doherty (@bendoh)
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Compress directory /path/to/foo/ into /path/to/foo.tgz
|
||||
archive:
|
||||
path: /path/to/foo
|
||||
dest: /path/to/foo.tgz
|
||||
|
||||
- name: Compress regular file /path/to/foo into /path/to/foo.gz and remove it
|
||||
archive:
|
||||
path: /path/to/foo
|
||||
remove: yes
|
||||
|
||||
- name: Create a zip archive of /path/to/foo
|
||||
archive:
|
||||
path: /path/to/foo
|
||||
format: zip
|
||||
|
||||
- name: Create a bz2 archive of multiple files, rooted at /path
|
||||
archive:
|
||||
path:
|
||||
- /path/to/foo
|
||||
- /path/wong/foo
|
||||
dest: /path/file.tar.bz2
|
||||
format: bz2
|
||||
|
||||
- name: Create a bz2 archive of a globbed path, while excluding specific dirnames
|
||||
archive:
|
||||
path:
|
||||
- /path/to/foo/*
|
||||
dest: /path/file.tar.bz2
|
||||
exclude_path:
|
||||
- /path/to/foo/bar
|
||||
- /path/to/foo/baz
|
||||
format: bz2
|
||||
|
||||
- name: Create a bz2 archive of a globbed path, while excluding a glob of dirnames
|
||||
archive:
|
||||
path:
|
||||
- /path/to/foo/*
|
||||
dest: /path/file.tar.bz2
|
||||
exclude_path:
|
||||
- /path/to/foo/ba*
|
||||
format: bz2
|
||||
|
||||
- name: Use gzip to compress a single archive (i.e don't archive it first with tar)
|
||||
archive:
|
||||
path: /path/to/foo/single.file
|
||||
dest: /path/file.gz
|
||||
format: gz
|
||||
|
||||
- name: Create a tar.gz archive of a single file.
|
||||
archive:
|
||||
path: /path/to/foo/single.file
|
||||
dest: /path/file.tar.gz
|
||||
format: gz
|
||||
force_archive: true
|
||||
'''
|
||||
|
||||
RETURN = r'''
|
||||
state:
|
||||
description:
|
||||
The current state of the archived file.
|
||||
If 'absent', then no source files were found and the archive does not exist.
|
||||
If 'compress', then the file source file is in the compressed state.
|
||||
If 'archive', then the source file or paths are currently archived.
|
||||
If 'incomplete', then an archive was created, but not all source paths were found.
|
||||
type: str
|
||||
returned: always
|
||||
missing:
|
||||
description: Any files that were missing from the source.
|
||||
type: list
|
||||
returned: success
|
||||
archived:
|
||||
description: Any files that were compressed or added to the archive.
|
||||
type: list
|
||||
returned: success
|
||||
arcroot:
|
||||
description: The archive root.
|
||||
type: str
|
||||
returned: always
|
||||
expanded_paths:
|
||||
description: The list of matching paths from paths argument.
|
||||
type: list
|
||||
returned: always
|
||||
expanded_exclude_paths:
|
||||
description: The list of matching exclude paths from the exclude_path argument.
|
||||
type: list
|
||||
returned: always
|
||||
'''
|
||||
|
||||
import bz2
|
||||
import filecmp
|
||||
import glob
|
||||
import gzip
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import tarfile
|
||||
import zipfile
|
||||
from traceback import format_exc
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from ansible.module_utils._text import to_bytes, to_native
|
||||
from ansible.module_utils.six import PY3
|
||||
|
||||
|
||||
LZMA_IMP_ERR = None
|
||||
if PY3:
|
||||
try:
|
||||
import lzma
|
||||
HAS_LZMA = True
|
||||
except ImportError:
|
||||
LZMA_IMP_ERR = format_exc()
|
||||
HAS_LZMA = False
|
||||
else:
|
||||
try:
|
||||
from backports import lzma
|
||||
HAS_LZMA = True
|
||||
except ImportError:
|
||||
LZMA_IMP_ERR = format_exc()
|
||||
HAS_LZMA = False
|
||||
|
||||
|
||||
def main():
    """Entry point for the archive module.

    Expands the ``path``/``exclude_path`` globs, decides between multi-file
    archiving and single-file compression, writes the destination archive or
    compressed file, and optionally removes the source files afterwards.
    Results are reported via ``module.exit_json`` / ``module.fail_json``.
    """
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='list', required=True),
            format=dict(type='str', default='gz', choices=['bz2', 'gz', 'tar', 'xz', 'zip']),
            dest=dict(type='path'),
            exclude_path=dict(type='list'),
            force_archive=dict(type='bool', default=False),
            remove=dict(type='bool', default=False),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    params = module.params
    check_mode = module.check_mode
    paths = params['path']
    dest = params['dest']
    b_dest = None if not dest else to_bytes(dest, errors='surrogate_or_strict')
    exclude_paths = params['exclude_path']
    remove = params['remove']

    b_expanded_paths = []
    b_expanded_exclude_paths = []
    fmt = params['format']
    b_fmt = to_bytes(fmt, errors='surrogate_or_strict')
    force_archive = params['force_archive']
    globby = False
    changed = False
    state = 'absent'

    # Simple or archive file compression (inapplicable with 'zip' since it's always an archive)
    archive = False
    b_successes = []

    # Fail early when xz was requested but no lzma backend is importable.
    # NOTE: fail_json() exits the module, so nothing after it runs.
    if not HAS_LZMA and fmt == 'xz':
        module.fail_json(msg=missing_required_lib("lzma or backports.lzma", reason="when using xz format"),
                         exception=LZMA_IMP_ERR)

    for path in paths:
        b_path = os.path.expanduser(
            os.path.expandvars(
                to_bytes(path, errors='surrogate_or_strict')
            )
        )

        # Expand any glob characters. If found, add the expanded glob to the
        # list of expanded_paths, which might be empty.
        if b'*' in b_path or b'?' in b_path:
            b_expanded_paths.extend(glob.glob(b_path))
            globby = True

        # If there are no glob characters the path is added to the expanded
        # paths whether the path exists or not.
        else:
            b_expanded_paths.append(b_path)

    # Only attempt to expand the exclude paths if any were supplied.
    if exclude_paths:
        for exclude_path in exclude_paths:
            b_exclude_path = os.path.expanduser(
                os.path.expandvars(
                    to_bytes(exclude_path, errors='surrogate_or_strict')
                )
            )

            # Expand any glob characters. If found, add the expanded glob to
            # the list of expanded exclude paths, which might be empty.
            if b'*' in b_exclude_path or b'?' in b_exclude_path:
                b_expanded_exclude_paths.extend(glob.glob(b_exclude_path))

            # If there are no glob characters the exclude path is added to the
            # expanded exclude paths whether the path exists or not.
            else:
                b_expanded_exclude_paths.append(b_exclude_path)

    if not b_expanded_paths:
        return module.fail_json(
            path=', '.join(paths),
            expanded_paths=to_native(b', '.join(b_expanded_paths), errors='surrogate_or_strict'),
            msg='Error, no source paths were found'
        )

    # Only try to determine if we are working with an archive or not if we
    # haven't been forced to treat this as an archive.
    if not force_archive:
        # If we actually matched multiple files or TRIED to, then
        # treat this as a multi-file archive.
        archive = globby or os.path.isdir(b_expanded_paths[0]) or len(b_expanded_paths) > 1
    else:
        archive = True

    # Default created file name (for single-file archives) to <file>.<format>
    if not b_dest and not archive:
        b_dest = b'%s.%s' % (b_expanded_paths[0], b_fmt)

    # Force archives to specify 'dest'
    if archive and not b_dest:
        module.fail_json(dest=dest, path=', '.join(paths), msg='Error, must specify "dest" when archiving multiple files or trees')

    b_sep = to_bytes(os.sep, errors='surrogate_or_strict')

    b_archive_paths = []
    b_missing = []
    b_arcroot = b''

    for b_path in b_expanded_paths:
        # Use the longest common directory name among all the files
        # as the archive root path.
        if b_arcroot == b'':
            b_arcroot = os.path.dirname(b_path) + b_sep
        else:
            for i in range(len(b_arcroot)):
                if b_path[i] != b_arcroot[i]:
                    break

            # Truncate the root to the common prefix found above.  When no
            # mismatch was found, i lands on the last index and the
            # dirname + sep round trip leaves b_arcroot effectively unchanged.
            if i < len(b_arcroot):
                b_arcroot = os.path.dirname(b_arcroot[0:i + 1])

            b_arcroot += b_sep

        # Don't allow archives to be created anywhere within paths to be removed
        if remove and os.path.isdir(b_path):
            b_path_dir = b_path
            if not b_path.endswith(b'/'):
                b_path_dir += b'/'

            if b_dest.startswith(b_path_dir):
                module.fail_json(
                    path=', '.join(paths),
                    msg='Error, created archive can not be contained in source paths when remove=True'
                )

        if os.path.lexists(b_path) and b_path not in b_expanded_exclude_paths:
            b_archive_paths.append(b_path)
        else:
            b_missing.append(b_path)

    # No source files were found but the named archive exists: are we 'compress' or 'archive' now?
    if len(b_missing) == len(b_expanded_paths) and b_dest and os.path.exists(b_dest):
        # Just check the filename to know if it's an archive or simple compressed file
        if re.search(br'(\.tar|\.tar\.gz|\.tgz|\.tbz2|\.tar\.bz2|\.tar\.xz|\.zip)$', os.path.basename(b_dest), re.IGNORECASE):
            state = 'archive'
        else:
            state = 'compress'

    # Multiple files, or globbiness
    elif archive:
        if not b_archive_paths:
            # No source files were found, but the archive is there.
            if os.path.lexists(b_dest):
                state = 'archive'
        elif b_missing:
            # SOME source files were found, but not all of them
            state = 'incomplete'

        archive = None
        size = 0
        errors = []

        if os.path.lexists(b_dest):
            size = os.path.getsize(b_dest)

        if state != 'archive':
            if check_mode:
                changed = True

            else:
                try:
                    # Slightly more difficult (and less efficient!) compression using zipfile module
                    if fmt == 'zip':
                        arcfile = zipfile.ZipFile(
                            to_native(b_dest, errors='surrogate_or_strict', encoding='ascii'),
                            'w',
                            zipfile.ZIP_DEFLATED,
                            True
                        )

                    # Easier compression using tarfile module
                    elif fmt == 'gz' or fmt == 'bz2':
                        arcfile = tarfile.open(to_native(b_dest, errors='surrogate_or_strict', encoding='ascii'), 'w|' + fmt)

                    # python3 tarfile module allows xz format but for python2 we have to create the tarfile
                    # in memory and then compress it with lzma.
                    elif fmt == 'xz':
                        arcfileIO = io.BytesIO()
                        arcfile = tarfile.open(fileobj=arcfileIO, mode='w')

                    # Or plain tar archiving
                    elif fmt == 'tar':
                        arcfile = tarfile.open(to_native(b_dest, errors='surrogate_or_strict', encoding='ascii'), 'w')

                    b_match_root = re.compile(br'^%s' % re.escape(b_arcroot))
                    for b_path in b_archive_paths:
                        if os.path.isdir(b_path):
                            # Recurse into directories
                            for b_dirpath, b_dirnames, b_filenames in os.walk(b_path, topdown=True):
                                if not b_dirpath.endswith(b_sep):
                                    b_dirpath += b_sep

                                for b_dirname in b_dirnames:
                                    b_fullpath = b_dirpath + b_dirname
                                    n_fullpath = to_native(b_fullpath, errors='surrogate_or_strict', encoding='ascii')
                                    n_arcname = to_native(b_match_root.sub(b'', b_fullpath), errors='surrogate_or_strict')

                                    try:
                                        if fmt == 'zip':
                                            arcfile.write(n_fullpath, n_arcname)
                                        else:
                                            arcfile.add(n_fullpath, n_arcname, recursive=False)

                                    except Exception as e:
                                        errors.append('%s: %s' % (n_fullpath, to_native(e)))

                                for b_filename in b_filenames:
                                    b_fullpath = b_dirpath + b_filename
                                    n_fullpath = to_native(b_fullpath, errors='surrogate_or_strict', encoding='ascii')
                                    n_arcname = to_native(b_match_root.sub(b'', b_fullpath), errors='surrogate_or_strict')

                                    try:
                                        if fmt == 'zip':
                                            arcfile.write(n_fullpath, n_arcname)
                                        else:
                                            arcfile.add(n_fullpath, n_arcname, recursive=False)

                                        b_successes.append(b_fullpath)
                                    except Exception as e:
                                        errors.append('Adding %s: %s' % (to_native(b_path), to_native(e)))
                        else:
                            path = to_native(b_path, errors='surrogate_or_strict', encoding='ascii')
                            arcname = to_native(b_match_root.sub(b'', b_path), errors='surrogate_or_strict')
                            if fmt == 'zip':
                                arcfile.write(path, arcname)
                            else:
                                arcfile.add(path, arcname, recursive=False)

                            b_successes.append(b_path)

                except Exception as e:
                    expanded_fmt = 'zip' if fmt == 'zip' else ('tar.' + fmt)
                    module.fail_json(
                        msg='Error when writing %s archive at %s: %s' % (expanded_fmt, dest, to_native(e)),
                        exception=format_exc()
                    )

                if arcfile:
                    arcfile.close()
                    state = 'archive'

                # For xz the tar stream was built in memory; compress it out to disk now.
                if fmt == 'xz':
                    with lzma.open(b_dest, 'wb') as f:
                        f.write(arcfileIO.getvalue())
                    arcfileIO.close()

                if errors:
                    module.fail_json(msg='Errors when writing archive at %s: %s' % (dest, '; '.join(errors)))

        if state in ['archive', 'incomplete'] and remove:
            for b_path in b_successes:
                try:
                    if os.path.isdir(b_path):
                        # BUG FIX: directories were previously removed even in
                        # check mode; guard all removals behind check_mode.
                        if not check_mode:
                            shutil.rmtree(b_path)
                    elif not check_mode:
                        os.remove(b_path)
                except OSError:
                    errors.append(to_native(b_path))

            if errors:
                module.fail_json(dest=dest, msg='Error deleting some source files: ', files=errors)

        # Rudimentary check: If size changed then file changed. Not perfect, but easy.
        if not check_mode and os.path.getsize(b_dest) != size:
            changed = True

        if b_successes and state != 'incomplete':
            state = 'archive'

    # Simple, single-file compression
    else:
        b_path = b_expanded_paths[0]

        # No source or compressed file
        if not (os.path.exists(b_path) or os.path.lexists(b_dest)):
            state = 'absent'

        # if it already exists and the source file isn't there, consider this done
        elif not os.path.lexists(b_path) and os.path.lexists(b_dest):
            state = 'compress'

        else:
            if module.check_mode:
                if not os.path.exists(b_dest):
                    changed = True
            else:
                size = 0
                f_in = f_out = arcfile = None

                if os.path.lexists(b_dest):
                    size = os.path.getsize(b_dest)

                try:
                    if fmt == 'zip':
                        arcfile = zipfile.ZipFile(
                            to_native(b_dest, errors='surrogate_or_strict', encoding='ascii'),
                            'w',
                            zipfile.ZIP_DEFLATED,
                            True
                        )
                        arcfile.write(
                            to_native(b_path, errors='surrogate_or_strict', encoding='ascii'),
                            to_native(b_path[len(b_arcroot):], errors='surrogate_or_strict')
                        )
                        arcfile.close()
                        state = 'archive'  # because all zip files are archives
                    elif fmt == 'tar':
                        arcfile = tarfile.open(to_native(b_dest, errors='surrogate_or_strict', encoding='ascii'), 'w')
                        arcfile.add(to_native(b_path, errors='surrogate_or_strict', encoding='ascii'))
                        arcfile.close()
                    else:
                        f_in = open(b_path, 'rb')

                        n_dest = to_native(b_dest, errors='surrogate_or_strict', encoding='ascii')
                        if fmt == 'gz':
                            f_out = gzip.open(n_dest, 'wb')
                        elif fmt == 'bz2':
                            f_out = bz2.BZ2File(n_dest, 'wb')
                        elif fmt == 'xz':
                            f_out = lzma.LZMAFile(n_dest, 'wb')
                        else:
                            raise OSError("Invalid format")

                        shutil.copyfileobj(f_in, f_out)

                    b_successes.append(b_path)

                except OSError as e:
                    module.fail_json(
                        path=to_native(b_path),
                        dest=dest,
                        msg='Unable to write to compressed file: %s' % to_native(e), exception=format_exc()
                    )

                if arcfile:
                    arcfile.close()
                if f_in:
                    f_in.close()
                if f_out:
                    f_out.close()

                # Rudimentary check: If size changed then file changed. Not perfect, but easy.
                if os.path.getsize(b_dest) != size:
                    changed = True

            state = 'compress'

            if remove and not check_mode:
                try:
                    os.remove(b_path)

                except OSError as e:
                    module.fail_json(
                        path=to_native(b_path),
                        msg='Unable to remove source file: %s' % to_native(e), exception=format_exc()
                    )

    # Apply the common file attributes (owner, mode, ...) to the destination.
    file_args = module.load_file_common_arguments(params, path=b_dest)

    if not check_mode:
        changed = module.set_fs_attributes_if_different(file_args, changed)

    module.exit_json(
        archived=[to_native(p, errors='surrogate_or_strict') for p in b_successes],
        dest=dest,
        changed=changed,
        state=state,
        arcroot=to_native(b_arcroot, errors='surrogate_or_strict'),
        missing=[to_native(p, errors='surrogate_or_strict') for p in b_missing],
        expanded_paths=[to_native(p, errors='surrogate_or_strict') for p in b_expanded_paths],
        expanded_exclude_paths=[to_native(p, errors='surrogate_or_strict') for p in b_expanded_exclude_paths],
    )


if __name__ == '__main__':
    main()
|
338
plugins/modules/files/ini_file.py
Normal file
338
plugins/modules/files/ini_file.py
Normal file
|
@ -0,0 +1,338 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2012, Jan-Piet Mens <jpmens () gmail.com>
|
||||
# Copyright: (c) 2015, Ales Nosek <anosek.nosek () gmail.com>
|
||||
# Copyright: (c) 2017, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: ini_file
|
||||
short_description: Tweak settings in INI files
|
||||
extends_documentation_fragment: files
|
||||
description:
|
||||
- Manage (add, remove, change) individual settings in an INI-style file without having
|
||||
to manage the file as a whole with, say, M(template) or M(assemble).
|
||||
- Adds missing sections if they don't exist.
|
||||
- Before Ansible 2.0, comments are discarded when the source file is read, and therefore will not show up in the destination file.
|
||||
- Since Ansible 2.3, this module adds missing ending newlines to files to keep in line with the POSIX standard, even when
|
||||
no other modifications need to be applied.
|
||||
options:
|
||||
path:
|
||||
description:
|
||||
- Path to the INI-style file; this file is created if required.
|
||||
- Before Ansible 2.3 this option was only usable as I(dest).
|
||||
type: path
|
||||
required: true
|
||||
aliases: [ dest ]
|
||||
section:
|
||||
description:
|
||||
- Section name in INI file. This is added if C(state=present) automatically when
|
||||
a single value is being set.
|
||||
- If left empty or set to C(null), the I(option) will be placed before the first I(section).
|
||||
- Using C(null) is also required if the config format does not support sections.
|
||||
type: str
|
||||
required: true
|
||||
option:
|
||||
description:
|
||||
- If set (required for changing a I(value)), this is the name of the option.
|
||||
- May be omitted if adding/removing a whole I(section).
|
||||
type: str
|
||||
value:
|
||||
description:
|
||||
- The string value to be associated with an I(option).
|
||||
- May be omitted when removing an I(option).
|
||||
type: str
|
||||
backup:
|
||||
description:
|
||||
- Create a backup file including the timestamp information so you can get
|
||||
the original file back if you somehow clobbered it incorrectly.
|
||||
type: bool
|
||||
default: no
|
||||
state:
|
||||
description:
|
||||
- If set to C(absent) the option or section will be removed if present instead of created.
|
||||
type: str
|
||||
choices: [ absent, present ]
|
||||
default: present
|
||||
no_extra_spaces:
|
||||
description:
|
||||
- Do not insert spaces before and after '=' symbol.
|
||||
type: bool
|
||||
default: no
|
||||
create:
|
||||
description:
|
||||
- If set to C(no), the module will fail if the file does not already exist.
|
||||
- By default it will create the file if it is missing.
|
||||
type: bool
|
||||
default: yes
|
||||
allow_no_value:
|
||||
description:
|
||||
- Allow option without value and without '=' symbol.
|
||||
type: bool
|
||||
default: no
|
||||
notes:
|
||||
- While it is possible to add an I(option) without specifying a I(value), this makes no sense.
|
||||
- As of Ansible 2.3, the I(dest) option has been changed to I(path) as default, but I(dest) still works as well.
|
||||
author:
|
||||
- Jan-Piet Mens (@jpmens)
|
||||
- Ales Nosek (@noseka1)
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
# Before Ansible 2.3, option 'dest' was used instead of 'path'
|
||||
- name: Ensure "fav=lemonade" is in section "[drinks]" in specified file
|
||||
ini_file:
|
||||
path: /etc/conf
|
||||
section: drinks
|
||||
option: fav
|
||||
value: lemonade
|
||||
mode: '0600'
|
||||
backup: yes
|
||||
|
||||
- name: Ensure "temperature=cold" is in section "[drinks]" in specified file
|
||||
ini_file:
|
||||
path: /etc/anotherconf
|
||||
section: drinks
|
||||
option: temperature
|
||||
value: cold
|
||||
backup: yes
|
||||
'''
|
||||
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def match_opt(option, line):
    """Match *option* at the start of *line*, whether active or commented out.

    A line counts as a match when it begins with the option name (optionally
    preceded by '#' or ';' for commented-out entries), followed by '=' or end
    of line.  Returns the re match object, or None when nothing matches.
    """
    escaped = re.escape(option)
    for prefix in ('', '#', ';'):
        found = re.match('%s( |\t)*%s( |\t)*(=|$)' % (prefix, escaped), line)
        if found:
            return found
    return None
|
||||
|
||||
|
||||
def match_active_opt(option, line):
    """Match *option* at the start of *line* only when it is not commented out.

    Unlike match_opt(), lines prefixed with '#' or ';' do not match.
    Returns the re match object, or None.
    """
    pattern = '( |\t)*%s( |\t)*(=|$)' % re.escape(option)
    return re.match(pattern, line)
|
||||
|
||||
|
||||
def do_ini(module, filename, section=None, option=None, value=None,
           state='present', backup=False, no_extra_spaces=False, create=True,
           allow_no_value=False):
    """Apply one add/change/remove operation to an INI-style file.

    :param module: the AnsibleModule instance (used for check mode, diff
        mode, backups, atomic writes and failure reporting)
    :param filename: path of the INI file to modify
    :param section: section name; falsy places the option before the first
        real section (a fake section is used internally)
    :param option: option name; None with state='absent' removes the section
    :param value: value to assign; None with allow_no_value writes a bare key
    :param state: 'present' to add/update, 'absent' to remove
    :param backup: when True, make a timestamped backup before writing
    :param no_extra_spaces: write 'key=value' instead of 'key = value'
    :param create: when False, fail if the file does not exist
    :param allow_no_value: permit options without '=' and value
    :returns: tuple (changed, backup_file, diff, msg)
    """
    diff = dict(
        before='',
        after='',
        before_header='%s (content)' % filename,
        after_header='%s (content)' % filename,
    )

    if not os.path.exists(filename):
        if not create:
            module.fail_json(rc=257, msg='Destination %s does not exist !' % filename)
        destpath = os.path.dirname(filename)
        if not os.path.exists(destpath) and not module.check_mode:
            os.makedirs(destpath)
        ini_lines = []
    else:
        # Context manager instead of open/try/finally.
        with open(filename, 'r') as ini_file:
            ini_lines = ini_file.readlines()

    if module._diff:
        diff['before'] = ''.join(ini_lines)

    changed = False

    # ini file could be empty
    if not ini_lines:
        ini_lines.append('\n')

    # last line of file may not contain a trailing newline
    if ini_lines[-1] == "" or ini_lines[-1][-1] != '\n':
        ini_lines[-1] += '\n'
        changed = True

    # Append fake section lines to simplify the logic.
    # At top: a fake section whose name (a commit hash) cannot collide with
    # any real section in the file.
    fake_section_name = "ad01e11446efb704fcdbdb21f2c43757423d91c5"

    # Insert it at the beginning
    ini_lines.insert(0, '[%s]' % fake_section_name)

    # At bottom: a bare '[' acts as a terminating section header.
    ini_lines.append('[')

    # If no section is defined, fake section is used
    if not section:
        section = fake_section_name

    within_section = not section
    section_start = 0
    msg = 'OK'
    if no_extra_spaces:
        assignment_format = '%s=%s\n'
    else:
        assignment_format = '%s = %s\n'

    for index, line in enumerate(ini_lines):
        if line.startswith('[%s]' % section):
            within_section = True
            section_start = index
        elif line.startswith('['):
            if within_section:
                if state == 'present':
                    # insert missing option line at the end of the section
                    for i in range(index, 0, -1):
                        # search backwards for previous non-blank or non-comment line
                        if not re.match(r'^[ \t]*([#;].*)?$', ini_lines[i - 1]):
                            if not value and allow_no_value:
                                ini_lines.insert(i, '%s\n' % option)
                            else:
                                ini_lines.insert(i, assignment_format % (option, value))
                            msg = 'option added'
                            changed = True
                            break
                elif state == 'absent' and not option:
                    # remove the entire section
                    del ini_lines[section_start:index]
                    msg = 'section removed'
                    changed = True
                break
        else:
            if within_section and option:
                if state == 'present':
                    # change the existing option line
                    if match_opt(option, line):
                        if not value and allow_no_value:
                            newline = '%s\n' % option
                        else:
                            newline = assignment_format % (option, value)
                        option_changed = ini_lines[index] != newline
                        changed = changed or option_changed
                        if option_changed:
                            msg = 'option changed'
                        ini_lines[index] = newline
                        if option_changed:
                            # remove all possible option occurrences from the rest of the section
                            index = index + 1
                            while index < len(ini_lines):
                                line = ini_lines[index]
                                if line.startswith('['):
                                    break
                                if match_active_opt(option, line):
                                    del ini_lines[index]
                                else:
                                    index = index + 1
                        break
                elif state == 'absent':
                    # delete the existing line
                    if match_active_opt(option, line):
                        del ini_lines[index]
                        changed = True
                        msg = 'option changed'
                        break

    # remove the fake section lines
    del ini_lines[0]
    del ini_lines[-1:]

    if not within_section and option and state == 'present':
        ini_lines.append('[%s]\n' % section)
        if not value and allow_no_value:
            ini_lines.append('%s\n' % option)
        else:
            ini_lines.append(assignment_format % (option, value))
        changed = True
        msg = 'section and option added'

    if module._diff:
        diff['after'] = ''.join(ini_lines)

    backup_file = None
    if changed and not module.check_mode:
        if backup:
            backup_file = module.backup_local(filename)

        try:
            tmpfd, tmpfile = tempfile.mkstemp(dir=module.tmpdir)
            f = os.fdopen(tmpfd, 'w')
            f.writelines(ini_lines)
            f.close()
        except IOError:
            # BUG FIX: the message previously contained an unfilled '%s'.
            module.fail_json(msg="Unable to create temporary file %s" % module.tmpdir,
                             traceback=traceback.format_exc())

        try:
            module.atomic_move(tmpfile, filename)
        except IOError:
            # BUG FIX: was 'module.ansible.fail_json' (AttributeError), and the
            # string continuation embedded source indentation in the message.
            module.fail_json(msg='Unable to move temporary file %s to %s, IOError' % (tmpfile, filename),
                             traceback=traceback.format_exc())

    return (changed, backup_file, diff, msg)
|
||||
|
||||
|
||||
def main():
    """Module entry point: parse arguments, apply the INI change, report results."""
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='path', required=True, aliases=['dest']),
            section=dict(type='str', required=True),
            option=dict(type='str'),
            value=dict(type='str'),
            backup=dict(type='bool', default=False),
            state=dict(type='str', default='present', choices=['absent', 'present']),
            no_extra_spaces=dict(type='bool', default=False),
            allow_no_value=dict(type='bool', default=False),
            create=dict(type='bool', default=True)
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    p = module.params
    path = p['path']

    # do_ini does all the real work; keyword arguments for the optional ones.
    changed, backup_file, diff, msg = do_ini(
        module, path, p['section'], p['option'], p['value'],
        state=p['state'], backup=p['backup'],
        no_extra_spaces=p['no_extra_spaces'], create=p['create'],
        allow_no_value=p['allow_no_value'],
    )

    # Outside check mode, also enforce the common file attributes (owner, mode, ...).
    if not module.check_mode and os.path.exists(path):
        file_args = module.load_file_common_arguments(module.params)
        changed = module.set_fs_attributes_if_different(file_args, changed)

    results = dict(
        changed=changed,
        diff=diff,
        msg=msg,
        path=path,
    )
    if backup_file is not None:
        results['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**results)


if __name__ == '__main__':
    main()
|
220
plugins/modules/files/iso_extract.py
Normal file
220
plugins/modules/files/iso_extract.py
Normal file
|
@ -0,0 +1,220 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2013, Jeroen Hoekx <jeroen.hoekx@dsquare.be>
|
||||
# Copyright: (c) 2016, Matt Robinson <git@nerdoftheherd.com>
|
||||
# Copyright: (c) 2017, Dag Wieers <dag@wieers.com>
|
||||
# Copyright: (c) 2017, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
author:
|
||||
- Jeroen Hoekx (@jhoekx)
|
||||
- Matt Robinson (@ribbons)
|
||||
- Dag Wieers (@dagwieers)
|
||||
module: iso_extract
|
||||
short_description: Extract files from an ISO image
|
||||
description:
|
||||
- This module has two possible ways of operation.
|
||||
- If 7zip is installed on the system, this module extracts files from an ISO
|
||||
into a temporary directory and copies files to a given destination,
|
||||
if needed.
|
||||
- If the user has mount-capabilities (CAP_SYS_ADMIN on Linux) this module
|
||||
mounts the ISO image to a temporary location, and copies files to a given
|
||||
destination, if needed.
|
||||
requirements:
|
||||
- Either 7z (from I(7zip) or I(p7zip) package)
|
||||
- Or mount capabilities (root-access, or CAP_SYS_ADMIN capability on Linux)
|
||||
options:
|
||||
image:
|
||||
description:
|
||||
- The ISO image to extract files from.
|
||||
type: path
|
||||
required: yes
|
||||
aliases: [ path, src ]
|
||||
dest:
|
||||
description:
|
||||
- The destination directory to extract files to.
|
||||
type: path
|
||||
required: yes
|
||||
files:
|
||||
description:
|
||||
- A list of files to extract from the image.
|
||||
- Extracting directories does not work.
|
||||
type: list
|
||||
required: yes
|
||||
force:
|
||||
description:
|
||||
- If C(yes), which will replace the remote file when contents are different than the source.
|
||||
- If C(no), the file will only be extracted and copied if the destination does not already exist.
|
||||
- Alias C(thirsty) has been deprecated and will be removed in 2.13.
|
||||
type: bool
|
||||
default: yes
|
||||
aliases: [ thirsty ]
|
||||
executable:
|
||||
description:
|
||||
- The path to the C(7z) executable to use for extracting files from the ISO.
|
||||
type: path
|
||||
default: '7z'
|
||||
notes:
|
||||
- Only the file checksum (content) is taken into account when extracting files
|
||||
from the ISO image. If C(force=no), only checks the presence of the file.
|
||||
- In Ansible 2.3 this module was using C(mount) and C(umount) commands only,
|
||||
requiring root access. This is no longer needed with the introduction of 7zip
|
||||
for extraction.
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Extract kernel and ramdisk from a LiveCD
|
||||
iso_extract:
|
||||
image: /tmp/rear-test.iso
|
||||
dest: /tmp/virt-rear/
|
||||
files:
|
||||
- isolinux/kernel
|
||||
- isolinux/initrd.cgz
|
||||
'''
|
||||
|
||||
RETURN = r'''
|
||||
#
|
||||
'''
|
||||
|
||||
import os.path
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
try: # python 3.3+
|
||||
from shlex import quote
|
||||
except ImportError: # older python
|
||||
from pipes import quote
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def main():
    """Extract files from an ISO image using 7z, or by loop-mounting it.

    Uses 7z when available; otherwise falls back to mount/umount (which
    needs root or CAP_SYS_ADMIN). Copies each extracted file into ``dest``
    only when its SHA1 checksum differs from an existing destination file.
    """
    module = AnsibleModule(
        argument_spec=dict(
            image=dict(type='path', required=True, aliases=['path', 'src']),
            dest=dict(type='path', required=True),
            files=dict(type='list', required=True),
            force=dict(type='bool', default=True, aliases=['thirsty']),
            executable=dict(type='path'),  # No default on purpose
        ),
        supports_check_mode=True,
    )
    image = module.params['image']
    dest = module.params['dest']
    files = module.params['files']
    force = module.params['force']
    executable = module.params['executable']

    if module.params.get('thirsty'):
        module.deprecate('The alias "thirsty" has been deprecated and will be removed, use "force" instead', version='2.13')

    result = dict(
        changed=False,
        dest=dest,
        image=image,
    )

    # We want to know if the user provided it or not, so we set default here
    if executable is None:
        executable = '7z'

    binary = module.get_bin_path(executable, None)

    # When executable was provided and binary not found, warn user !
    if module.params['executable'] is not None and not binary:
        module.warn("Executable '%s' is not found on the system, trying to mount ISO instead." % executable)

    if not os.path.exists(dest):
        module.fail_json(msg="Directory '%s' does not exist" % dest)

    # BUGFIX: this used to test os.path.dirname(image), so a missing image
    # file slipped past this check despite the error message below.
    if not os.path.exists(image):
        module.fail_json(msg="ISO image '%s' does not exist" % image)

    result['files'] = []
    extract_files = list(files)

    if not force:
        # Check if we have to process any files based on existence
        for f in files:
            dest_file = os.path.join(dest, os.path.basename(f))
            if os.path.exists(dest_file):
                result['files'].append(dict(
                    checksum=None,
                    dest=dest_file,
                    src=f,
                ))
                extract_files.remove(f)

    if not extract_files:
        module.exit_json(**result)

    tmp_dir = tempfile.mkdtemp()

    # Use 7zip when we have a binary, otherwise try to mount
    if binary:
        cmd = '%s x "%s" -o"%s" %s' % (binary, image, tmp_dir, ' '.join([quote(f) for f in extract_files]))
    else:
        cmd = 'mount -o loop,ro "%s" "%s"' % (image, tmp_dir)

    rc, out, err = module.run_command(cmd)
    if rc != 0:
        result.update(dict(
            cmd=cmd,
            rc=rc,
            stderr=err,
            stdout=out,
        ))
        shutil.rmtree(tmp_dir)

        if binary:
            module.fail_json(msg="Failed to extract from ISO image '%s' to '%s'" % (image, tmp_dir), **result)
        else:
            module.fail_json(msg="Failed to mount ISO image '%s' to '%s', and we could not find executable '%s'." % (image, tmp_dir, executable), **result)

    try:
        for f in extract_files:
            tmp_src = os.path.join(tmp_dir, f)
            if not os.path.exists(tmp_src):
                module.fail_json(msg="Failed to extract '%s' from ISO image" % f, **result)

            src_checksum = module.sha1(tmp_src)

            dest_file = os.path.join(dest, os.path.basename(f))

            if os.path.exists(dest_file):
                dest_checksum = module.sha1(dest_file)
            else:
                dest_checksum = None

            result['files'].append(dict(
                checksum=src_checksum,
                dest=dest_file,
                src=f,
            ))

            # Only copy when the content differs, and honour check mode.
            if src_checksum != dest_checksum:
                if not module.check_mode:
                    shutil.copy(tmp_src, dest_file)

                result['changed'] = True
    finally:
        # Always clean up: unmount (mount fallback only) and drop the temp dir.
        if not binary:
            module.run_command('umount "%s"' % tmp_dir)

        shutil.rmtree(tmp_dir)

    module.exit_json(**result)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
214
plugins/modules/files/patch.py
Normal file
214
plugins/modules/files/patch.py
Normal file
|
@ -0,0 +1,214 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2012, Luis Alberto Perez Lazaro <luisperlazaro@gmail.com>
|
||||
# Copyright: (c) 2015, Jakub Jirutka <jakub@jirutka.cz>
|
||||
# Copyright: (c) 2017, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['stableinterface'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: patch
|
||||
author:
|
||||
- Jakub Jirutka (@jirutka)
|
||||
- Luis Alberto Perez Lazaro (@luisperlaz)
|
||||
description:
|
||||
- Apply patch files using the GNU patch tool.
|
||||
short_description: Apply patch files using the GNU patch tool
|
||||
options:
|
||||
basedir:
|
||||
description:
|
||||
- Path of a base directory in which the patch file will be applied.
|
||||
- May be omitted when C(dest) option is specified, otherwise required.
|
||||
type: path
|
||||
dest:
|
||||
description:
|
||||
- Path of the file on the remote machine to be patched.
|
||||
- The names of the files to be patched are usually taken from the patch
|
||||
file, but if there's just one file to be patched it can specified with
|
||||
this option.
|
||||
type: path
|
||||
aliases: [ originalfile ]
|
||||
src:
|
||||
description:
|
||||
- Path of the patch file as accepted by the GNU patch tool. If
|
||||
C(remote_src) is 'no', the patch source file is looked up from the
|
||||
module's I(files) directory.
|
||||
type: path
|
||||
required: true
|
||||
aliases: [ patchfile ]
|
||||
state:
|
||||
description:
|
||||
- Whether the patch should be applied or reverted.
|
||||
type: str
|
||||
choices: [ absent, present ]
|
||||
default: present
|
||||
remote_src:
|
||||
description:
|
||||
- If C(no), it will search for src at originating/master machine, if C(yes) it will
|
||||
go to the remote/target machine for the C(src).
|
||||
type: bool
|
||||
default: no
|
||||
strip:
|
||||
description:
|
||||
- Number that indicates the smallest prefix containing leading slashes
|
||||
that will be stripped from each file name found in the patch file.
|
||||
- For more information see the strip parameter of the GNU patch tool.
|
||||
type: int
|
||||
default: 0
|
||||
backup:
|
||||
description:
|
||||
- Passes C(--backup --version-control=numbered) to patch, producing numbered backup copies.
|
||||
type: bool
|
||||
default: no
|
||||
binary:
|
||||
description:
|
||||
- Setting to C(yes) will disable patch's heuristic for transforming CRLF
|
||||
line endings into LF.
|
||||
- Line endings of src and dest must match.
|
||||
- If set to C(no), C(patch) will replace CRLF in C(src) files on POSIX.
|
||||
type: bool
|
||||
default: no
|
||||
notes:
|
||||
- This module requires GNU I(patch) utility to be installed on the remote host.
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Apply patch to one file
|
||||
patch:
|
||||
src: /tmp/index.html.patch
|
||||
dest: /var/www/index.html
|
||||
|
||||
- name: Apply patch to multiple files under basedir
|
||||
patch:
|
||||
src: /tmp/customize.patch
|
||||
basedir: /var/www
|
||||
strip: 1
|
||||
|
||||
- name: Revert patch to one file
|
||||
patch:
|
||||
src: /tmp/index.html.patch
|
||||
dest: /var/www/index.html
|
||||
state: absent
|
||||
'''
|
||||
|
||||
import os
|
||||
import platform
|
||||
from traceback import format_exc
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
class PatchError(Exception):
    """Raised when the external patch command fails to apply/revert a patch."""
    pass
|
||||
|
||||
|
||||
def add_dry_run_option(opts):
    """Append the platform-appropriate dry-run flag to *opts* in place.

    Older FreeBSD, OpenBSD and NetBSD patch implementations only support
    ``--check``; elsewhere GNU patch's ``--dry-run`` is used.
    """
    bsd_systems = ('openbsd', 'netbsd', 'freebsd')
    flag = '--check' if platform.system().lower() in bsd_systems else '--dry-run'
    opts.append(flag)
|
||||
|
||||
|
||||
def is_already_applied(patch_func, patch_file, basedir, dest_file=None, binary=False, strip=0, state='present'):
    """Return True when the target is already in the desired patch state.

    Performs a dry run via *patch_func*; for state='present' the patch is
    tried in reverse - if reversing would succeed, it must already be applied.
    """
    run_opts = [
        '--quiet',
        '--forward',
        "--strip=%s" % strip,
        "--directory='%s'" % basedir,
        "--input='%s'" % patch_file,
    ]
    add_dry_run_option(run_opts)
    if binary:
        run_opts.append('--binary')
    if dest_file:
        run_opts.append("'%s'" % dest_file)
    if state == 'present':
        run_opts.append('--reverse')

    return patch_func(run_opts)[0] == 0
|
||||
|
||||
|
||||
def apply_patch(patch_func, patch_file, basedir, dest_file=None, binary=False, strip=0, dry_run=False, backup=False, state='present'):
    """Apply (state='present') or revert (state='absent') a patch via *patch_func*.

    Raises PatchError with patch's stderr (or stdout) when the command fails.
    """
    run_opts = [
        '--quiet',
        '--forward',
        '--batch',
        '--reject-file=-',
        "--strip=%s" % strip,
        "--directory='%s'" % basedir,
        "--input='%s'" % patch_file,
    ]
    if dry_run:
        add_dry_run_option(run_opts)
    if binary:
        run_opts.append('--binary')
    if dest_file:
        run_opts.append("'%s'" % dest_file)
    if backup:
        # Numbered backups rather than Ansible's usual timestamped copies.
        run_opts.append('--backup --version-control=numbered')
    if state == 'absent':
        run_opts.append('--reverse')

    (rc, out, err) = patch_func(run_opts)
    if rc != 0:
        raise PatchError(err or out)
|
||||
|
||||
|
||||
def main():
    # Entry point: apply or revert a patch file with the GNU patch utility,
    # honouring Ansible check mode via patch's dry-run option.
    module = AnsibleModule(
        argument_spec=dict(
            src=dict(type='path', required=True, aliases=['patchfile']),
            dest=dict(type='path', aliases=['originalfile']),
            basedir=dict(type='path'),
            strip=dict(type='int', default=0),
            remote_src=dict(type='bool', default=False),
            # NB: for 'backup' parameter, semantics is slightly different from standard
            # since patch will create numbered copies, not strftime("%Y-%m-%d@%H:%M:%S~")
            backup=dict(type='bool', default=False),
            binary=dict(type='bool', default=False),
            state=dict(type='str', default='present', choices=['absent', 'present']),
        ),
        required_one_of=[['dest', 'basedir']],
        supports_check_mode=True,
    )

    # Create type object as namespace for module params
    # (allows attribute-style access, p.src, instead of module.params['src'])
    p = type('Params', (), module.params)

    # Fail fast on unreadable/unwritable paths before invoking patch.
    if not os.access(p.src, os.R_OK):
        module.fail_json(msg="src %s doesn't exist or not readable" % (p.src))

    if p.dest and not os.access(p.dest, os.W_OK):
        module.fail_json(msg="dest %s doesn't exist or not writable" % (p.dest))

    if p.basedir and not os.path.exists(p.basedir):
        module.fail_json(msg="basedir %s doesn't exist" % (p.basedir))

    # required_one_of guarantees dest is set when basedir is not:
    # derive basedir from the file being patched.
    if not p.basedir:
        p.basedir = os.path.dirname(p.dest)

    patch_bin = module.get_bin_path('patch')
    if patch_bin is None:
        module.fail_json(msg="patch command not found")

    def patch_func(opts):
        # Run the patch binary with the assembled option list; returns (rc, out, err).
        return module.run_command('%s %s' % (patch_bin, ' '.join(opts)))

    # patch need an absolute file name
    p.src = os.path.abspath(p.src)

    changed = False
    # Idempotency: only (re)apply when a dry run shows the target is not
    # already in the desired state.
    if not is_already_applied(patch_func, p.src, p.basedir, dest_file=p.dest, binary=p.binary, strip=p.strip, state=p.state):
        try:
            apply_patch(patch_func, p.src, p.basedir, dest_file=p.dest, binary=p.binary, strip=p.strip,
                        dry_run=module.check_mode, backup=p.backup, state=p.state)
            changed = True
        except PatchError as e:
            module.fail_json(msg=to_native(e), exception=format_exc())

    module.exit_json(changed=changed)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
244
plugins/modules/files/read_csv.py
Normal file
244
plugins/modules/files/read_csv.py
Normal file
|
@ -0,0 +1,244 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2018, Dag Wieers (@dagwieers) <dag@wieers.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: read_csv
|
||||
short_description: Read a CSV file
|
||||
description:
|
||||
- Read a CSV file and return a list or a dictionary, containing one dictionary per row.
|
||||
author:
|
||||
- Dag Wieers (@dagwieers)
|
||||
options:
|
||||
path:
|
||||
description:
|
||||
- The CSV filename to read data from.
|
||||
type: path
|
||||
required: yes
|
||||
aliases: [ filename ]
|
||||
key:
|
||||
description:
|
||||
- The column name used as a key for the resulting dictionary.
|
||||
- If C(key) is unset, the module returns a list of dictionaries,
|
||||
where each dictionary is a row in the CSV file.
|
||||
type: str
|
||||
dialect:
|
||||
description:
|
||||
- The CSV dialect to use when parsing the CSV file.
|
||||
- Possible values include C(excel), C(excel-tab) or C(unix).
|
||||
type: str
|
||||
default: excel
|
||||
fieldnames:
|
||||
description:
|
||||
- A list of field names for every column.
|
||||
- This is needed if the CSV does not have a header.
|
||||
type: list
|
||||
unique:
|
||||
description:
|
||||
- Whether the C(key) used is expected to be unique.
|
||||
type: bool
|
||||
default: yes
|
||||
delimiter:
|
||||
description:
|
||||
- A one-character string used to separate fields.
|
||||
- When using this parameter, you change the default value used by C(dialect).
|
||||
- The default value depends on the dialect used.
|
||||
type: str
|
||||
skipinitialspace:
|
||||
description:
|
||||
- Whether to ignore any whitespaces immediately following the delimiter.
|
||||
- When using this parameter, you change the default value used by C(dialect).
|
||||
- The default value depends on the dialect used.
|
||||
type: bool
|
||||
strict:
|
||||
description:
|
||||
- Whether to raise an exception on bad CSV input.
|
||||
- When using this parameter, you change the default value used by C(dialect).
|
||||
- The default value depends on the dialect used.
|
||||
type: bool
|
||||
notes:
|
||||
- Ansible also ships with the C(csvfile) lookup plugin, which can be used to do selective lookups in CSV files from Jinja.
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
# Example CSV file with header
|
||||
#
|
||||
# name,uid,gid
|
||||
# dag,500,500
|
||||
# jeroen,501,500
|
||||
|
||||
# Read a CSV file and access user 'dag'
|
||||
- name: Read users from CSV file and return a dictionary
|
||||
read_csv:
|
||||
path: users.csv
|
||||
key: name
|
||||
register: users
|
||||
delegate_to: localhost
|
||||
|
||||
- debug:
|
||||
msg: 'User {{ users.dict.dag.name }} has UID {{ users.dict.dag.uid }} and GID {{ users.dict.dag.gid }}'
|
||||
|
||||
# Read a CSV file and access the first item
|
||||
- name: Read users from CSV file and return a list
|
||||
read_csv:
|
||||
path: users.csv
|
||||
register: users
|
||||
delegate_to: localhost
|
||||
|
||||
- debug:
|
||||
msg: 'User {{ users.list.1.name }} has UID {{ users.list.1.uid }} and GID {{ users.list.1.gid }}'
|
||||
|
||||
# Example CSV file without header and semi-colon delimiter
|
||||
#
|
||||
# dag;500;500
|
||||
# jeroen;501;500
|
||||
|
||||
# Read a CSV file without headers
|
||||
- name: Read users from CSV file and return a list
|
||||
read_csv:
|
||||
path: users.csv
|
||||
fieldnames: name,uid,gid
|
||||
delimiter: ';'
|
||||
register: users
|
||||
delegate_to: localhost
|
||||
'''
|
||||
|
||||
RETURN = r'''
|
||||
dict:
|
||||
description: The CSV content as a dictionary.
|
||||
returned: success
|
||||
type: dict
|
||||
sample:
|
||||
dag:
|
||||
name: dag
|
||||
uid: 500
|
||||
gid: 500
|
||||
jeroen:
|
||||
name: jeroen
|
||||
uid: 501
|
||||
gid: 500
|
||||
list:
|
||||
description: The CSV content as a list.
|
||||
returned: success
|
||||
type: list
|
||||
sample:
|
||||
- name: dag
|
||||
uid: 500
|
||||
gid: 500
|
||||
- name: jeroen
|
||||
uid: 501
|
||||
gid: 500
|
||||
'''
|
||||
|
||||
import csv
|
||||
from io import BytesIO, StringIO
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.six import PY3
|
||||
|
||||
|
||||
# Add Unix dialect from Python 3
|
||||
class unix_dialect(csv.Dialect):
    """Describe the usual properties of Unix-generated CSV files."""
    # Mirrors Python 3's csv.unix_dialect so the 'unix' dialect name is
    # available regardless of the Python version in use.
    delimiter = ','
    quotechar = '"'
    doublequote = True
    skipinitialspace = False
    # LF line endings and every field quoted, per the Unix convention.
    lineterminator = '\n'
    quoting = csv.QUOTE_ALL
|
||||
|
||||
|
||||
csv.register_dialect("unix", unix_dialect)
|
||||
|
||||
|
||||
def main():
    # Entry point: parse a CSV file and return its rows as a list of dicts
    # and/or as a dict keyed on one column.
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='path', required=True, aliases=['filename']),
            dialect=dict(type='str', default='excel'),
            key=dict(type='str'),
            fieldnames=dict(type='list'),
            unique=dict(type='bool', default=True),
            delimiter=dict(type='str'),
            skipinitialspace=dict(type='bool'),
            strict=dict(type='bool'),
        ),
        supports_check_mode=True,
    )

    path = module.params['path']
    dialect = module.params['dialect']
    key = module.params['key']
    fieldnames = module.params['fieldnames']
    unique = module.params['unique']

    if dialect not in csv.list_dialects():
        module.fail_json(msg="Dialect '%s' is not supported by your version of python." % dialect)

    dialect_options = dict(
        delimiter=module.params['delimiter'],
        skipinitialspace=module.params['skipinitialspace'],
        strict=module.params['strict'],
    )

    # Create a dictionary from only set options
    dialect_params = dict((k, v) for k, v in dialect_options.items() if v is not None)
    if dialect_params:
        # Derive a 'custom' dialect from the selected one, overriding only
        # the options the user explicitly supplied.
        try:
            csv.register_dialect('custom', dialect, **dialect_params)
        except TypeError as e:
            module.fail_json(msg="Unable to create custom dialect: %s" % to_text(e))
        dialect = 'custom'

    try:
        # Read raw bytes so decoding (below) can use surrogateescape.
        with open(path, 'rb') as f:
            data = f.read()
    except (IOError, OSError) as e:
        module.fail_json(msg="Unable to open file: %s" % to_text(e))

    if PY3:
        # Manually decode on Python3 so that we can use the surrogateescape error handler
        data = to_text(data, errors='surrogate_or_strict')
        fake_fh = StringIO(data)
    else:
        fake_fh = BytesIO(data)

    reader = csv.DictReader(fake_fh, fieldnames=fieldnames, dialect=dialect)

    # The key column must exist in the header (or supplied fieldnames).
    if key and key not in reader.fieldnames:
        module.fail_json(msg="Key '%s' was not found in the CSV header fields: %s" % (key, ', '.join(reader.fieldnames)))

    data_dict = dict()
    data_list = list()

    if key is None:
        # No key requested: return the rows as an ordered list.
        try:
            for row in reader:
                data_list.append(row)
        except csv.Error as e:
            module.fail_json(msg="Unable to process file: %s" % to_text(e))
    else:
        # Key requested: index rows by that column, optionally enforcing
        # uniqueness (duplicates otherwise overwrite earlier rows).
        try:
            for row in reader:
                if unique and row[key] in data_dict:
                    module.fail_json(msg="Key '%s' is not unique for value '%s'" % (key, row[key]))
                data_dict[row[key]] = row
        except csv.Error as e:
            module.fail_json(msg="Unable to process file: %s" % to_text(e))

    module.exit_json(dict=data_dict, list=data_list)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
241
plugins/modules/files/xattr.py
Normal file
241
plugins/modules/files/xattr.py
Normal file
|
@ -0,0 +1,241 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2017, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['stableinterface'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: xattr
|
||||
short_description: Manage user defined extended attributes
|
||||
description:
|
||||
- Manages filesystem user defined extended attributes.
|
||||
- Requires that extended attributes are enabled on the target filesystem
|
||||
and that the setfattr/getfattr utilities are present.
|
||||
options:
|
||||
path:
|
||||
description:
|
||||
- The full path of the file/object to get the facts of.
|
||||
- Before 2.3 this option was only usable as I(name).
|
||||
type: path
|
||||
required: true
|
||||
aliases: [ name ]
|
||||
namespace:
|
||||
description:
|
||||
- Namespace of the named name/key.
|
||||
type: str
|
||||
default: user
|
||||
key:
|
||||
description:
|
||||
- The name of a specific Extended attribute key to set/retrieve.
|
||||
type: str
|
||||
value:
|
||||
description:
|
||||
- The value to set the named name/key to, it automatically sets the C(state) to 'set'.
|
||||
type: str
|
||||
state:
|
||||
description:
|
||||
- defines which state you want to do.
|
||||
C(read) retrieves the current value for a C(key) (default)
|
||||
C(present) sets C(name) to C(value), default if value is set
|
||||
C(all) dumps all data
|
||||
C(keys) retrieves all keys
|
||||
C(absent) deletes the key
|
||||
type: str
|
||||
choices: [ absent, all, keys, present, read ]
|
||||
default: read
|
||||
follow:
|
||||
description:
|
||||
- If C(yes), dereferences symlinks and sets/gets attributes on symlink target,
|
||||
otherwise acts on symlink itself.
|
||||
type: bool
|
||||
default: yes
|
||||
notes:
|
||||
- As of Ansible 2.3, the I(name) option has been changed to I(path) as default, but I(name) still works as well.
|
||||
author:
|
||||
- Brian Coca (@bcoca)
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Obtain the extended attributes of /etc/foo.conf
|
||||
xattr:
|
||||
path: /etc/foo.conf
|
||||
|
||||
- name: Set the key 'user.foo' to value 'bar'
|
||||
xattr:
|
||||
path: /etc/foo.conf
|
||||
key: foo
|
||||
value: bar
|
||||
|
||||
- name: Set the key 'trusted.glusterfs.volume-id' to value '0x817b94343f164f199e5b573b4ea1f914'
|
||||
xattr:
|
||||
path: /mnt/bricks/brick1
|
||||
namespace: trusted
|
||||
key: glusterfs.volume-id
|
||||
value: "0x817b94343f164f199e5b573b4ea1f914"
|
||||
|
||||
- name: Remove the key 'user.foo'
|
||||
xattr:
|
||||
path: /etc/foo.conf
|
||||
key: foo
|
||||
state: absent
|
||||
|
||||
- name: Remove the key 'trusted.glusterfs.volume-id'
|
||||
xattr:
|
||||
path: /mnt/bricks/brick1
|
||||
namespace: trusted
|
||||
key: glusterfs.volume-id
|
||||
state: absent
|
||||
'''
|
||||
|
||||
import os
|
||||
|
||||
# import module snippets
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def get_xattr_keys(module, path, follow):
    """List the extended-attribute keys of *path*.

    Returns a dict mapping each attribute name to an empty string,
    as produced by parsing getfattr's key listing.
    """
    getfattr = module.get_bin_path('getfattr', True)
    # prevents warning and not sure why it's not default
    cmd = [getfattr, '--absolute-names']
    if not follow:
        cmd.append('-h')  # act on a symlink itself rather than its target
    cmd.append(path)

    return _run_xattr(module, cmd)
|
||||
|
||||
|
||||
def get_xattr(module, path, key, follow):
    """Read extended attribute(s) from *path* via getfattr.

    With key=None every attribute is dumped (-d); otherwise only the
    named attribute is fetched (-n). Returns a dict of key -> value.
    """
    getfattr = module.get_bin_path('getfattr', True)
    # prevents warning and not sure why it's not default
    cmd = [getfattr, '--absolute-names']
    if not follow:
        cmd.append('-h')  # act on a symlink itself rather than its target
    cmd.append('-d' if key is None else '-n %s' % key)
    cmd.append(path)

    return _run_xattr(module, cmd, False)
|
||||
|
||||
|
||||
def set_xattr(module, path, key, value, follow):
    """Set extended attribute *key* to *value* on *path* via setfattr."""
    cmd = [module.get_bin_path('setfattr', True)]
    if not follow:
        cmd.append('-h')  # act on a symlink itself rather than its target
    cmd.extend(['-n %s' % key, '-v %s' % value, path])

    return _run_xattr(module, cmd)
|
||||
|
||||
|
||||
def rm_xattr(module, path, key, follow):
    """Remove extended attribute *key* from *path* via setfattr -x."""
    cmd = [module.get_bin_path('setfattr', True)]
    if not follow:
        cmd.append('-h')  # act on a symlink itself rather than its target
    cmd.extend(['-x %s' % key, path])

    return _run_xattr(module, cmd, False)
|
||||
|
||||
|
||||
def _run_xattr(module, cmd, check_rc=True):
    """Run a getfattr/setfattr command and parse its output into a dict.

    :param module: AnsibleModule instance, used to run the command and fail.
    :param cmd: command and arguments as a list; joined with spaces to run.
    :param check_rc: when True, a non-zero exit code fails the module.
    :returns: dict mapping attribute keys to values ('' for bare key lines).
    """
    try:
        (rc, out, err) = module.run_command(' '.join(cmd), check_rc=check_rc)
    except Exception as e:
        module.fail_json(msg="%s!" % to_native(e))

    result = {}
    for line in out.splitlines():
        if line.startswith('#') or line == '':
            # '# file: ...' header or blank separator line - skip
            pass
        elif '=' in line:
            # BUGFIX: split on the first '=' only; values containing '='
            # (e.g. user.x="a=b") used to raise ValueError on unpacking.
            (key, val) = line.split('=', 1)
            result[key] = val.strip('"')
        else:
            # Key listed without a value (getfattr key listing).
            result[line] = ''
    return result
|
||||
|
||||
|
||||
def main():
    # Entry point: read, list, set or remove user-defined extended
    # attributes on a file, selected by the 'state' parameter.
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='path', required=True, aliases=['name']),
            namespace=dict(type='str', default='user'),
            key=dict(type='str'),
            value=dict(type='str'),
            state=dict(type='str', default='read', choices=['absent', 'all', 'keys', 'present', 'read']),
            follow=dict(type='bool', default=True),
        ),
        supports_check_mode=True,
    )
    path = module.params.get('path')
    namespace = module.params.get('namespace')
    key = module.params.get('key')
    value = module.params.get('value')
    state = module.params.get('state')
    follow = module.params.get('follow')

    if not os.path.exists(path):
        module.fail_json(msg="path not found or not accessible!")

    changed = False
    msg = ""
    res = {}

    # Mutating states require a key to operate on.
    if key is None and state in ['absent', 'present']:
        module.fail_json(msg="%s needs a key parameter" % state)

    # Prepend the key with the namespace if defined
    # (skipped when namespace is 'user' and the key already carries it).
    if (
            key is not None and
            namespace is not None and
            len(namespace) > 0 and
            not (namespace == 'user' and key.startswith('user.'))):
        key = '%s.%s' % (namespace, key)

    if (state == 'present' or value is not None):
        # Providing a value implies 'present'. Only write when the current
        # value differs (idempotency); honour check mode.
        current = get_xattr(module, path, key, follow)
        if current is None or key not in current or value != current[key]:
            if not module.check_mode:
                res = set_xattr(module, path, key, value, follow)
            changed = True
        res = current
        msg = "%s set to %s" % (key, value)
    elif state == 'absent':
        # Remove the key only when it is actually present.
        current = get_xattr(module, path, key, follow)
        if current is not None and key in current:
            if not module.check_mode:
                res = rm_xattr(module, path, key, follow)
            changed = True
        res = current
        msg = "%s removed" % (key)
    elif state == 'keys':
        # List attribute names only.
        res = get_xattr_keys(module, path, follow)
        msg = "returning all keys"
    elif state == 'all':
        # Dump every attribute with its value.
        res = get_xattr(module, path, None, follow)
        msg = "dumping all"
    else:
        # state == 'read': return the single requested key.
        res = get_xattr(module, path, key, follow)
        msg = "returning %s" % key

    module.exit_json(changed=changed, msg=msg, xattr=res)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
961
plugins/modules/files/xml.py
Normal file
961
plugins/modules/files/xml.py
Normal file
|
@ -0,0 +1,961 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2014, Red Hat, Inc.
|
||||
# Copyright: (c) 2014, Tim Bielawa <tbielawa@redhat.com>
|
||||
# Copyright: (c) 2014, Magnus Hedemark <mhedemar@redhat.com>
|
||||
# Copyright: (c) 2017, Dag Wieers <dag@wieers.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: xml
|
||||
short_description: Manage bits and pieces of XML files or strings
|
||||
description:
|
||||
- A CRUD-like interface to managing bits of XML files.
|
||||
options:
|
||||
path:
|
||||
description:
|
||||
- Path to the file to operate on.
|
||||
- This file must exist ahead of time.
|
||||
- This parameter is required, unless C(xmlstring) is given.
|
||||
type: path
|
||||
required: yes
|
||||
aliases: [ dest, file ]
|
||||
xmlstring:
|
||||
description:
|
||||
- A string containing XML on which to operate.
|
||||
- This parameter is required, unless C(path) is given.
|
||||
type: str
|
||||
required: yes
|
||||
xpath:
|
||||
description:
|
||||
- A valid XPath expression describing the item(s) you want to manipulate.
|
||||
- Operates on the document root, C(/), by default.
|
||||
type: str
|
||||
namespaces:
|
||||
description:
|
||||
- The namespace C(prefix:uri) mapping for the XPath expression.
|
||||
- Needs to be a C(dict), not a C(list) of items.
|
||||
type: dict
|
||||
state:
|
||||
description:
|
||||
- Set or remove an xpath selection (node(s), attribute(s)).
|
||||
type: str
|
||||
choices: [ absent, present ]
|
||||
default: present
|
||||
aliases: [ ensure ]
|
||||
attribute:
|
||||
description:
|
||||
- The attribute to select when using parameter C(value).
|
||||
- This is a string, not prepended with C(@).
|
||||
type: raw
|
||||
value:
|
||||
description:
|
||||
- Desired state of the selected attribute.
|
||||
- Either a string, or to unset a value, the Python C(None) keyword (YAML Equivalent, C(null)).
|
||||
- Elements default to no value (but present).
|
||||
- Attributes default to an empty string.
|
||||
type: raw
|
||||
add_children:
|
||||
description:
|
||||
- Add additional child-element(s) to a selected element for a given C(xpath).
|
||||
- Child elements must be given in a list and each item may be either a string
|
||||
(eg. C(children=ansible) to add an empty C(<ansible/>) child element),
|
||||
or a hash where the key is an element name and the value is the element value.
|
||||
- This parameter requires C(xpath) to be set.
|
||||
type: list
|
||||
set_children:
|
||||
description:
|
||||
- Set the child-element(s) of a selected element for a given C(xpath).
|
||||
- Removes any existing children.
|
||||
- Child elements must be specified as in C(add_children).
|
||||
- This parameter requires C(xpath) to be set.
|
||||
type: list
|
||||
count:
|
||||
description:
|
||||
- Search for a given C(xpath) and provide the count of any matches.
|
||||
- This parameter requires C(xpath) to be set.
|
||||
type: bool
|
||||
default: no
|
||||
print_match:
|
||||
description:
|
||||
- Search for a given C(xpath) and print out any matches.
|
||||
- This parameter requires C(xpath) to be set.
|
||||
type: bool
|
||||
default: no
|
||||
pretty_print:
|
||||
description:
|
||||
- Pretty print XML output.
|
||||
type: bool
|
||||
default: no
|
||||
content:
|
||||
description:
|
||||
- Search for a given C(xpath) and get content.
|
||||
- This parameter requires C(xpath) to be set.
|
||||
type: str
|
||||
choices: [ attribute, text ]
|
||||
input_type:
|
||||
description:
|
||||
- Type of input for C(add_children) and C(set_children).
|
||||
type: str
|
||||
choices: [ xml, yaml ]
|
||||
default: yaml
|
||||
backup:
|
||||
description:
|
||||
- Create a backup file including the timestamp information so you can get
|
||||
the original file back if you somehow clobbered it incorrectly.
|
||||
type: bool
|
||||
default: no
|
||||
strip_cdata_tags:
|
||||
description:
|
||||
- Remove CDATA tags surrounding text values.
|
||||
- Note that this might break your XML file if text values contain characters that could be interpreted as XML.
|
||||
type: bool
|
||||
default: no
|
||||
insertbefore:
|
||||
description:
|
||||
- Add additional child-element(s) before the first selected element for a given C(xpath).
|
||||
- Child elements must be given in a list and each item may be either a string
|
||||
(eg. C(children=ansible) to add an empty C(<ansible/>) child element),
|
||||
or a hash where the key is an element name and the value is the element value.
|
||||
- This parameter requires C(xpath) to be set.
|
||||
type: bool
|
||||
default: no
|
||||
insertafter:
|
||||
description:
|
||||
- Add additional child-element(s) after the last selected element for a given C(xpath).
|
||||
- Child elements must be given in a list and each item may be either a string
|
||||
(eg. C(children=ansible) to add an empty C(<ansible/>) child element),
|
||||
or a hash where the key is an element name and the value is the element value.
|
||||
- This parameter requires C(xpath) to be set.
|
||||
type: bool
|
||||
default: no
|
||||
requirements:
|
||||
- lxml >= 2.3.0
|
||||
notes:
|
||||
- Use the C(--check) and C(--diff) options when testing your expressions.
|
||||
- The diff output is automatically pretty-printed, so may not reflect the actual file content, only the file structure.
|
||||
- This module does not handle complicated xpath expressions, so limit xpath selectors to simple expressions.
|
||||
- Beware that in case your XML elements are namespaced, you need to use the C(namespaces) parameter, see the examples.
|
||||
- Namespaces prefix should be used for all children of an element where namespace is defined, unless another namespace is defined for them.
|
||||
seealso:
|
||||
- name: Xml module development community wiki
|
||||
description: More information related to the development of this xml module.
|
||||
link: https://github.com/ansible/community/wiki/Module:-xml
|
||||
- name: Introduction to XPath
|
||||
description: A brief tutorial on XPath (w3schools.com).
|
||||
link: https://www.w3schools.com/xml/xpath_intro.asp
|
||||
- name: XPath Reference document
|
||||
description: The reference documentation on XSLT/XPath (developer.mozilla.org).
|
||||
link: https://developer.mozilla.org/en-US/docs/Web/XPath
|
||||
author:
|
||||
- Tim Bielawa (@tbielawa)
|
||||
- Magnus Hedemark (@magnus919)
|
||||
- Dag Wieers (@dagwieers)
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
# Consider the following XML file:
|
||||
#
|
||||
# <business type="bar">
|
||||
# <name>Tasty Beverage Co.</name>
|
||||
# <beers>
|
||||
# <beer>Rochefort 10</beer>
|
||||
# <beer>St. Bernardus Abbot 12</beer>
|
||||
# <beer>Schlitz</beer>
|
||||
# </beers>
|
||||
# <rating subjective="true">10</rating>
|
||||
# <website>
|
||||
# <mobilefriendly/>
|
||||
# <address>http://tastybeverageco.com</address>
|
||||
# </website>
|
||||
# </business>
|
||||
|
||||
- name: Remove the 'subjective' attribute of the 'rating' element
|
||||
xml:
|
||||
path: /foo/bar.xml
|
||||
xpath: /business/rating/@subjective
|
||||
state: absent
|
||||
|
||||
- name: Set the rating to '11'
|
||||
xml:
|
||||
path: /foo/bar.xml
|
||||
xpath: /business/rating
|
||||
value: 11
|
||||
|
||||
# Retrieve and display the number of nodes
|
||||
- name: Get count of 'beers' nodes
|
||||
xml:
|
||||
path: /foo/bar.xml
|
||||
xpath: /business/beers/beer
|
||||
count: yes
|
||||
register: hits
|
||||
|
||||
- debug:
|
||||
var: hits.count
|
||||
|
||||
# Example where parent XML nodes are created automatically
|
||||
- name: Add a 'phonenumber' element to the 'business' element
|
||||
xml:
|
||||
path: /foo/bar.xml
|
||||
xpath: /business/phonenumber
|
||||
value: 555-555-1234
|
||||
|
||||
- name: Add several more beers to the 'beers' element
|
||||
xml:
|
||||
path: /foo/bar.xml
|
||||
xpath: /business/beers
|
||||
add_children:
|
||||
- beer: Old Rasputin
|
||||
- beer: Old Motor Oil
|
||||
- beer: Old Curmudgeon
|
||||
|
||||
- name: Add several more beers to the 'beers' element and add them before the 'Rochefort 10' element
|
||||
xml:
|
||||
path: /foo/bar.xml
|
||||
xpath: '/business/beers/beer[text()="Rochefort 10"]'
|
||||
insertbefore: yes
|
||||
add_children:
|
||||
- beer: Old Rasputin
|
||||
- beer: Old Motor Oil
|
||||
- beer: Old Curmudgeon
|
||||
|
||||
# NOTE: The 'state' defaults to 'present' and 'value' defaults to 'null' for elements
|
||||
- name: Add a 'validxhtml' element to the 'website' element
|
||||
xml:
|
||||
path: /foo/bar.xml
|
||||
xpath: /business/website/validxhtml
|
||||
|
||||
- name: Add an empty 'validatedon' attribute to the 'validxhtml' element
|
||||
xml:
|
||||
path: /foo/bar.xml
|
||||
xpath: /business/website/validxhtml/@validatedon
|
||||
|
||||
- name: Add or modify an attribute, add element if needed
|
||||
xml:
|
||||
path: /foo/bar.xml
|
||||
xpath: /business/website/validxhtml
|
||||
attribute: validatedon
|
||||
value: 1976-08-05
|
||||
|
||||
# How to read an attribute value and access it in Ansible
|
||||
- name: Read an element's attribute values
|
||||
xml:
|
||||
path: /foo/bar.xml
|
||||
xpath: /business/website/validxhtml
|
||||
content: attribute
|
||||
register: xmlresp
|
||||
|
||||
- name: Show an attribute value
|
||||
debug:
|
||||
var: xmlresp.matches[0].validxhtml.validatedon
|
||||
|
||||
- name: Remove all children from the 'website' element (option 1)
|
||||
xml:
|
||||
path: /foo/bar.xml
|
||||
xpath: /business/website/*
|
||||
state: absent
|
||||
|
||||
- name: Remove all children from the 'website' element (option 2)
|
||||
xml:
|
||||
path: /foo/bar.xml
|
||||
xpath: /business/website
|
||||
children: []
|
||||
|
||||
# In case of namespaces, like in below XML, they have to be explicitly stated.
|
||||
#
|
||||
# <foo xmlns="http://x.test" xmlns:attr="http://z.test">
|
||||
# <bar>
|
||||
# <baz xmlns="http://y.test" attr:my_namespaced_attribute="true" />
|
||||
# </bar>
|
||||
# </foo>
|
||||
|
||||
# NOTE: There is the prefix 'x' in front of the 'bar' element, too.
|
||||
- name: Set namespaced '/x:foo/x:bar/y:baz/@z:my_namespaced_attribute' to 'false'
|
||||
xml:
|
||||
path: foo.xml
|
||||
xpath: /x:foo/x:bar/y:baz
|
||||
namespaces:
|
||||
x: http://x.test
|
||||
y: http://y.test
|
||||
z: http://z.test
|
||||
attribute: z:my_namespaced_attribute
|
||||
value: 'false'
|
||||
'''
|
||||
|
||||
RETURN = r'''
|
||||
actions:
|
||||
description: A dictionary with the original xpath, namespaces and state.
|
||||
type: dict
|
||||
returned: success
|
||||
    sample: {xpath: xpath, namespaces: [namespace1, namespace2], state: present}
|
||||
backup_file:
|
||||
description: The name of the backup file that was created
|
||||
type: str
|
||||
returned: when backup=yes
|
||||
sample: /path/to/file.xml.1942.2017-08-24@14:16:01~
|
||||
count:
|
||||
description: The count of xpath matches.
|
||||
type: int
|
||||
returned: when parameter 'count' is set
|
||||
sample: 2
|
||||
matches:
|
||||
description: The xpath matches found.
|
||||
type: list
|
||||
returned: when parameter 'print_match' is set
|
||||
msg:
|
||||
description: A message related to the performed action(s).
|
||||
type: str
|
||||
returned: always
|
||||
xmlstring:
|
||||
description: An XML string of the resulting output.
|
||||
type: str
|
||||
returned: when parameter 'xmlstring' is set
|
||||
'''
|
||||
|
||||
import copy
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from distutils.version import LooseVersion
|
||||
from io import BytesIO
|
||||
|
||||
LXML_IMP_ERR = None
|
||||
try:
|
||||
from lxml import etree, objectify
|
||||
HAS_LXML = True
|
||||
except ImportError:
|
||||
LXML_IMP_ERR = traceback.format_exc()
|
||||
HAS_LXML = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, json_dict_bytes_to_unicode, missing_required_lib
|
||||
from ansible.module_utils.six import iteritems, string_types
|
||||
from ansible.module_utils._text import to_bytes, to_native
|
||||
from ansible.module_utils.common._collections_compat import MutableMapping
|
||||
|
||||
_IDENT = r"[a-zA-Z-][a-zA-Z0-9_\-\.]*"
|
||||
_NSIDENT = _IDENT + "|" + _IDENT + ":" + _IDENT
|
||||
# Note: we can't reasonably support the 'if you need to put both ' and " in a string, concatenate
|
||||
# strings wrapped by the other delimiter' XPath trick, especially as simple XPath.
|
||||
_XPSTR = "('(?:.*)'|\"(?:.*)\")"
|
||||
|
||||
_RE_SPLITSIMPLELAST = re.compile("^(.*)/(" + _NSIDENT + ")$")
|
||||
_RE_SPLITSIMPLELASTEQVALUE = re.compile("^(.*)/(" + _NSIDENT + ")/text\\(\\)=" + _XPSTR + "$")
|
||||
_RE_SPLITSIMPLEATTRLAST = re.compile("^(.*)/(@(?:" + _NSIDENT + "))$")
|
||||
_RE_SPLITSIMPLEATTRLASTEQVALUE = re.compile("^(.*)/(@(?:" + _NSIDENT + "))=" + _XPSTR + "$")
|
||||
_RE_SPLITSUBLAST = re.compile("^(.*)/(" + _NSIDENT + ")\\[(.*)\\]$")
|
||||
_RE_SPLITONLYEQVALUE = re.compile("^(.*)/text\\(\\)=" + _XPSTR + "$")
|
||||
|
||||
|
||||
def has_changed(doc):
    """Return True when *doc* differs from the module-global ``orig_doc``.

    Both trees are round-tripped through ``objectify`` before serializing so
    the comparison is on normalized output rather than raw byte layout.
    """
    before = etree.tostring(objectify.fromstring(etree.tostring(orig_doc)))
    after = etree.tostring(objectify.fromstring(etree.tostring(doc)))
    return before != after
|
||||
|
||||
|
||||
def do_print_match(module, tree, xpath, namespaces):
    """Report the document path of every node matched by *xpath*, then exit."""
    matched_paths = [tree.getpath(node)
                     for node in tree.xpath(xpath, namespaces=namespaces)]
    finish(module, tree, xpath, namespaces, changed=False,
           msg="selector '%s' match: %s" % (xpath, json.dumps(matched_paths)))
|
||||
|
||||
|
||||
def count_nodes(module, tree, xpath, namespaces):
    """ Return the count of nodes matching the xpath """
    # XPath's count() does the tallying; the xpath is anchored at the root.
    hits = tree.xpath("count(/%s)" % xpath, namespaces=namespaces)
    finish(module, tree, xpath, namespaces, changed=False,
           msg="found %d nodes" % hits, hitcount=int(hits))
|
||||
|
||||
|
||||
def is_node(tree, xpath, namespaces):
    """ Test if a given xpath matches anything and if that match is a node.

    For now we just assume you're only searching for one specific thing."""
    if not xpath_matches(tree, xpath, namespaces):
        return False
    # Only the first hit is inspected.
    first = tree.xpath(xpath, namespaces=namespaces)[0]
    return isinstance(first, etree._Element)
|
||||
|
||||
|
||||
def is_attribute(tree, xpath, namespaces):
    """ Test if a given xpath matches and that match is an attribute

    An xpath attribute search will only match one item"""
    if not xpath_matches(tree, xpath, namespaces):
        return False
    first = tree.xpath(xpath, namespaces=namespaces)[0]
    # Attribute hits come back as lxml "smart string" result objects.
    return isinstance(first, (etree._ElementStringResult,
                              etree._ElementUnicodeResult))
|
||||
|
||||
|
||||
def xpath_matches(tree, xpath, namespaces):
    """ Test if a node exists """
    # An empty result list is falsy; anything matched means True.
    return bool(tree.xpath(xpath, namespaces=namespaces))
|
||||
|
||||
|
||||
def delete_xpath_target(module, tree, xpath, namespaces):
    """ Delete an attribute or element from a tree """
    try:
        for result in tree.xpath(xpath, namespaces=namespaces):
            # Get the xpath for this result
            if is_attribute(tree, xpath, namespaces):
                # Delete an attribute
                parent = result.getparent()
                # Pop this attribute match out of the parent
                # node's 'attrib' dict by using this match's
                # 'attrname' attribute for the key
                parent.attrib.pop(result.attrname)
            elif is_node(tree, xpath, namespaces):
                # Delete an element
                # NOTE(review): removing elements while iterating the result
                # list mutates the tree mid-loop; preserved as-is.
                result.getparent().remove(result)
            else:
                # The xpath matched something that is neither element nor
                # attribute; treated as unreachable.
                raise Exception("Impossible error")
    except Exception as e:
        module.fail_json(msg="Couldn't delete xpath target: %s (%s)" % (xpath, e))
    else:
        # Only report (and write out) a change when no deletion raised.
        finish(module, tree, xpath, namespaces, changed=True)
|
||||
|
||||
|
||||
def replace_children_of(children, match):
    """Remove every existing child of *match* and attach *children* instead."""
    # Iterate over a snapshot so removal doesn't disturb the iteration.
    for existing in list(match):
        match.remove(existing)
    match.extend(children)
|
||||
|
||||
|
||||
def set_target_children_inner(module, tree, xpath, namespaces, children, in_type):
    """Replace the children of every *xpath* match with *children*.

    Returns True when at least one match actually had its children replaced.
    Does not write anything out; the caller handles persistence.
    """
    matches = tree.xpath(xpath, namespaces=namespaces)

    # Create a list of our new children
    children = children_to_nodes(module, children, in_type)
    # Pre-serialize once so per-match comparison is cheap.
    children_as_string = [etree.tostring(c) for c in children]

    changed = False

    # xpaths always return matches as a list, so....
    for match in matches:
        # Check if elements differ
        if len(list(match)) == len(children):
            # Same child count: only replace when any serialized child differs.
            for idx, element in enumerate(list(match)):
                if etree.tostring(element) != children_as_string[idx]:
                    replace_children_of(children, match)
                    changed = True
                    break
        else:
            # Different child count: always a change.
            replace_children_of(children, match)
            changed = True

    return changed
|
||||
|
||||
|
||||
def set_target_children(module, tree, xpath, namespaces, children, in_type):
    """Replace children at every *xpath* match, then write out and exit."""
    was_changed = set_target_children_inner(
        module, tree, xpath, namespaces, children, in_type)
    # Write it out
    finish(module, tree, xpath, namespaces, changed=was_changed)
|
||||
|
||||
|
||||
def add_target_children(module, tree, xpath, namespaces, children, in_type, insertbefore, insertafter):
    """Add new child elements at every *xpath* match, then exit.

    With insertbefore/insertafter the children become siblings of the match;
    otherwise they are appended inside each matched element.
    """
    if not is_node(tree, xpath, namespaces):
        # Nothing matched: exit without reporting a change.
        finish(module, tree, xpath, namespaces)
        return

    new_kids = children_to_nodes(module, children, in_type)
    if insertbefore or insertafter:
        insert_target_children(tree, xpath, namespaces, new_kids,
                               insertbefore, insertafter)
    else:
        for node in tree.xpath(xpath, namespaces=namespaces):
            node.extend(new_kids)
    finish(module, tree, xpath, namespaces, changed=True)
|
||||
|
||||
|
||||
def insert_target_children(tree, xpath, namespaces, children, insertbefore, insertafter):
    """
    Insert the given children before or after the given xpath. If insertbefore is True, it is inserted before the
    first xpath hit, with insertafter, it is inserted after the last xpath hit.
    """
    insert_target = tree.xpath(xpath, namespaces=namespaces)
    # Anchor on the first hit for insertbefore, the last hit for insertafter.
    loc_index = 0 if insertbefore else -1
    index_in_parent = insert_target[loc_index].getparent().index(insert_target[loc_index])
    # NOTE(review): the parent is always taken from the FIRST hit, while the
    # index comes from the anchor hit — assumes all hits share one parent.
    parent = insert_target[0].getparent()
    if insertafter:
        # Shift one slot so the children land after the anchor element.
        index_in_parent += 1
    for child in children:
        parent.insert(index_in_parent, child)
        # Advance so multiple children keep their given order.
        index_in_parent += 1
|
||||
|
||||
|
||||
def _extract_xpstr(g):
|
||||
return g[1:-1]
|
||||
|
||||
|
||||
def split_xpath_last(xpath):
    """split an XPath of the form /foo/bar/baz into /foo/bar and baz

    Returns (parent_xpath, changes) where changes is a list of
    (element_or_attribute, value) tuples describing what the last path
    component asks for. Falls through to (xpath, []) when no pattern matches.
    The match order below is significant: more specific forms first.
    """
    xpath = xpath.strip()
    m = _RE_SPLITSIMPLELAST.match(xpath)
    if m:
        # requesting an element to exist
        return (m.group(1), [(m.group(2), None)])
    m = _RE_SPLITSIMPLELASTEQVALUE.match(xpath)
    if m:
        # requesting an element to exist with an inner text
        return (m.group(1), [(m.group(2), _extract_xpstr(m.group(3)))])

    m = _RE_SPLITSIMPLEATTRLAST.match(xpath)
    if m:
        # requesting an attribute to exist
        return (m.group(1), [(m.group(2), None)])
    m = _RE_SPLITSIMPLEATTRLASTEQVALUE.match(xpath)
    if m:
        # requesting an attribute to exist with a value
        return (m.group(1), [(m.group(2), _extract_xpstr(m.group(3)))])

    m = _RE_SPLITSUBLAST.match(xpath)
    if m:
        # predicate form foo[a and b]: split the predicate on " and "
        content = [x.strip() for x in m.group(3).split(" and ")]
        return (m.group(1), [('/' + m.group(2), content)])

    m = _RE_SPLITONLYEQVALUE.match(xpath)
    if m:
        # requesting a change of inner text
        return (m.group(1), [("", _extract_xpstr(m.group(2)))])
    return (xpath, [])
|
||||
|
||||
|
||||
def nsnameToClark(name, namespaces):
    """Convert 'prefix:local' to Clark notation '{uri}local' via *namespaces*.

    A name without a prefix is returned unchanged.
    """
    if ":" not in name:
        # no namespace name here
        return name
    nsname, rawname = name.split(":")
    return "{{{0}}}{1}".format(namespaces[nsname], rawname)
|
||||
|
||||
|
||||
def check_or_make_target(module, tree, xpath, namespaces):
    """Recursively create the nodes/attributes/text described by *xpath*.

    Splits the last path component off, ensures the parent exists (recursing
    as needed), then applies each requested change: create element, create
    sub-element with predicates, set inner text, or set attribute.
    Returns True when the tree was modified; fails the module on an
    unsplittable xpath.
    """
    (inner_xpath, changes) = split_xpath_last(xpath)
    if (inner_xpath == xpath) or (changes is None):
        # Could not peel a component off: nothing to create from here.
        module.fail_json(msg="Can't process Xpath %s in order to spawn nodes! tree is %s" %
                         (xpath, etree.tostring(tree, pretty_print=True)))
        return False

    changed = False

    if not is_node(tree, inner_xpath, namespaces):
        # Parent missing: create it first.
        changed = check_or_make_target(module, tree, inner_xpath, namespaces)

    # we test again after calling check_or_make_target
    if is_node(tree, inner_xpath, namespaces) and changes:
        for (eoa, eoa_value) in changes:
            if eoa and eoa[0] != '@' and eoa[0] != '/':
                # implicitly creating an element
                new_kids = children_to_nodes(module, [nsnameToClark(eoa, namespaces)], "yaml")
                if eoa_value:
                    for nk in new_kids:
                        nk.text = eoa_value

                for node in tree.xpath(inner_xpath, namespaces=namespaces):
                    node.extend(new_kids)
                changed = True
                # module.fail_json(msg="now tree=%s" % etree.tostring(tree, pretty_print=True))
            elif eoa and eoa[0] == '/':
                # '/name' change: element with predicate sub-expressions to satisfy
                element = eoa[1:]
                new_kids = children_to_nodes(module, [nsnameToClark(element, namespaces)], "yaml")
                for node in tree.xpath(inner_xpath, namespaces=namespaces):
                    node.extend(new_kids)
                    for nk in new_kids:
                        for subexpr in eoa_value:
                            # module.fail_json(msg="element=%s subexpr=%s node=%s now tree=%s" %
                            #                  (element, subexpr, etree.tostring(node, pretty_print=True), etree.tostring(tree, pretty_print=True))
                            check_or_make_target(module, nk, "./" + subexpr, namespaces)
                changed = True

                # module.fail_json(msg="now tree=%s" % etree.tostring(tree, pretty_print=True))
            elif eoa == "":
                # empty name: set the inner text of the parent node(s)
                for node in tree.xpath(inner_xpath, namespaces=namespaces):
                    if (node.text != eoa_value):
                        node.text = eoa_value
                        changed = True

            elif eoa and eoa[0] == '@':
                # '@name': ensure the attribute exists with the requested value
                attribute = nsnameToClark(eoa[1:], namespaces)

                for element in tree.xpath(inner_xpath, namespaces=namespaces):
                    changing = (attribute not in element.attrib or element.attrib[attribute] != eoa_value)

                    if changing:
                        changed = changed or changing
                        if eoa_value is None:
                            value = ""
                        else:
                            value = eoa_value
                        element.attrib[attribute] = value

                    # module.fail_json(msg="arf %s changing=%s as curval=%s changed tree=%s" %
                    #                  (xpath, changing, etree.tostring(tree, changing, element[attribute], pretty_print=True)))

            else:
                module.fail_json(msg="unknown tree transformation=%s" % etree.tostring(tree, pretty_print=True))

    return changed
|
||||
|
||||
|
||||
def ensure_xpath_exists(module, tree, xpath, namespaces):
    """Create the nodes described by *xpath* when absent, then exit."""
    if is_node(tree, xpath, namespaces):
        changed = False
    else:
        changed = check_or_make_target(module, tree, xpath, namespaces)

    finish(module, tree, xpath, namespaces, changed)
|
||||
|
||||
|
||||
def set_target_inner(module, tree, xpath, namespaces, attribute, value):
    """Ensure *xpath* exists and set element text (or *attribute*) to *value*.

    Returns True when the tree was modified. Fails the module when the xpath
    cannot be created or does not reference a node.
    """
    changed = False

    try:
        if not is_node(tree, xpath, namespaces):
            changed = check_or_make_target(module, tree, xpath, namespaces)
    except Exception as e:
        missing_namespace = ""
        # NOTE: This checks only the namespaces defined in root element!
        # TODO: Implement a more robust check to check for child namespaces' existence
        if tree.getroot().nsmap and ":" not in xpath:
            missing_namespace = "XML document has namespace(s) defined, but no namespace prefix(es) used in xpath!\n"
        module.fail_json(msg="%sXpath %s causes a failure: %s\n -- tree is %s" %
                         (missing_namespace, xpath, e, etree.tostring(tree, pretty_print=True)), exception=traceback.format_exc())

    if not is_node(tree, xpath, namespaces):
        module.fail_json(msg="Xpath %s does not reference a node! tree is %s" %
                         (xpath, etree.tostring(tree, pretty_print=True)))

    for element in tree.xpath(xpath, namespaces=namespaces):
        if not attribute:
            # No attribute given: set the element's inner text.
            changed = changed or (element.text != value)
            if element.text != value:
                element.text = value
        else:
            changed = changed or (element.get(attribute) != value)
            if ":" in attribute:
                # Translate 'prefix:name' to Clark notation for lxml.
                attr_ns, attr_name = attribute.split(":")
                # attribute = "{{%s}}%s" % (namespaces[attr_ns], attr_name)
                attribute = "{{{0}}}{1}".format(namespaces[attr_ns], attr_name)
            if element.get(attribute) != value:
                element.set(attribute, value)

    return changed
|
||||
|
||||
|
||||
def set_target(module, tree, xpath, namespaces, attribute, value):
    """Set element text or an attribute at *xpath*, then write out and exit."""
    did_change = set_target_inner(module, tree, xpath, namespaces, attribute, value)
    finish(module, tree, xpath, namespaces, did_change)
|
||||
|
||||
|
||||
def get_element_text(module, tree, xpath, namespaces):
    """Report the text content of every element matched by *xpath*, then exit."""
    if not is_node(tree, xpath, namespaces):
        module.fail_json(msg="Xpath %s does not reference a node!" % xpath)

    elements = [{node.tag: node.text}
                for node in tree.xpath(xpath, namespaces=namespaces)]

    finish(module, tree, xpath, namespaces, changed=False,
           msg=len(elements), hitcount=len(elements), matches=elements)
|
||||
|
||||
|
||||
def get_element_attr(module, tree, xpath, namespaces):
    """Report all attributes of every element matched by *xpath*, then exit."""
    if not is_node(tree, xpath, namespaces):
        module.fail_json(msg="Xpath %s does not reference a node!" % xpath)

    elements = []
    for node in tree.xpath(xpath, namespaces=namespaces):
        # Collect this element's attributes into a plain dict.
        attrs = {name: node.get(name) for name in node.keys()}
        elements.append({node.tag: attrs})

    finish(module, tree, xpath, namespaces, changed=False,
           msg=len(elements), hitcount=len(elements), matches=elements)
|
||||
|
||||
|
||||
def child_to_element(module, child, in_type):
    """Convert one child spec into an lxml Element.

    With in_type 'xml', *child* is parsed as an XML fragment. With 'yaml',
    a string becomes an empty element of that name, and a single-key mapping
    becomes an element whose value is either its text, or (when the value is
    itself a mapping) its attributes plus optional nested children under '_'.
    Fails the module for any other input.
    """
    if in_type == 'xml':
        infile = BytesIO(to_bytes(child, errors='surrogate_or_strict'))

        try:
            parser = etree.XMLParser()
            node = etree.parse(infile, parser)
            return node.getroot()
        except etree.XMLSyntaxError as e:
            module.fail_json(msg="Error while parsing child element: %s" % e)
    elif in_type == 'yaml':
        if isinstance(child, string_types):
            # Bare string: empty element named after the string.
            return etree.Element(child)
        elif isinstance(child, MutableMapping):
            if len(child) > 1:
                module.fail_json(msg="Can only create children from hashes with one key")

            (key, value) = next(iteritems(child))
            if isinstance(value, MutableMapping):
                # Mapping value: attributes, with nested children under '_'.
                children = value.pop('_', None)

                node = etree.Element(key, value)

                if children is not None:
                    if not isinstance(children, list):
                        module.fail_json(msg="Invalid children type: %s, must be list." % type(children))

                    subnodes = children_to_nodes(module, children)
                    node.extend(subnodes)
            else:
                # Scalar value: element text.
                node = etree.Element(key)
                node.text = value
            return node
        else:
            module.fail_json(msg="Invalid child type: %s. Children must be either strings or hashes." % type(child))
    else:
        module.fail_json(msg="Invalid child input type: %s. Type must be either xml or yaml." % in_type)
|
||||
|
||||
|
||||
def children_to_nodes(module=None, children=None, type='yaml'):
    """turn a str/hash/list of str&hash into a list of elements"""
    # NOTE: 'type' shadows the builtin but is kept for caller compatibility.
    if children is None:
        children = []
    return [child_to_element(module, child, type) for child in children]
|
||||
|
||||
|
||||
def make_pretty(module, tree):
    """Re-serialize the document (optionally pretty-printed) and exit.

    For a file path, the file is rewritten only when the serialization
    differs (honoring check mode and the backup option). For an xmlstring
    input, the new string is returned in the result.
    """
    xml_string = etree.tostring(tree, xml_declaration=True, encoding='UTF-8', pretty_print=module.params['pretty_print'])

    result = dict(
        changed=False,
    )

    if module.params['path']:
        xml_file = module.params['path']
        # Compare raw bytes against the on-disk content to decide 'changed'.
        with open(xml_file, 'rb') as xml_content:
            if xml_string != xml_content.read():
                result['changed'] = True
                if not module.check_mode:
                    if module.params['backup']:
                        result['backup_file'] = module.backup_local(module.params['path'])
                    tree.write(xml_file, xml_declaration=True, encoding='UTF-8', pretty_print=module.params['pretty_print'])

    elif module.params['xmlstring']:
        result['xmlstring'] = xml_string
        # NOTE: Modifying a string is not considered a change !
        if xml_string != module.params['xmlstring']:
            result['changed'] = True

    module.exit_json(**result)
|
||||
|
||||
|
||||
def finish(module, tree, xpath, namespaces, changed=False, msg='', hitcount=0, matches=tuple()):
    """Assemble the module result, persist the tree when changed, and exit.

    NOTE(review): the 'changed' parameter is not used for the reported
    status — 'changed' is recomputed via has_changed(tree) against the
    module-global orig_doc; the parameter only reflects the caller's intent.
    """

    result = dict(
        actions=dict(
            xpath=xpath,
            namespaces=namespaces,
            state=module.params['state']
        ),
        changed=has_changed(tree),
    )

    if module.params['count'] or hitcount:
        result['count'] = hitcount

    if module.params['print_match'] or matches:
        result['matches'] = matches

    if msg:
        result['msg'] = msg

    if result['changed']:
        if module._diff:
            # Diff output is always pretty-printed, independent of the option.
            result['diff'] = dict(
                before=etree.tostring(orig_doc, xml_declaration=True, encoding='UTF-8', pretty_print=True),
                after=etree.tostring(tree, xml_declaration=True, encoding='UTF-8', pretty_print=True),
            )

        if module.params['path'] and not module.check_mode:
            if module.params['backup']:
                result['backup_file'] = module.backup_local(module.params['path'])
            tree.write(module.params['path'], xml_declaration=True, encoding='UTF-8', pretty_print=module.params['pretty_print'])

    if module.params['xmlstring']:
        result['xmlstring'] = etree.tostring(tree, xml_declaration=True, encoding='UTF-8', pretty_print=module.params['pretty_print'])

    module.exit_json(**result)
|
||||
|
||||
|
||||
def main():
    """Module entry point: validate parameters, load the XML document and
    dispatch to the requested query/modification helper.

    Each helper called below (do_print_match, count_nodes, get_element_attr,
    get_element_text, delete_xpath_target, set_target_children,
    add_target_children, set_target, ensure_xpath_exists, make_pretty) is
    expected to exit the module itself via finish()/exit_json(), so
    statement order here doubles as dispatch priority; falling through to
    the end means no action matched and the module fails.
    """
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='path', aliases=['dest', 'file']),
            xmlstring=dict(type='str'),
            xpath=dict(type='str'),
            namespaces=dict(type='dict', default={}),
            state=dict(type='str', default='present', choices=['absent', 'present'], aliases=['ensure']),
            value=dict(type='raw'),
            attribute=dict(type='raw'),
            add_children=dict(type='list'),
            set_children=dict(type='list'),
            count=dict(type='bool', default=False),
            print_match=dict(type='bool', default=False),
            pretty_print=dict(type='bool', default=False),
            content=dict(type='str', choices=['attribute', 'text']),
            input_type=dict(type='str', default='yaml', choices=['xml', 'yaml']),
            backup=dict(type='bool', default=False),
            strip_cdata_tags=dict(type='bool', default=False),
            insertbefore=dict(type='bool', default=False),
            insertafter=dict(type='bool', default=False),
        ),
        supports_check_mode=True,
        required_by=dict(
            add_children=['xpath'],
            # TODO: Reinstate this in Ansible v2.12 when we have deprecated the incorrect use below
            # attribute=['value'],
            content=['xpath'],
            set_children=['xpath'],
            value=['xpath'],
        ),
        required_if=[
            ['count', True, ['xpath']],
            ['print_match', True, ['xpath']],
            ['insertbefore', True, ['xpath']],
            ['insertafter', True, ['xpath']],
        ],
        required_one_of=[
            ['path', 'xmlstring'],
            ['add_children', 'content', 'count', 'pretty_print', 'print_match', 'set_children', 'value'],
        ],
        mutually_exclusive=[
            ['add_children', 'content', 'count', 'print_match', 'set_children', 'value'],
            ['path', 'xmlstring'],
            ['insertbefore', 'insertafter'],
        ],
    )

    # Unpack parameters into locals. List/dict-valued params are converted
    # to unicode so lxml (which rejects bytes with an encoding declaration)
    # handles them consistently on Python 2.
    # NOTE(review): some of these locals (e.g. backup) are not used below;
    # finish() re-reads them via module.params.
    xml_file = module.params['path']
    xml_string = module.params['xmlstring']
    xpath = module.params['xpath']
    namespaces = module.params['namespaces']
    state = module.params['state']
    value = json_dict_bytes_to_unicode(module.params['value'])
    attribute = module.params['attribute']
    set_children = json_dict_bytes_to_unicode(module.params['set_children'])
    add_children = json_dict_bytes_to_unicode(module.params['add_children'])
    pretty_print = module.params['pretty_print']
    content = module.params['content']
    input_type = module.params['input_type']
    print_match = module.params['print_match']
    count = module.params['count']
    backup = module.params['backup']
    strip_cdata_tags = module.params['strip_cdata_tags']
    insertbefore = module.params['insertbefore']
    insertafter = module.params['insertafter']

    # Check if we have lxml 2.3.0 or newer installed
    if not HAS_LXML:
        module.fail_json(msg=missing_required_lib("lxml"), exception=LXML_IMP_ERR)
    elif LooseVersion('.'.join(to_native(f) for f in etree.LXML_VERSION)) < LooseVersion('2.3.0'):
        module.fail_json(msg='The xml ansible module requires lxml 2.3.0 or newer installed on the managed machine')
    elif LooseVersion('.'.join(to_native(f) for f in etree.LXML_VERSION)) < LooseVersion('3.0.0'):
        # Old lxml works but may reorder attributes; warn instead of failing.
        module.warn('Using lxml version lower than 3.0.0 does not guarantee predictable element attribute order.')

    # Report wrongly used attribute parameter when using content=attribute
    # TODO: Remove this in Ansible v2.12 (and reinstate strict parameter test above) and remove the integration test example
    if content == 'attribute' and attribute is not None:
        module.deprecate("Parameter 'attribute=%s' is ignored when using 'content=attribute' only 'xpath' is used. Please remove entry." % attribute, '2.12')

    # Check if the file exists
    # Source precedence: an inline xmlstring wins over a file path (the two
    # are mutually exclusive anyway, enforced above).
    if xml_string:
        infile = BytesIO(to_bytes(xml_string, errors='surrogate_or_strict'))
    elif os.path.isfile(xml_file):
        infile = open(xml_file, 'rb')
    else:
        module.fail_json(msg="The target XML source '%s' does not exist." % xml_file)

    # Parse and evaluate xpath expression
    # Compiling the expression up front surfaces syntax errors with a clear
    # message before any document parsing happens.
    if xpath is not None:
        try:
            etree.XPath(xpath)
        except etree.XPathSyntaxError as e:
            module.fail_json(msg="Syntax error in xpath expression: %s (%s)" % (xpath, e))
        except etree.XPathEvalError as e:
            module.fail_json(msg="Evaluation error in xpath expression: %s (%s)" % (xpath, e))

    # Try to parse in the target XML file
    try:
        # remove_blank_text is tied to pretty_print so reserialization does
        # not accumulate whitespace-only text nodes.
        parser = etree.XMLParser(remove_blank_text=pretty_print, strip_cdata=strip_cdata_tags)
        doc = etree.parse(infile, parser)
    except etree.XMLSyntaxError as e:
        module.fail_json(msg="Error while parsing document: %s (%s)" % (xml_file or 'xml_string', e))

    # Ensure we have the original copy to compare
    # orig_doc is a module-global so finish()/has_changed() can diff the
    # modified tree against the pristine document.
    global orig_doc
    orig_doc = copy.deepcopy(doc)

    # Read-only queries: each helper exits the module when it runs.
    if print_match:
        do_print_match(module, doc, xpath, namespaces)

    if count:
        count_nodes(module, doc, xpath, namespaces)

    if content == 'attribute':
        get_element_attr(module, doc, xpath, namespaces)
    elif content == 'text':
        get_element_text(module, doc, xpath, namespaces)

    # File exists:
    if state == 'absent':
        # - absent: delete xpath target
        delete_xpath_target(module, doc, xpath, namespaces)

    # - present: carry on

    # children && value both set?: should have already aborted by now
    # add_children && set_children both set?: should have already aborted by now

    # set_children set?
    if set_children:
        set_target_children(module, doc, xpath, namespaces, set_children, input_type)

    # add_children set?
    if add_children:
        add_target_children(module, doc, xpath, namespaces, add_children, input_type, insertbefore, insertafter)

    # No?: Carry on

    # Is the xpath target an attribute selector?
    if value is not None:
        set_target(module, doc, xpath, namespaces, attribute, value)

    # If an xpath was provided, we need to do something with the data
    if xpath is not None:
        ensure_xpath_exists(module, doc, xpath, namespaces)

    # Otherwise only reformat the xml data?
    if pretty_print:
        make_pretty(module, doc)

    # Nothing above matched (and exited): refuse rather than silently no-op.
    module.fail_json(msg="Don't know what to do")
|
||||
|
||||
|
||||
# Standard Ansible module entry point: run only when executed directly.
if __name__ == '__main__':
    main()
|
Loading…
Add table
Add a link
Reference in a new issue