mirror of
https://github.com/ansible-collections/community.general.git
synced 2025-08-22 14:01:42 -07:00
Split modules/network into two parts (#24024)
* Split modules/network into two parts

  Given the dedicated team we have working on Ansible Networking, a clearer split is needed between Networking modules and "things that happen to use the network".

* nmcli to net_tools
* nmcli moved
This commit is contained in:
parent 2d9d1762ba
commit 1c61b9bae7
23 changed files with 12 additions and 12 deletions
  0  lib/ansible/modules/net_tools/basics/__init__.py  Normal file
489  lib/ansible/modules/net_tools/basics/get_url.py  Normal file
@@ -0,0 +1,489 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2012, Jan-Piet Mens <jpmens () gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# see examples/playbooks/get_url.yml

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}


DOCUMENTATION = '''
---
module: get_url
short_description: Downloads files from HTTP, HTTPS, or FTP to node
description:
    - Downloads files from HTTP, HTTPS, or FTP to the remote server. The remote
      server I(must) have direct access to the remote resource.
    - By default, if an environment variable C(<protocol>_proxy) is set on
      the target host, requests will be sent through that proxy. This
      behaviour can be overridden by setting a variable for this task
      (see `setting the environment
      <http://docs.ansible.com/playbooks_environment.html>`_),
      or by using the use_proxy option.
    - HTTP redirects can redirect from HTTP to HTTPS so you should be sure that
      your proxy environment for both protocols is correct.
version_added: "0.6"
options:
  url:
    description:
      - HTTP, HTTPS, or FTP URL in the form (http|https|ftp)://[user[:pass]]@host.domain[:port]/path
    required: true
  dest:
    description:
      - absolute path of where to download the file to.
      - If C(dest) is a directory, either the server provided filename or, if
        none provided, the base name of the URL on the remote server will be
        used. If a directory, C(force) has no effect.
        If C(dest) is a directory, the file will always be
        downloaded (regardless of the force option), but replaced only if the contents changed.
    required: true
  tmp_dest:
    description:
      - absolute path of where temporary file is downloaded to.
      - Defaults to TMPDIR, TEMP or TMP env variables or a platform specific value
      - https://docs.python.org/2/library/tempfile.html#tempfile.tempdir
    required: false
    default: ''
    version_added: '2.1'
  force:
    description:
      - If C(yes) and C(dest) is not a directory, will download the file every
        time and replace the file if the contents change. If C(no), the file
        will only be downloaded if the destination does not exist. Generally
        should be C(yes) only for small local files. Prior to 0.6, this module
        behaved as if C(yes) was the default.
    version_added: "0.7"
    required: false
    choices: [ "yes", "no" ]
    default: "no"
    aliases: [ "thirsty" ]
  backup:
    description:
      - Create a backup file including the timestamp information so you can get
        the original file back if you somehow clobbered it incorrectly.
    required: false
    choices: [ "yes", "no" ]
    default: "no"
    version_added: '2.1'
  sha256sum:
    description:
      - If a SHA-256 checksum is passed to this parameter, the digest of the
        destination file will be calculated after it is downloaded to ensure
        its integrity and verify that the transfer completed successfully.
        This option is deprecated. Use 'checksum'.
    version_added: "1.3"
    required: false
    default: null
  checksum:
    description:
      - 'If a checksum is passed to this parameter, the digest of the
        destination file will be calculated after it is downloaded to ensure
        its integrity and verify that the transfer completed successfully.
        Format: <algorithm>:<checksum>, e.g.: checksum="sha256:D98291AC[...]B6DC7B97"
        If you worry about portability, only the sha1 algorithm is available
        on all platforms and python versions. The third party hashlib
        library can be installed for access to additional algorithms.
        Additionally, if a checksum is passed to this parameter, and the file exists under
        the C(dest) location, the destination_checksum would be calculated, and if
        checksum equals destination_checksum, the file download would be skipped
        (unless C(force) is true). '
    version_added: "2.0"
    required: false
    default: null
  use_proxy:
    description:
      - if C(no), it will not use a proxy, even if one is defined in
        an environment variable on the target hosts.
    required: false
    default: 'yes'
    choices: ['yes', 'no']
  validate_certs:
    description:
      - If C(no), SSL certificates will not be validated. This should only be used
        on personally controlled sites using self-signed certificates.
    required: false
    default: 'yes'
    choices: ['yes', 'no']
  timeout:
    description:
      - Timeout in seconds for URL request
    required: false
    default: 10
    version_added: '1.8'
  headers:
    description:
      - 'Add custom HTTP headers to a request in the format "key:value,key:value"'
    required: false
    default: null
    version_added: '2.0'
  url_username:
    description:
      - The username for use in HTTP basic authentication. This parameter can be used
        without C(url_password) for sites that allow empty passwords.
    required: false
    version_added: '1.6'
  url_password:
    description:
      - The password for use in HTTP basic authentication. If the C(url_username)
        parameter is not specified, the C(url_password) parameter will not be used.
    required: false
    version_added: '1.6'
  force_basic_auth:
    version_added: '2.0'
    description:
      - httplib2, the library used by the uri module only sends authentication information when a webservice
        responds to an initial request with a 401 status. Since some basic auth services do not properly
        send a 401, logins will fail. This option forces the sending of the Basic authentication header
        upon initial request.
    required: false
    choices: [ "yes", "no" ]
    default: "no"
  client_cert:
    required: false
    default: null
    description:
      - PEM formatted certificate chain file to be used for SSL client
        authentication. This file can also include the key as well, and if
        the key is included, I(client_key) is not required
    version_added: 2.4
  client_key:
    required: false
    default: null
    description:
      - PEM formatted file that contains your private key to be used for SSL
        client authentication. If I(client_cert) contains both the certificate
        and key, this option is not required.
    version_added: 2.4
  others:
    description:
      - all arguments accepted by the M(file) module also work here
    required: false
# informational: requirements for nodes
requirements: [ ]
extends_documentation_fragment:
    - files
author: "Jan-Piet Mens (@jpmens)"
'''

EXAMPLES='''
- name: download foo.conf
  get_url:
    url: http://example.com/path/file.conf
    dest: /etc/foo.conf
    mode: 0440

- name: download file and force basic auth
  get_url:
    url: http://example.com/path/file.conf
    dest: /etc/foo.conf
    force_basic_auth: yes

- name: download file with custom HTTP headers
  get_url:
    url: http://example.com/path/file.conf
    dest: /etc/foo.conf
    headers: 'key:value,key:value'

- name: download file with check (sha256)
  get_url:
    url: http://example.com/path/file.conf
    dest: /etc/foo.conf
    checksum: sha256:b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c

- name: download file with check (md5)
  get_url:
    url: http://example.com/path/file.conf
    dest: /etc/foo.conf
    checksum: md5:66dffb5228a211e61d6d7ef4a86f5758

- name: download file from a file path
  get_url:
    url: "file:///tmp/afile.txt"
    dest: /tmp/afilecopy.txt
'''

import shutil
import datetime
import re
import tempfile
from ansible.module_utils.six.moves.urllib.parse import urlsplit

# ==============================================================
# url handling

def url_filename(url):
    fn = os.path.basename(urlsplit(url)[2])
    if fn == '':
        return 'index.html'
    return fn

def url_get(module, url, dest, use_proxy, last_mod_time, force, timeout=10, headers=None, tmp_dest=''):
    """
    Download data from the url and store in a temporary file.

    Return (tempfile, info about the request)
    """

    rsp, info = fetch_url(module, url, use_proxy=use_proxy, force=force, last_mod_time=last_mod_time, timeout=timeout, headers=headers)

    if info['status'] == 304:
        module.exit_json(url=url, dest=dest, changed=False, msg=info.get('msg', ''))

    # Exceptions in fetch_url may result in a status -1, this ensures a proper error to the user in all cases
    if info['status'] == -1:
        module.fail_json(msg=info['msg'], url=url, dest=dest)

    if info['status'] != 200 and not url.startswith('file:/') and not (url.startswith('ftp:/') and info.get('msg', '').startswith('OK')):
        module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'], url=url, dest=dest)

    # create a temporary file and copy content to do checksum-based replacement
    if tmp_dest != '':
        # tmp_dest should be an existing dir
        tmp_dest_is_dir = os.path.isdir(tmp_dest)
        if not tmp_dest_is_dir:
            if os.path.exists(tmp_dest):
                module.fail_json(msg="%s is a file but should be a directory." % tmp_dest)
            else:
                module.fail_json(msg="%s directory does not exist." % tmp_dest)

        fd, tempname = tempfile.mkstemp(dir=tmp_dest)
    else:
        fd, tempname = tempfile.mkstemp()

    f = os.fdopen(fd, 'wb')
    try:
        shutil.copyfileobj(rsp, f)
    except Exception:
        err = get_exception()
        os.remove(tempname)
        module.fail_json(msg="failed to create temporary content file: %s" % str(err))
    f.close()
    rsp.close()
    return tempname, info

def extract_filename_from_headers(headers):
    """
    Extracts a filename from the given dict of HTTP headers.

    Looks for the content-disposition header and applies a regex.
    Returns the filename if successful, else None."""
    cont_disp_regex = 'attachment; ?filename="?([^"]+)'
    res = None

    if 'content-disposition' in headers:
        cont_disp = headers['content-disposition']
        match = re.match(cont_disp_regex, cont_disp)
        if match:
            res = match.group(1)
            # Try preventing any funny business.
            res = os.path.basename(res)

    return res


# ==============================================================
# main

def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        url = dict(required=True),
        dest = dict(required=True, type='path'),
        backup = dict(default=False, type='bool'),
        sha256sum = dict(default=''),
        checksum = dict(default=''),
        timeout = dict(required=False, type='int', default=10),
        headers = dict(required=False, default=None),
        tmp_dest = dict(required=False, default='', type='path'),
    )

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec = argument_spec,
        add_file_common_args=True
    )

    url = module.params['url']
    dest = module.params['dest']
    backup = module.params['backup']
    force = module.params['force']
    sha256sum = module.params['sha256sum']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    tmp_dest = module.params['tmp_dest']

    # Parse headers to dict
    if module.params['headers']:
        try:
            headers = dict(item.split(':', 1) for item in module.params['headers'].split(','))
        except:
            module.fail_json(msg="The header parameter requires a key:value,key:value syntax to be properly parsed.")
    else:
        headers = None

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # workaround for usage of deprecated sha256sum parameter
    if sha256sum != '':
        checksum = 'sha256:%s' % (sha256sum)

    # checksum specified, parse for algorithm and checksum
    if checksum != '':
        try:
            algorithm, checksum = checksum.rsplit(':', 1)
            # Remove any non-alphanumeric characters, including the infamous
            # Unicode zero-width space
            checksum = re.sub(r'\W+', '', checksum).lower()
            # Ensure the checksum portion is a hexdigest
            int(checksum, 16)
        except ValueError:
            module.fail_json(msg="The checksum parameter has to be in format <algorithm>:<checksum>")

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum == destination_checksum:
                module.exit_json(msg="file already exists", dest=dest, url=url, changed=False)

            checksum_mismatch = True

        # Not forcing redownload, unless checksum does not match
        if not force and not checksum_mismatch:
            # allow file attribute changes
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            changed = module.set_fs_attributes_if_different(file_args, False)

            if changed:
                module.exit_json(msg="file already exists but file attributes changed", dest=dest, url=url, changed=changed)
            module.exit_json(msg="file already exists", dest=dest, url=url, changed=changed)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

        # If the checksum does not match we have to force the download
        # because last_mod_time may be newer than on remote
        if checksum_mismatch:
            force = True

    # download to tmpsrc
    tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force, timeout, headers, tmp_dest)

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)

    checksum_src = None
    checksum_dest = None

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        os.remove(tmpsrc)
        module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'])
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s not readable" % (tmpsrc))
    checksum_src = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not writable" % (dest))
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not readable" % (dest))
        checksum_dest = module.sha1(dest)
    else:
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not writable" % (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            shutil.copyfile(tmpsrc, dest)
        except Exception:
            err = get_exception()
            os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, str(err)))
        changed = True
    else:
        changed = False

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            os.remove(dest)
            module.fail_json(msg="The checksum for %s did not match %s; it was %s." % (dest, checksum, destination_checksum))

    os.remove(tmpsrc)

    # allow file attribute changes
    module.params['path'] = dest
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = dest
    changed = module.set_fs_attributes_if_different(file_args, changed)

    # Backwards compat only. We'll return None on FIPS enabled systems
    try:
        md5sum = module.md5(dest)
    except ValueError:
        md5sum = None

    res_args = dict(
        url = url, dest = dest, src = tmpsrc, md5sum = md5sum, checksum_src = checksum_src,
        checksum_dest = checksum_dest, changed = changed, msg = info.get('msg', ''), status_code=info.get('status','')
    )
    if backup_file:
        res_args['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**res_args)

# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
    main()
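The checksum logic above is the heart of get_url's idempotence: the option is documented as an <algorithm>:<hexdigest> pair, main() splits it with rsplit(':', 1), strips stray characters, and skips the download when the digest of the existing file already matches. A minimal standalone sketch of that verification step, with hashlib.new() standing in for module.digest_from_file() (the path and digest below are made-up illustrations, not part of the commit):

import hashlib
import re

def verify_checksum(path, checksum):
    # Same "<algorithm>:<checksum>" format the module documents.
    algorithm, expected = checksum.rsplit(':', 1)
    # Drop non-alphanumeric characters (e.g. a pasted zero-width space) and normalise case.
    expected = re.sub(r'\W+', '', expected).lower()
    int(expected, 16)  # raises ValueError if the digest part is not hex
    digest = hashlib.new(algorithm)
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest() == expected

# verify_checksum('/etc/foo.conf', 'sha256:b5bb9d80...')  -> True only when the file matches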
 98  lib/ansible/modules/net_tools/basics/slurp.py  Normal file
@@ -0,0 +1,98 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}


DOCUMENTATION = '''
---
module: slurp
version_added: historical
short_description: Slurps a file from remote nodes
description:
    - This module works like M(fetch). It is used for fetching a base64-
      encoded blob containing the data in a remote file.
options:
  src:
    description:
      - The file on the remote system to fetch. This I(must) be a file, not a
        directory.
    required: true
    default: null
    aliases: []
notes:
    - This module returns an 'in memory' base64 encoded version of the file, take into account that this will require at least twice the RAM as the
      original file size.
    - "See also: M(fetch)"
requirements: []
author:
    - "Ansible Core Team"
    - "Michael DeHaan"
'''

EXAMPLES = '''
# Find out what the remote machine's mounts are:
- slurp:
    src: /proc/mounts
  register: mounts

- debug:
    msg: "{{ mounts['content'] | b64decode }}"

# From the commandline, find the pid of the remote machine's sshd
# $ ansible host -m slurp -a 'src=/var/run/sshd.pid'
# host | SUCCESS => {
#     "changed": false,
#     "content": "MjE3OQo=",
#     "encoding": "base64",
#     "source": "/var/run/sshd.pid"
# }
# $ echo MjE3OQo= | base64 -d
# 2179
'''

import base64

def main():
    module = AnsibleModule(
        argument_spec = dict(
            src = dict(required=True, aliases=['path'], type='path'),
        ),
        supports_check_mode=True
    )
    source = module.params['src']

    if not os.path.exists(source):
        module.fail_json(msg="file not found: %s" % source)
    if not os.access(source, os.R_OK):
        module.fail_json(msg="file is not readable: %s" % source)

    data = base64.b64encode(open(source, 'rb').read())

    module.exit_json(content=data, source=source, encoding='base64')

# import module snippets
from ansible.module_utils.basic import *

if __name__ == '__main__':
    main()
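slurp itself is tiny: it base64-encodes the file on the remote node and returns the blob, and the playbook reverses that with the b64decode filter. A rough sketch of the same round trip in plain Python, using the /proc/mounts path from the EXAMPLES above (illustration only, not part of the committed file):

import base64

# Remote side: what the module stores in the 'content' return value.
with open('/proc/mounts', 'rb') as f:
    result = {'content': base64.b64encode(f.read()),
              'source': '/proc/mounts',
              'encoding': 'base64'}

# Controller side: what "{{ mounts['content'] | b64decode }}" effectively does.
print(base64.b64decode(result['content']).decode('utf-8'))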
514  lib/ansible/modules/net_tools/basics/uri.py  Normal file
@@ -0,0 +1,514 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2013, Romeo Theriault <romeot () hawaii.edu>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# see examples/playbooks/uri.yml

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}


DOCUMENTATION = '''
---
module: uri
short_description: Interacts with webservices
description:
    - Interacts with HTTP and HTTPS web services and supports Digest, Basic and WSSE
      HTTP authentication mechanisms.
version_added: "1.1"
options:
  url:
    description:
      - HTTP or HTTPS URL in the form (http|https)://host.domain[:port]/path
    required: true
    default: null
  dest:
    description:
      - path of where to download the file to (if desired). If I(dest) is a
        directory, the basename of the file on the remote server will be used.
    required: false
    default: null
  user:
    description:
      - username for the module to use for Digest, Basic or WSSE authentication.
    required: false
    default: null
  password:
    description:
      - password for the module to use for Digest, Basic or WSSE authentication.
    required: false
    default: null
  body:
    description:
      - The body of the http request/response to the web service. If C(body_format) is set
        to 'json' it will take an already formatted JSON string or convert a data structure
        into JSON.
    required: false
    default: null
  body_format:
    description:
      - The serialization format of the body. When set to json, encodes the
        body argument, if needed, and automatically sets the Content-Type header accordingly.
        As of C(2.3) it is possible to override the `Content-Type` header, when
        set to json via the I(headers) option.
    required: false
    choices: [ "raw", "json" ]
    default: raw
    version_added: "2.0"
  method:
    description:
      - The HTTP method of the request or response. It MUST be uppercase.
    required: false
    choices: [ "GET", "POST", "PUT", "HEAD", "DELETE", "OPTIONS", "PATCH", "TRACE", "CONNECT", "REFRESH" ]
    default: "GET"
  return_content:
    description:
      - Whether or not to return the body of the request as a "content" key in
        the dictionary result. If the reported Content-type is
        "application/json", then the JSON is additionally loaded into a key
        called C(json) in the dictionary results.
    required: false
    choices: [ "yes", "no" ]
    default: "no"
  force_basic_auth:
    description:
      - The library used by the uri module only sends authentication information when a webservice
        responds to an initial request with a 401 status. Since some basic auth services do not properly
        send a 401, logins will fail. This option forces the sending of the Basic authentication header
        upon initial request.
    required: false
    choices: [ "yes", "no" ]
    default: "no"
  follow_redirects:
    description:
      - Whether or not the URI module should follow redirects. C(all) will follow all redirects.
        C(safe) will follow only "safe" redirects, where "safe" means that the client is only
        doing a GET or HEAD on the URI to which it is being redirected. C(none) will not follow
        any redirects. Note that C(yes) and C(no) choices are accepted for backwards compatibility,
        where C(yes) is the equivalent of C(all) and C(no) is the equivalent of C(safe). C(yes) and C(no)
        are deprecated and will be removed in some future version of Ansible.
    required: false
    choices: [ "all", "safe", "none" ]
    default: "safe"
  creates:
    description:
      - a filename, when it already exists, this step will not be run.
    required: false
  removes:
    description:
      - a filename, when it does not exist, this step will not be run.
    required: false
  status_code:
    description:
      - A valid, numeric, HTTP status code that signifies success of the
        request. Can also be comma separated list of status codes.
    required: false
    default: 200
  timeout:
    description:
      - The socket level timeout in seconds
    required: false
    default: 30
  HEADER_:
    description:
      - Any parameter starting with "HEADER_" is sent with your request as a header.
        For example, HEADER_Content-Type="application/json" would send the header
        "Content-Type" along with your request with a value of "application/json".
        This option is deprecated as of C(2.1) and may be removed in a future
        release. Use I(headers) instead.
    required: false
    default: null
  headers:
    description:
      - Add custom HTTP headers to a request in the format of a YAML hash. As
        of C(2.3) supplying C(Content-Type) here will override the header
        generated by supplying C(json) for I(body_format).
    required: false
    default: null
    version_added: '2.1'
  others:
    description:
      - all arguments accepted by the M(file) module also work here
    required: false
  validate_certs:
    description:
      - If C(no), SSL certificates will not be validated. This should only be
        set to C(no) on personally controlled sites using self-signed
        certificates. Prior to 1.9.2 the code defaulted to C(no).
    required: false
    default: 'yes'
    choices: ['yes', 'no']
    version_added: '1.9.2'
  client_cert:
    required: false
    default: null
    description:
      - PEM formatted certificate chain file to be used for SSL client
        authentication. This file can also include the key as well, and if
        the key is included, I(client_key) is not required
    version_added: 2.4
  client_key:
    required: false
    default: null
    description:
      - PEM formatted file that contains your private key to be used for SSL
        client authentication. If I(client_cert) contains both the certificate
        and key, this option is not required.
    version_added: 2.4
notes:
  - The dependency on httplib2 was removed in Ansible 2.1
author: "Romeo Theriault (@romeotheriault)"
'''

EXAMPLES = '''
- name: Check that you can connect (GET) to a page and it returns a status 200
  uri:
    url: http://www.example.com

# Check that a page returns a status 200 and fail if the word AWESOME is not
# in the page contents.
- uri:
    url: http://www.example.com
    return_content: yes
  register: webpage

- name: Fail if AWESOME is not in the page content
  fail:
  when: "'AWESOME' not in webpage.content"


- name: Create a JIRA issue
  uri:
    url: https://your.jira.example.com/rest/api/2/issue/
    method: POST
    user: your_username
    password: your_pass
    body: "{{ lookup('file','issue.json') }}"
    force_basic_auth: yes
    status_code: 201
    body_format: json

# Login to a form based webpage, then use the returned cookie to
# access the app in later tasks

- uri:
    url: https://your.form.based.auth.example.com/index.php
    method: POST
    body: "name=your_username&password=your_password&enter=Sign%20in"
    status_code: 302
    headers:
      Content-Type: "application/x-www-form-urlencoded"
  register: login

- uri:
    url: https://your.form.based.auth.example.com/dashboard.php
    method: GET
    return_content: yes
    headers:
      Cookie: "{{login.set_cookie}}"

- name: Queue build of a project in Jenkins
  uri:
    url: "http://{{ jenkins.host }}/job/{{ jenkins.job }}/build?token={{ jenkins.token }}"
    method: GET
    user: "{{ jenkins.user }}"
    password: "{{ jenkins.password }}"
    force_basic_auth: yes
    status_code: 201

'''

import cgi
import datetime
import os
import shutil
import tempfile

try:
    import json
except ImportError:
    import simplejson as json

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
import ansible.module_utils.six as six
from ansible.module_utils._text import to_text
from ansible.module_utils.urls import fetch_url, url_argument_spec


def write_file(module, url, dest, content):
    # create a tempfile with some test content
    fd, tmpsrc = tempfile.mkstemp()
    f = open(tmpsrc, 'wb')
    try:
        f.write(content)
    except Exception:
        err = get_exception()
        os.remove(tmpsrc)
        module.fail_json(msg="failed to create temporary content file: %s" % str(err))
    f.close()

    checksum_src = None
    checksum_dest = None

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s does not exist" % (tmpsrc))
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s not readable" % (tmpsrc))
    checksum_src = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not writable" % (dest))
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not readable" % (dest))
        checksum_dest = module.sha1(dest)
    else:
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination dir %s not writable" % (os.path.dirname(dest)))

    if checksum_src != checksum_dest:
        try:
            shutil.copyfile(tmpsrc, dest)
        except Exception:
            err = get_exception()
            os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, str(err)))

    os.remove(tmpsrc)


def url_filename(url):
    fn = os.path.basename(six.moves.urllib.parse.urlsplit(url)[2])
    if fn == '':
        return 'index.html'
    return fn


def absolute_location(url, location):
    """Attempts to create an absolute URL based on initial URL, and
    next URL, specifically in the case of a ``Location`` header.
    """

    if '://' in location:
        return location

    elif location.startswith('/'):
        parts = six.moves.urllib.parse.urlsplit(url)
        base = url.replace(parts[2], '')
        return '%s%s' % (base, location)

    elif not location.startswith('/'):
        base = os.path.dirname(url)
        return '%s/%s' % (base, location)

    else:
        return location


def uri(module, url, dest, body, body_format, method, headers, socket_timeout):
    # if dest is set and is a directory, let's check if we get redirected and
    # set the filename from that url
    redirected = False
    redir_info = {}
    r = {}
    if dest is not None:
        # Stash follow_redirects, in this block we don't want to follow
        # we'll reset back to the supplied value soon
        follow_redirects = module.params['follow_redirects']
        module.params['follow_redirects'] = False
        dest = os.path.expanduser(dest)
        if os.path.isdir(dest):
            # first check if we are redirected to a file download
            _, redir_info = fetch_url(module, url, data=body,
                                      headers=headers,
                                      method=method,
                                      timeout=socket_timeout)
            # if we are redirected, update the url with the location header,
            # and update dest with the new url filename
            if redir_info['status'] in (301, 302, 303, 307):
                url = redir_info['location']
                redirected = True
            dest = os.path.join(dest, url_filename(url))
        # if destination file already exists, only download if file newer
        if os.path.exists(dest):
            t = datetime.datetime.utcfromtimestamp(os.path.getmtime(dest))
            tstamp = t.strftime('%a, %d %b %Y %H:%M:%S +0000')
            headers['If-Modified-Since'] = tstamp

        # Reset follow_redirects back to the stashed value
        module.params['follow_redirects'] = follow_redirects

    resp, info = fetch_url(module, url, data=body, headers=headers,
                           method=method, timeout=socket_timeout)

    try:
        content = resp.read()
    except AttributeError:
        # there was no content, but the error read()
        # may have been stored in the info as 'body'
        content = info.pop('body', '')

    r['redirected'] = redirected or info['url'] != url
    r.update(redir_info)
    r.update(info)

    return r, content, dest


def main():
    argument_spec = url_argument_spec()
    argument_spec.update(dict(
        dest = dict(required=False, default=None, type='path'),
        url_username = dict(required=False, default=None, aliases=['user']),
        url_password = dict(required=False, default=None, aliases=['password'], no_log=True),
        body = dict(required=False, default=None, type='raw'),
        body_format = dict(required=False, default='raw', choices=['raw', 'json']),
        method = dict(required=False, default='GET', choices=['GET', 'POST', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'PATCH', 'TRACE', 'CONNECT', 'REFRESH']),
        return_content = dict(required=False, default='no', type='bool'),
        follow_redirects = dict(required=False, default='safe', choices=['all', 'safe', 'none', 'yes', 'no']),
        creates = dict(required=False, default=None, type='path'),
        removes = dict(required=False, default=None, type='path'),
        status_code = dict(required=False, default=[200], type='list'),
        timeout = dict(required=False, default=30, type='int'),
        headers = dict(required=False, type='dict', default={})
    ))

    module = AnsibleModule(
        argument_spec=argument_spec,
        check_invalid_arguments=False,
        add_file_common_args=True
    )

    url = module.params['url']
    body = module.params['body']
    body_format = module.params['body_format'].lower()
    method = module.params['method']
    dest = module.params['dest']
    return_content = module.params['return_content']
    creates = module.params['creates']
    removes = module.params['removes']
    status_code = [int(x) for x in list(module.params['status_code'])]
    socket_timeout = module.params['timeout']

    dict_headers = module.params['headers']

    if body_format == 'json':
        # Encode the body unless its a string, then assume it is pre-formatted JSON
        if not isinstance(body, six.string_types):
            body = json.dumps(body)
        lower_header_keys = [key.lower() for key in dict_headers]
        if 'content-type' not in lower_header_keys:
            dict_headers['Content-Type'] = 'application/json'

    # Grab all the http headers. Need this hack since passing multi-values is
    # currently a bit ugly. (e.g. headers='{"Content-Type":"application/json"}')
    for key, value in six.iteritems(module.params):
        if key.startswith("HEADER_"):
            module.deprecate('Supplying headers via HEADER_* is deprecated and '
                             'will be removed in a future version. Please use '
                             '`headers` to supply headers for the request')
            skey = key.replace("HEADER_", "")
            dict_headers[skey] = value

    if creates is not None:
        # do not run the command if the line contains creates=filename
        # and the filename already exists. This allows idempotence
        # of uri executions.
        if os.path.exists(creates):
            module.exit_json(stdout="skipped, since %s exists" % creates, changed=False, rc=0)

    if removes is not None:
        # do not run the command if the line contains removes=filename
        # and the filename does not exist. This allows idempotence
        # of uri executions.
        if not os.path.exists(removes):
            module.exit_json(stdout="skipped, since %s does not exist" % removes, changed=False, rc=0)

    # Make the request
    resp, content, dest = uri(module, url, dest, body, body_format, method,
                              dict_headers, socket_timeout)
    resp['status'] = int(resp['status'])

    # Write the file out if requested
    if dest is not None:
        if resp['status'] == 304:
            changed = False
        else:
            write_file(module, url, dest, content)
            # allow file attribute changes
            changed = True
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            changed = module.set_fs_attributes_if_different(file_args, changed)
        resp['path'] = dest
    else:
        changed = False

    # Transmogrify the headers, replacing '-' with '_', since variables dont
    # work with dashes.
    # In python3, the headers are title cased. Lowercase them to be
    # compatible with the python2 behaviour.
    uresp = {}
    for key, value in six.iteritems(resp):
        ukey = key.replace("-", "_").lower()
        uresp[ukey] = value

    try:
        uresp['location'] = absolute_location(url, uresp['location'])
    except KeyError:
        pass

    # Default content_encoding to try
    content_encoding = 'utf-8'
    if 'content_type' in uresp:
        content_type, params = cgi.parse_header(uresp['content_type'])
        if 'charset' in params:
            content_encoding = params['charset']
        u_content = to_text(content, encoding=content_encoding)
        if 'application/json' in content_type or 'text/json' in content_type:
            try:
                js = json.loads(u_content)
                uresp['json'] = js
            except:
                pass
    else:
        u_content = to_text(content, encoding=content_encoding)

    if resp['status'] not in status_code:
        uresp['msg'] = 'Status code was not %s: %s' % (status_code, uresp.get('msg', ''))
        module.fail_json(content=u_content, **uresp)
    elif return_content:
        module.exit_json(changed=changed, content=u_content, **uresp)
    else:
        module.exit_json(changed=changed, **uresp)


if __name__ == '__main__':
    main()
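Two details of uri's post-processing are easy to miss: response header names are flattened into result keys by replacing '-' with '_' and lowercasing, and a relative Location header is resolved to an absolute URL before being returned. A rough standalone sketch of both steps (the URL and headers are invented examples; the committed module uses six.moves.urllib.parse for Python 2/3 compatibility rather than urllib.parse directly):

import os
from urllib.parse import urlsplit

def normalise_headers(resp_headers):
    # 'Content-Type' -> 'content_type', matching the keys uri returns.
    return dict((k.replace('-', '_').lower(), v) for k, v in resp_headers.items())

def absolute_location(url, location):
    if '://' in location:
        return location
    if location.startswith('/'):
        return url.replace(urlsplit(url)[2], '') + location
    return '%s/%s' % (os.path.dirname(url), location)

print(normalise_headers({'Content-Type': 'text/html', 'Location': '/dashboard.php'}))
# -> {'content_type': 'text/html', 'location': '/dashboard.php'}
print(absolute_location('https://your.form.based.auth.example.com/index.php', '/dashboard.php'))
# -> https://your.form.based.auth.example.com/dashboard.php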