From 6221a2740f5c3023c817d13e4a564f301ed3bc73 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 28 Feb 2014 14:17:07 -0600 Subject: [PATCH 01/29] Updating files for new upstream release 1.5.0 --- CHANGELOG.md | 2 +- RELEASES.txt | 2 +- packaging/debian/changelog | 4 ++-- packaging/rpm/ansible.spec | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8a97e55a48..1ff78020e6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,7 @@ Ansible Changes By Release ========================== -## 1.5 "Love Walks In" - Release pending! +## 1.5 "Love Walks In" - February 28, 2014 Major features/changes: diff --git a/RELEASES.txt b/RELEASES.txt index 6038f9a764..6335829881 100644 --- a/RELEASES.txt +++ b/RELEASES.txt @@ -2,7 +2,7 @@ Ansible Releases at a Glance ============================ 1.6 "The Cradle Will Rock" - NEXT -1.5 "Love Walks In" -------- PENDING +1.5 "Love Walks In" -------- 02-28-2014 1.4.5 "Could This Be Magic?" - 02-12-2014 1.4.4 "Could This Be Magic?" - 01-06-2014 1.4.3 "Could This Be Magic?" - 12-20-2013 diff --git a/packaging/debian/changelog b/packaging/debian/changelog index a29f156753..c009bebb37 100644 --- a/packaging/debian/changelog +++ b/packaging/debian/changelog @@ -1,8 +1,8 @@ ansible (1.5) unstable; urgency=low - * 1.5 release (PENDING) + * 1.5 release - -- Michael DeHaan Wed, 27 November 2013 15:00:02 -0500 + -- Michael DeHaan Fri, 28 February 2014 -0500 ansible (1.4.5) unstable; urgency=low diff --git a/packaging/rpm/ansible.spec b/packaging/rpm/ansible.spec index 04561b3a5d..c067bbe42e 100644 --- a/packaging/rpm/ansible.spec +++ b/packaging/rpm/ansible.spec @@ -102,8 +102,8 @@ rm -rf %{buildroot} %changelog -* Thu Feb 13 2014 Michael DeHaan - 1.5-0 -* (PENDING) +* Fri Feb 28 2014 Michael DeHaan - 1.5-0 +- Release 1.5.0 * Wed Feb 12 2014 Michael DeHaan - 1.4.5 * Release 1.4.5 From d3b452ecb12262dbe055d0d092b4d4b5fc3d9195 Mon Sep 17 00:00:00 2001 From: jctanner Date: Wed, 5 Mar 2014 12:58:05 -0500 Subject: [PATCH 02/29] Merge pull request #5872 from tomdymond/add-ignoreerror-to-sysctl Add option to enable the sysctl -e option --- library/system/sysctl | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/library/system/sysctl b/library/system/sysctl index 2e3ce767e2..97e5bc5e6c 100644 --- a/library/system/sysctl +++ b/library/system/sysctl @@ -45,6 +45,11 @@ options: - Whether the entry should be present or absent in the sysctl file. choices: [ "present", "absent" ] default: present + ignoreerrors: + description: + - Use this option to ignore errors about unknown keys. 
+ choices: [ "yes", "no" ] + default: no reload: description: - If C(yes), performs a I(/sbin/sysctl -p) if the C(sysctl_file) is @@ -214,8 +219,12 @@ class SysctlModule(object): # freebsd doesn't support -p, so reload the sysctl service rc,out,err = self.module.run_command('/etc/rc.d/sysctl reload') else: - # system supports reloading via the -p flag to sysctl, so we'll use that - rc,out,err = self.module.run_command([self.sysctl_cmd, '-p', self.sysctl_file]) + # system supports reloading via the -p flag to sysctl, so we'll use that + sysctl_args = [self.sysctl_cmd, '-p', self.sysctl_file] + if self.args['ignoreerrors']: + sysctl_args.insert(1, '-e') + + rc,out,err = self.module.run_command(sysctl_args) if rc != 0: self.module.fail_json(msg="Failed to reload sysctl: %s" % str(out) + str(err)) @@ -296,6 +305,7 @@ def main(): state = dict(default='present', choices=['present', 'absent']), reload = dict(default=True, type='bool'), sysctl_set = dict(default=False, type='bool'), + ignoreerrors = dict(default=False, type='bool'), sysctl_file = dict(default='/etc/sysctl.conf') ), supports_check_mode=True From 60055348ba58f1f67e61bdf4f7baca0ff7465007 Mon Sep 17 00:00:00 2001 From: Iordan Iordanov Date: Mon, 24 Feb 2014 16:05:03 -0500 Subject: [PATCH 03/29] Add support for checking host against global known host files. --- lib/ansible/runner/connection_plugins/ssh.py | 63 ++++++++++++-------- 1 file changed, 37 insertions(+), 26 deletions(-) diff --git a/lib/ansible/runner/connection_plugins/ssh.py b/lib/ansible/runner/connection_plugins/ssh.py index c5fab75ce1..22189caadf 100644 --- a/lib/ansible/runner/connection_plugins/ssh.py +++ b/lib/ansible/runner/connection_plugins/ssh.py @@ -118,35 +118,46 @@ class Connection(object): def not_in_host_file(self, host): if 'USER' in os.environ: - host_file = os.path.expandvars("~${USER}/.ssh/known_hosts") + user_host_file = os.path.expandvars("~${USER}/.ssh/known_hosts") else: - host_file = "~/.ssh/known_hosts" - host_file = os.path.expanduser(host_file) - if not os.path.exists(host_file): - print "previous known host file not found" - return True - host_fh = open(host_file) - data = host_fh.read() - host_fh.close() - for line in data.split("\n"): - if line is None or line.find(" ") == -1: + user_host_file = "~/.ssh/known_hosts" + user_host_file = os.path.expanduser(user_host_file) + + host_file_list = [] + host_file_list.append(user_host_file) + host_file_list.append("/etc/ssh/ssh_known_hosts") + host_file_list.append("/etc/ssh/ssh_known_hosts2") + + hfiles_not_found = 0 + for hf in host_file_list: + if not os.path.exists(hf): + hfiles_not_found += 1 continue - tokens = line.split() - if tokens[0].find(self.HASHED_KEY_MAGIC) == 0: - # this is a hashed known host entry - try: - (kn_salt,kn_host) = tokens[0][len(self.HASHED_KEY_MAGIC):].split("|",2) - hash = hmac.new(kn_salt.decode('base64'), digestmod=sha1) - hash.update(host) - if hash.digest() == kn_host.decode('base64'): - return False - except: - # invalid hashed host key, skip it + host_fh = open(hf) + data = host_fh.read() + host_fh.close() + for line in data.split("\n"): + if line is None or line.find(" ") == -1: continue - else: - # standard host file entry - if host in tokens[0]: - return False + tokens = line.split() + if tokens[0].find(self.HASHED_KEY_MAGIC) == 0: + # this is a hashed known host entry + try: + (kn_salt,kn_host) = tokens[0][len(self.HASHED_KEY_MAGIC):].split("|",2) + hash = hmac.new(kn_salt.decode('base64'), digestmod=sha1) + hash.update(host) + if hash.digest() == 
kn_host.decode('base64'): + return False + except: + # invalid hashed host key, skip it + continue + else: + # standard host file entry + if host in tokens[0]: + return False + + if (hfiles_not_found == len(host_file_list)): + print "previous known host file not found" return True def exec_command(self, cmd, tmp_path, sudo_user=None, sudoable=False, executable='/bin/sh', in_data=None, su_user=None, su=False): From 53788e4c981d612c87637244690267687e7b67b9 Mon Sep 17 00:00:00 2001 From: Pavel Antonov Date: Thu, 27 Feb 2014 00:27:39 +0400 Subject: [PATCH 04/29] Support docker_py >= 0.3.0, Docker API >= 1.8, extended error reporting --- library/cloud/docker_image | 55 +++++++++++++++++++++++++++++--------- 1 file changed, 43 insertions(+), 12 deletions(-) diff --git a/library/cloud/docker_image b/library/cloud/docker_image index 6d910c8bd7..5d1bebaf7a 100644 --- a/library/cloud/docker_image +++ b/library/cloud/docker_image @@ -104,6 +104,8 @@ Remove image from local docker storage: try: import sys + import re + import json import docker.client from requests.exceptions import * from urlparse import urlparse @@ -122,12 +124,33 @@ class DockerImageManager: docker_url = urlparse(module.params.get('docker_url')) self.client = docker.Client(base_url=docker_url.geturl(), timeout=module.params.get('timeout')) self.changed = False + self.log = [] + self.error_msg = None + + def get_log(self, as_string=True): + return "".join(self.log) if as_string else self.log def build(self): - res = self.client.build(self.path, tag=":".join([self.name, self.tag]), nocache=self.nocache, rm=True) + stream = self.client.build(self.path, tag=':'.join([self.name, self.tag]), nocache=self.nocache, rm=True, stream=True) + success_search = r'Successfully built ([0-9a-f]+)' + image_id = None self.changed = True - return res + for chunk in stream: + chunk_json = json.loads(chunk) + + if 'error' in chunk_json: + self.error_msg = chunk_json['error'] + return None + + if 'stream' in chunk_json: + output = chunk_json['stream'] + self.log.append(output) + match = re.search(success_search, output) + if match: + image_id = match.group(1) + + return image_id def has_changed(self): return self.changed @@ -136,7 +159,13 @@ class DockerImageManager: filtered_images = [] images = self.client.images() for i in images: - if (not self.name or self.name == i['Repository']) and (not self.tag or self.tag == i['Tag']): + # Docker-py version >= 0.3 (Docker API >= 1.8) + if 'RepoTags' in i: + repotag = '%s:%s' % (getattr(self, 'name', ''), getattr(self, 'tags', 'latest')) + if not self.name or repotag in i['RepoTags']: + filtered_images.append(i) + # Docker-py version < 0.3 (Docker API < 1.8) + elif (not self.name or self.name == i['Repository']) and (not self.tag or self.tag == i['Tag']): filtered_images.append(i) return filtered_images @@ -170,25 +199,27 @@ def main(): failed = False image_id = None msg = '' + do_build = False # build image if not exists if state == "present": images = manager.get_images() if len(images) == 0: - image_id, msg = manager.build() - if image_id is None: - failed = True - - + do_build = True + # build image + elif state == "build": + do_build = True # remove image or images elif state == "absent": manager.remove_images() - # build image - elif state == "build": - image_id, msg = manager.build() - if image_id is None: + if do_build: + image_id = manager.build() + if image_id: + msg = "Image builded: %s" % image_id + else: failed = True + msg = "Error: %s\nLog:%s" % (manager.error_msg, manager.get_log()) 
module.exit_json(failed=failed, changed=manager.has_changed(), msg=msg, image_id=image_id) From c85081e9a9b12e3b438ad78d41a8e0505d8d2571 Mon Sep 17 00:00:00 2001 From: Serge van Ginderachter Date: Thu, 27 Feb 2014 18:06:34 +0100 Subject: [PATCH 05/29] Fixes templating of ansible_ssh_host for delegates --- lib/ansible/runner/__init__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/ansible/runner/__init__.py b/lib/ansible/runner/__init__.py index 207862857a..800774cfd1 100644 --- a/lib/ansible/runner/__init__.py +++ b/lib/ansible/runner/__init__.py @@ -308,7 +308,7 @@ class Runner(object): delegate = {} - # allow ansible_ssh_host to be templated + # allow delegated host to be templated delegate['host'] = template.template(self.basedir, host, remote_inject, fail_on_undefined=True) @@ -333,7 +333,10 @@ class Runner(object): this_info = {} # get the real ssh_address for the delegate - delegate['ssh_host'] = this_info.get('ansible_ssh_host', delegate['host']) + # and allow ansible_ssh_host to be templated + delegate['ssh_host'] = template.template(self.basedir, + this_info.get('ansible_ssh_host', this_host), + this_info, fail_on_undefined=True) delegate['port'] = this_info.get('ansible_ssh_port', port) From fbf500ba1f43837d88b0d52bce2bcaacd026d48a Mon Sep 17 00:00:00 2001 From: Tefnet Date: Sun, 2 Mar 2014 00:59:17 +0100 Subject: [PATCH 06/29] missing import in assert module Fixed missing ansible.errors import in assert module --- lib/ansible/runner/action_plugins/assert.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/runner/action_plugins/assert.py b/lib/ansible/runner/action_plugins/assert.py index fbd0b3888a..e217bdb4aa 100644 --- a/lib/ansible/runner/action_plugins/assert.py +++ b/lib/ansible/runner/action_plugins/assert.py @@ -17,7 +17,7 @@ import ansible -from ansible import utils +from ansible import utils, errors from ansible.runner.return_data import ReturnData class ActionModule(object): From 14b4cb60d69b213468f5b8705a7fe4c4a8b072b4 Mon Sep 17 00:00:00 2001 From: Francesc Esplugas Date: Sun, 2 Mar 2014 12:41:07 +0100 Subject: [PATCH 07/29] get rid of newline chars when reading password file --- bin/ansible-vault | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bin/ansible-vault b/bin/ansible-vault index 75250b5e81..902653d40b 100755 --- a/bin/ansible-vault +++ b/bin/ansible-vault @@ -105,6 +105,8 @@ def _read_password(filename): f = open(filename, "rb") data = f.read() f.close + # get rid of newline chars + data = data.strip() return data def execute_create(args, options, parser): From b06f3e5dd68a049c44ce598b89bc4e3f4fb9459a Mon Sep 17 00:00:00 2001 From: Hagai Date: Mon, 3 Mar 2014 14:47:57 +0200 Subject: [PATCH 08/29] Fix incorrect use of copy on list --- lib/ansible/playbook/play.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/playbook/play.py b/lib/ansible/playbook/play.py index 5ea31b526c..e9f00e4702 100644 --- a/lib/ansible/playbook/play.py +++ b/lib/ansible/playbook/play.py @@ -237,7 +237,7 @@ class Play(object): if "tags" in included_dep_vars: included_dep_vars["tags"] = list(set(included_dep_vars["tags"] + passed_vars["tags"])) else: - included_dep_vars["tags"] = passed_vars["tags"].copy() + included_dep_vars["tags"] = passed_vars["tags"][:] dep_vars = utils.combine_vars(passed_vars, dep_vars) dep_vars = utils.combine_vars(role_vars, dep_vars) From 5341040c0580007f3bf73007105a95555c8456e0 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Mon, 3 Mar 2014 10:12:03 -0600 
Subject: [PATCH 09/29] Adding a wait loop to ec2_elb for the initial lb state when registering Fixes #5305 --- library/cloud/ec2_elb | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/library/cloud/ec2_elb b/library/cloud/ec2_elb index ebd90aeda8..c6c61fd199 100644 --- a/library/cloud/ec2_elb +++ b/library/cloud/ec2_elb @@ -157,7 +157,17 @@ class ElbManager: to report the instance in-service""" for lb in self.lbs: if wait: - initial_state = self._get_instance_health(lb) + tries = 1 + while True: + initial_state = self._get_instance_health(lb) + if initial_state: + break + time.sleep(1) + tries += 1 + # FIXME: this should be configurable, but since it didn't + # wait at all before this is at least better + if tries > 10: + self.module.fail_json(msg='failed to find the initial state of the load balancer') if enable_availability_zone: self._enable_availailability_zone(lb) From b14932465d768e634e500961434e6f05a033f2c8 Mon Sep 17 00:00:00 2001 From: Jesse Keating Date: Mon, 3 Mar 2014 13:23:27 -0800 Subject: [PATCH 10/29] Avoid range selection on empty groups This prevents a traceback when the group is empty. Fixes #6258 --- lib/ansible/inventory/__init__.py | 6 ++++++ test/units/TestInventory.py | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py index 67117919d0..8f74d5ea9e 100644 --- a/lib/ansible/inventory/__init__.py +++ b/lib/ansible/inventory/__init__.py @@ -227,6 +227,12 @@ class Inventory(object): given a pattern like foo[0:5], where foo matches hosts, return the first 6 hosts """ + # If there are no hosts to select from, just return the + # empty set. This prevents trying to do selections on an empty set. + # issue#6258 + if not hosts: + return hosts + (loose_pattern, limits) = self._enumeration_info(pat) if not limits: return hosts diff --git a/test/units/TestInventory.py b/test/units/TestInventory.py index 2554d43204..2ae6256e62 100644 --- a/test/units/TestInventory.py +++ b/test/units/TestInventory.py @@ -212,6 +212,11 @@ class TestInventory(unittest.TestCase): inventory.subset('greek[0-2];norse[0]') self.assertEqual(sorted(inventory.list_hosts()), sorted(['zeus','hera','thor'])) + def test_subet_range_empty_group(self): + inventory = self.simple_inventory() + inventory.subset('missing[0]') + self.assertEqual(sorted(inventory.list_hosts()), sorted([])) + def test_subset_filename(self): inventory = self.simple_inventory() inventory.subset('@' + os.path.join(self.test_dir, 'restrict_pattern')) From dd3fa2aebeba08f20d090b8ce61166e6aa214257 Mon Sep 17 00:00:00 2001 From: Jim Kytola Date: Tue, 4 Mar 2014 10:07:32 -0500 Subject: [PATCH 11/29] Removes pluralization of manager tag attribute. 
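The diff below changes a single attribute name, but the effect is easy to miss in review: the image filter builds its repo:tag string with getattr() and a default, so asking for the wrong attribute silently falls back to 'latest'. A minimal sketch of the behaviour, assuming a stand-in object (FakeManager is invented here for illustration; it is not the module's class):

class FakeManager(object):
    def __init__(self, name, tag):
        # the real manager stores the requested tag under .tag (singular)
        self.name = name
        self.tag = tag

m = FakeManager('busybox', '1.0')

# old spelling: no 'tags' attribute exists, so the default always wins
old = '%s:%s' % (getattr(m, 'name', ''), getattr(m, 'tags', 'latest'))
# fixed spelling: reads the attribute that actually exists
new = '%s:%s' % (getattr(m, 'name', ''), getattr(m, 'tag', 'latest'))

print old   # busybox:latest -> never matches a busybox:1.0 entry in RepoTags
print new   # busybox:1.0

With the old spelling the RepoTags comparison only ever looked for name:latest, so images built with any other tag were treated as missing.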
--- library/cloud/docker_image | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/cloud/docker_image b/library/cloud/docker_image index 5d1bebaf7a..5fcdfad573 100644 --- a/library/cloud/docker_image +++ b/library/cloud/docker_image @@ -161,7 +161,7 @@ class DockerImageManager: for i in images: # Docker-py version >= 0.3 (Docker API >= 1.8) if 'RepoTags' in i: - repotag = '%s:%s' % (getattr(self, 'name', ''), getattr(self, 'tags', 'latest')) + repotag = '%s:%s' % (getattr(self, 'name', ''), getattr(self, 'tag', 'latest')) if not self.name or repotag in i['RepoTags']: filtered_images.append(i) # Docker-py version < 0.3 (Docker API < 1.8) From f7c4668a4d95204540f642140cb90167308f7e45 Mon Sep 17 00:00:00 2001 From: Michael Scherer Date: Tue, 4 Mar 2014 17:30:15 +0100 Subject: [PATCH 12/29] Fix wrong module name for exception in nova compute failed: [127.0.0.1] => {"failed": true, "parsed": false} invalid output was: Traceback (most recent call last): File "/tmp/ansible-tmp-1393950384.39-102240090845592/nova_compute", line 1328, in main() File "/tmp/ansible-tmp-1393950384.39-102240090845592/nova_compute", line 241, in main except exc.Unauthorized, e: NameError: global name 'exc' is not defined --- library/cloud/nova_compute | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/library/cloud/nova_compute b/library/cloud/nova_compute index af69322933..d0bc79b1a2 100644 --- a/library/cloud/nova_compute +++ b/library/cloud/nova_compute @@ -238,9 +238,9 @@ def main(): service_type='compute') try: nova.authenticate() - except exc.Unauthorized, e: + except exceptions.Unauthorized, e: module.fail_json(msg = "Invalid OpenStack Nova credentials.: %s" % e.message) - except exc.AuthorizationFailure, e: + except exceptions.AuthorizationFailure, e: module.fail_json(msg = "Unable to authorize user: %s" % e.message) if module.params['state'] == 'present': From 9de3b035a6f29e6be7f41273f3a9bdb5dcf4b652 Mon Sep 17 00:00:00 2001 From: anatoly techtonik Date: Tue, 4 Mar 2014 20:00:18 +0200 Subject: [PATCH 13/29] setup: Fix KeyError: 'ipv4_secondaries' (issue #6274) --- library/system/setup | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/library/system/setup b/library/system/setup index 1c51e52162..941a5dcd31 100644 --- a/library/system/setup +++ b/library/system/setup @@ -1562,13 +1562,13 @@ class LinuxNetwork(Network): iface = words[-1] if iface != device: interfaces[iface] = {} - if not secondary and "ipv4_secondaries" not in interfaces[iface]: - interfaces[iface]["ipv4_secondaries"] = [] if not secondary or "ipv4" not in interfaces[iface]: interfaces[iface]['ipv4'] = {'address': address, 'netmask': netmask, 'network': network} else: + if "ipv4_secondaries" not in interfaces[iface]: + interfaces[iface]["ipv4_secondaries"] = [] interfaces[iface]["ipv4_secondaries"].append({ 'address': address, 'netmask': netmask, @@ -1577,6 +1577,8 @@ class LinuxNetwork(Network): # add this secondary IP to the main device if secondary: + if "ipv4_secondaries" not in interfaces[device]: + interfaces[device]["ipv4_secondaries"] = [] interfaces[device]["ipv4_secondaries"].append({ 'address': address, 'netmask': netmask, From 118d24d171f1615fa9906a26ccf05cd1996755c8 Mon Sep 17 00:00:00 2001 From: Luca Berruti Date: Tue, 4 Mar 2014 18:20:01 +0100 Subject: [PATCH 14/29] fixes #6244 --- lib/ansible/runner/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/runner/__init__.py b/lib/ansible/runner/__init__.py index 
800774cfd1..a809a4aa7e 100644 --- a/lib/ansible/runner/__init__.py +++ b/lib/ansible/runner/__init__.py @@ -633,13 +633,13 @@ class Runner(object): all_failed = False results = [] for x in items: - # use a fresh inject for each item + # use a fresh inject for each item this_inject = inject.copy() this_inject['item'] = x # TODO: this idiom should be replaced with an up-conversion to a Jinja2 template evaluation if isinstance(self.complex_args, basestring): - complex_args = template.template(self.basedir, self.complex_args, inject, convert_bare=True) + complex_args = template.template(self.basedir, self.complex_args, this_inject, convert_bare=True) complex_args = utils.safe_eval(complex_args) if type(complex_args) != dict: raise errors.AnsibleError("args must be a dictionary, received %s" % complex_args) From 76037168b10f7e9ac09a084cbc16a19fd1a1c8c1 Mon Sep 17 00:00:00 2001 From: James Tanner Date: Wed, 5 Mar 2014 14:51:40 -0500 Subject: [PATCH 15/29] Fixes #6298 and adds a sudo unit test for synchronize --- .../runner/action_plugins/synchronize.py | 9 ++++-- test/units/TestSynchronize.py | 30 ++++++++++++++++++- 2 files changed, 36 insertions(+), 3 deletions(-) diff --git a/lib/ansible/runner/action_plugins/synchronize.py b/lib/ansible/runner/action_plugins/synchronize.py index d7c9113f28..c66fcdff3c 100644 --- a/lib/ansible/runner/action_plugins/synchronize.py +++ b/lib/ansible/runner/action_plugins/synchronize.py @@ -173,6 +173,11 @@ class ActionModule(object): if self.runner.noop_on_check(inject): module_items += " CHECKMODE=True" - return self.runner._execute_module(conn, tmp, 'synchronize', - module_items, inject=inject) + # run the module and store the result + result = self.runner._execute_module(conn, tmp, 'synchronize', module_items, inject=inject) + + # reset the sudo property + self.runner.sudo = self.original_sudo + + return result diff --git a/test/units/TestSynchronize.py b/test/units/TestSynchronize.py index 7965f2295e..dfb1a129e5 100644 --- a/test/units/TestSynchronize.py +++ b/test/units/TestSynchronize.py @@ -61,7 +61,35 @@ class TestSynchronize(unittest.TestCase): assert runner.executed_inject['delegate_to'] == "127.0.0.1", "was not delegated to 127.0.0.1" assert runner.executed_args == "dest=root@el6.lab.net:/tmp/bar src=/tmp/foo", "wrong args used" - assert runner.sudo == False, "sudo not set to false" + assert runner.sudo == None, "sudo was not reset to None" + + def test_synchronize_action_sudo(self): + + """ verify the synchronize action plugin unsets and then sets sudo """ + + runner = FakeRunner() + runner.sudo = True + runner.remote_user = "root" + runner.transport = "ssh" + conn = FakeConn() + inject = { + 'inventory_hostname': "el6.lab.net", + 'inventory_hostname_short': "el6", + 'ansible_connection': None, + 'ansible_ssh_user': 'root', + 'delegate_to': None, + 'playbook_dir': '.', + } + + x = Synchronize(runner) + x.setup("synchronize", inject) + x.run(conn, "/tmp", "synchronize", "src=/tmp/foo dest=/tmp/bar", inject) + + assert runner.executed_inject['delegate_to'] == "127.0.0.1", "was not delegated to 127.0.0.1" + assert runner.executed_args == 'dest=root@el6.lab.net:/tmp/bar src=/tmp/foo rsync_path="sudo rsync"', \ + "wrong args used: %s" % runner.executed_args + assert runner.sudo == True, "sudo was not reset to True" + def test_synchronize_action_local(self): From 9ba1245a84370957b0cc1c350725a2e726b3d03a Mon Sep 17 00:00:00 2001 From: James Tanner Date: Wed, 5 Mar 2014 18:49:54 -0500 Subject: [PATCH 16/29] Fixes #6077 decode escaped newline characters in 
content for the copy module --- lib/ansible/runner/action_plugins/copy.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/lib/ansible/runner/action_plugins/copy.py b/lib/ansible/runner/action_plugins/copy.py index 0ee9b6f3ce..79acdaba58 100644 --- a/lib/ansible/runner/action_plugins/copy.py +++ b/lib/ansible/runner/action_plugins/copy.py @@ -54,6 +54,12 @@ class ActionModule(object): raw = utils.boolean(options.get('raw', 'no')) force = utils.boolean(options.get('force', 'yes')) + # content with newlines is going to be escaped to safely load in yaml + # now we need to unescape it so that the newlines are evaluated properly + # when writing the file to disk + if content: + content = content.decode('unicode-escape') + if (source is None and content is None and not 'first_available_file' in inject) or dest is None: result=dict(failed=True, msg="src (or content) and dest are required") return ReturnData(conn=conn, result=result) From 52e809fcb75f181a5e9523bd766c2cabebd69590 Mon Sep 17 00:00:00 2001 From: aresch Date: Wed, 5 Mar 2014 16:25:42 -0800 Subject: [PATCH 17/29] Fix respecting remote_tmp when sudo is used --- lib/ansible/runner/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ansible/runner/__init__.py b/lib/ansible/runner/__init__.py index a809a4aa7e..2a117ef1c7 100644 --- a/lib/ansible/runner/__init__.py +++ b/lib/ansible/runner/__init__.py @@ -1001,11 +1001,11 @@ class Runner(object): basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48)) basetmp = os.path.join(C.DEFAULT_REMOTE_TMP, basefile) - if (self.sudo or self.su) and (self.sudo_user != 'root' or self.su != 'root') and basetmp.startswith('$HOME'): + if (self.sudo or self.su) and (self.sudo_user != 'root' or self.su_user != 'root') and basetmp.startswith('$HOME'): basetmp = os.path.join('/tmp', basefile) cmd = 'mkdir -p %s' % basetmp - if self.remote_user != 'root' or ((self.sudo or self.su) and (self.sudo_user != 'root' or self.su != 'root')): + if self.remote_user != 'root' or ((self.sudo or self.su) and (self.sudo_user != 'root' or self.su_user != 'root')): cmd += ' && chmod a+rx %s' % basetmp cmd += ' && echo %s' % basetmp From e215f564c5e9370431a8a665fecc56b34e6545fd Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Wed, 5 Mar 2014 22:06:59 -0600 Subject: [PATCH 18/29] Create the tempdir in the accelerate module if it doesn't exist Fixes #6047 --- library/utilities/accelerate | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/library/utilities/accelerate b/library/utilities/accelerate index a6e84e3237..6508f1433e 100644 --- a/library/utilities/accelerate +++ b/library/utilities/accelerate @@ -391,7 +391,13 @@ class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler): final_path = None if 'user' in data and data.get('user') != getpass.getuser(): vv("the target user doesn't match this user, we'll move the file into place via sudo") - (fd,out_path) = tempfile.mkstemp(prefix='ansible.', dir=os.path.expanduser('~/.ansible/tmp/')) + tmp_path = os.path.expanduser('~/.ansible/tmp/') + if not os.path.exists(tmp_path): + try: + os.makedirs(tmp_path, 0700) + except: + return dict(failed=True, msg='could not create a temporary directory at %s' % tmp_path) + (fd,out_path) = tempfile.mkstemp(prefix='ansible.', dir=tmp_path) out_fd = os.fdopen(fd, 'w', 0) final_path = data['out_path'] else: From fe07ebc8010b5e701d573857aeedd5ab5e4cc165 Mon Sep 17 00:00:00 2001 From: amree Date: Thu, 6 Mar 2014 12:19:54 +0800 Subject: [PATCH 19/29] MASTER_PORT 
variable for CHANGE MASTER TO command can only accept integer value --- library/database/mysql_replication | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/database/mysql_replication b/library/database/mysql_replication index f18060e955..fdbb379371 100644 --- a/library/database/mysql_replication +++ b/library/database/mysql_replication @@ -325,7 +325,7 @@ def main(): if master_password: chm.append("MASTER_PASSWORD='" + master_password + "'") if master_port: - chm.append("MASTER_PORT='" + master_port + "'") + chm.append("MASTER_PORT=" + master_port) if master_connect_retry: chm.append("MASTER_CONNECT_RETRY='" + master_connect_retry + "'") if master_log_file: From 8e7a384fcccc1c249075b90043939e4b89ed86a6 Mon Sep 17 00:00:00 2001 From: Brian Coca Date: Thu, 6 Mar 2014 08:28:36 -0500 Subject: [PATCH 20/29] tags lists are properly uniqued and joined now, also avoids type issues when passed as list/set or strings Signed-off-by: Brian Coca --- lib/ansible/playbook/play.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/playbook/play.py b/lib/ansible/playbook/play.py index e9f00e4702..78a1d0b08b 100644 --- a/lib/ansible/playbook/play.py +++ b/lib/ansible/playbook/play.py @@ -235,7 +235,7 @@ class Play(object): included_dep_vars = included_role_dep[2] if included_dep_name == dep: if "tags" in included_dep_vars: - included_dep_vars["tags"] = list(set(included_dep_vars["tags"] + passed_vars["tags"])) + included_dep_vars["tags"] = list(set(included_dep_vars["tags"]).union(set(passed_vars["tags"]))) else: included_dep_vars["tags"] = passed_vars["tags"][:] From 27d52fd9cc74ac0a8aac18e13d85563557479ea0 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Thu, 6 Mar 2014 09:44:56 -0600 Subject: [PATCH 21/29] Un-escape newlines in delimiters for assemble module --- lib/ansible/runner/action_plugins/assemble.py | 6 ++++++ library/files/assemble | 2 ++ 2 files changed, 8 insertions(+) diff --git a/lib/ansible/runner/action_plugins/assemble.py b/lib/ansible/runner/action_plugins/assemble.py index eb6faf5dfc..c73964cda6 100644 --- a/lib/ansible/runner/action_plugins/assemble.py +++ b/lib/ansible/runner/action_plugins/assemble.py @@ -39,7 +39,13 @@ class ActionModule(object): for f in sorted(os.listdir(src_path)): fragment = "%s/%s" % (src_path, f) if delimit_me and delimiter: + # en-escape things like new-lines + delimiter = delimiter.decode('unicode-escape') tmp.write(delimiter) + # always make sure there's a newline after the + # delimiter, so lines don't run together + if delimiter[-1] != '\n': + tmp.write('\n') if os.path.isfile(fragment): tmp.write(file(fragment).read()) delimit_me = True diff --git a/library/files/assemble b/library/files/assemble index a8c78256e2..f4a60caf23 100644 --- a/library/files/assemble +++ b/library/files/assemble @@ -107,6 +107,8 @@ def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None): continue fragment = "%s/%s" % (src_path, f) if delimit_me and delimiter: + # un-escape anything like newlines + delimiter = delimiter.decode('unicode-escape') tmp.write(delimiter) # always make sure there's a newline after the # delimiter, so lines don't run together From 3fc8a83e7986b5da205cedb87d3b1ca948e3db12 Mon Sep 17 00:00:00 2001 From: Andrew Resch Date: Thu, 6 Mar 2014 10:24:16 -0800 Subject: [PATCH 22/29] Fix logic checking for both sudo and su, and their respective users --- lib/ansible/runner/__init__.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git 
a/lib/ansible/runner/__init__.py b/lib/ansible/runner/__init__.py index 2a117ef1c7..f9b7d0a304 100644 --- a/lib/ansible/runner/__init__.py +++ b/lib/ansible/runner/__init__.py @@ -420,7 +420,7 @@ class Runner(object): environment_string = self._compute_environment_string(inject) - if tmp.find("tmp") != -1 and (self.sudo or self.su) and (self.sudo_user != 'root' or self.su_user != 'root'): + if tmp.find("tmp") != -1 and (self.sudo and self.sudo_user != 'root') or (self.su and self.su_user != 'root'): # deal with possible umask issues once sudo'ed to other user cmd_chmod = "chmod a+r %s" % remote_module_path self._low_level_exec_command(conn, cmd_chmod, tmp, sudoable=False) @@ -449,7 +449,7 @@ class Runner(object): else: argsfile = self._transfer_str(conn, tmp, 'arguments', args) - if (self.sudo or self.su) and (self.sudo_user != 'root' or self.su_user != 'root'): + if (self.sudo and self.sudo_user != 'root') or (self.su and self.su_user != 'root'): # deal with possible umask issues once sudo'ed to other user cmd_args_chmod = "chmod a+r %s" % argsfile self._low_level_exec_command(conn, cmd_args_chmod, tmp, sudoable=False) @@ -491,7 +491,7 @@ class Runner(object): res = self._low_level_exec_command(conn, cmd, tmp, sudoable=sudoable, in_data=in_data) if tmp.find("tmp") != -1 and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp: - if (self.sudo or self.su) and (self.sudo_user != 'root' or self.su_user != 'root'): + if (self.sudo and self.sudo_user != 'root') or (self.su and self.su_user != 'root'): # not sudoing to root, so maybe can't delete files as that other user # have to clean up temp files as original user in a second step cmd2 = "rm -rf %s >/dev/null 2>&1" % tmp @@ -1001,11 +1001,11 @@ class Runner(object): basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48)) basetmp = os.path.join(C.DEFAULT_REMOTE_TMP, basefile) - if (self.sudo or self.su) and (self.sudo_user != 'root' or self.su_user != 'root') and basetmp.startswith('$HOME'): + if (self.sudo and self.sudo_user != 'root') or (self.su and self.su_user != 'root') and basetmp.startswith('$HOME'): basetmp = os.path.join('/tmp', basefile) cmd = 'mkdir -p %s' % basetmp - if self.remote_user != 'root' or ((self.sudo or self.su) and (self.sudo_user != 'root' or self.su_user != 'root')): + if self.remote_user != 'root' or ((self.sudo and self.sudo_user != 'root') or (self.su and self.su_user != 'root')): cmd += ' && chmod a+rx %s' % basetmp cmd += ' && echo %s' % basetmp From ae1e9a3ec1cbe5a150097b92b2d6345ce06a9809 Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 7 Mar 2014 00:07:10 -0600 Subject: [PATCH 23/29] Properly wrap logical elements together for su/sudo detection --- lib/ansible/runner/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/runner/__init__.py b/lib/ansible/runner/__init__.py index f9b7d0a304..1e168e6ebb 100644 --- a/lib/ansible/runner/__init__.py +++ b/lib/ansible/runner/__init__.py @@ -420,7 +420,7 @@ class Runner(object): environment_string = self._compute_environment_string(inject) - if tmp.find("tmp") != -1 and (self.sudo and self.sudo_user != 'root') or (self.su and self.su_user != 'root'): + if tmp.find("tmp") != -1 and ((self.sudo and self.sudo_user != 'root') or (self.su and self.su_user != 'root')): # deal with possible umask issues once sudo'ed to other user cmd_chmod = "chmod a+r %s" % remote_module_path self._low_level_exec_command(conn, cmd_chmod, tmp, sudoable=False) From c920f78cc3439aa9db0dfb0dd6e72815b73e6afd 
Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Fri, 7 Mar 2014 16:34:04 -0600 Subject: [PATCH 24/29] Fix range issue in inventory and add additional error checking Fixes #6331 --- lib/ansible/inventory/__init__.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py index 8f74d5ea9e..171a4f2a04 100644 --- a/lib/ansible/inventory/__init__.py +++ b/lib/ansible/inventory/__init__.py @@ -208,12 +208,14 @@ class Inventory(object): """ # The regex used to match on the range, which can be [x] or [x-y]. - pattern_re = re.compile("^(.*)\[([0-9]+)(?:(?:-)([0-9]+))?\](.*)$") + pattern_re = re.compile("^(.*)\[([-]?[0-9]+)(?:(?:-)([0-9]+))?\](.*)$") m = pattern_re.match(pattern) if m: (target, first, last, rest) = m.groups() first = int(first) if last: + if first < 0: + raise errors.AnsibleError("invalid range: negative indices cannot be used as the first item in a range") last = int(last) else: last = first @@ -245,10 +247,13 @@ class Inventory(object): right = 0 left=int(left) right=int(right) - if left != right: - return hosts[left:right] - else: - return [ hosts[left] ] + try: + if left != right: + return hosts[left:right] + else: + return [ hosts[left] ] + except IndexError: + raise errors.AnsibleError("no hosts matching the pattern '%s' were found" % pat) def _create_implicit_localhost(self, pattern): new_host = Host(pattern) From 5bc6eafba506fafd47a10c869e3ebf3a757b014c Mon Sep 17 00:00:00 2001 From: James Cammarata Date: Mon, 10 Mar 2014 16:06:52 -0500 Subject: [PATCH 25/29] Validate SSL certs accessed through urllib* * Adds another module utility file which generalizes the access of urls via the urllib* libraries. * Adds a new spec generator for common arguments. * Makes the user-agent string configurable. Fixes #6211 --- examples/ansible.cfg | 14 ++ lib/ansible/constants.py | 4 + lib/ansible/module_utils/basic.py | 10 +- lib/ansible/module_utils/ec2.py | 28 +++ lib/ansible/module_utils/known_hosts.py | 28 +++ lib/ansible/module_utils/rax.py | 29 ++- lib/ansible/module_utils/urls.py | 262 ++++++++++++++++++++++++ library/cloud/ec2_facts | 24 +-- library/database/riak | 21 +- library/monitoring/airbrake_deployment | 41 ++-- library/monitoring/boundary_meter | 61 ++---- library/monitoring/datadog_event | 18 +- library/monitoring/newrelic_deployment | 48 +---- library/monitoring/pagerduty | 35 ++-- library/net_infrastructure/dnsmadeeasy | 32 +-- library/net_infrastructure/netscaler | 40 ++-- library/network/get_url | 116 ++--------- library/notification/flowdock | 31 +-- library/notification/grove | 7 +- library/notification/hipchat | 35 +--- library/packaging/apt_key | 20 +- library/packaging/rpm_key | 17 +- library/source_control/github_hooks | 79 +++---- 23 files changed, 598 insertions(+), 402 deletions(-) create mode 100644 lib/ansible/module_utils/urls.py diff --git a/examples/ansible.cfg b/examples/ansible.cfg index f543b2e4bc..396974bf01 100644 --- a/examples/ansible.cfg +++ b/examples/ansible.cfg @@ -103,6 +103,20 @@ filter_plugins = /usr/share/ansible_plugins/filter_plugins # set to 1 if you don't want colors, or export ANSIBLE_NOCOLOR=1 #nocolor = 1 +# the CA certificate path used for validating SSL certs. 
This path +# should exist on the controlling node, not the target nodes +# common locations: +# RHEL/CentOS: /etc/pki/tls/certs/ca-bundle.crt +# Fedora : /etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem +# Ubuntu : /usr/share/ca-certificates/cacert.org/cacert.org.crt +#ca_file_path = + +# the http user-agent string to use when fetching urls. Some web server +# operators block the default urllib user agent as it is frequently used +# by malicious attacks/scripts, so we set it to something unique to +# avoid issues. +#http_user_agent = ansible-agent + [paramiko_connection] # uncomment this line to cause the paramiko connection plugin to not record new host diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py index 431e6eb742..ed996f3bef 100644 --- a/lib/ansible/constants.py +++ b/lib/ansible/constants.py @@ -144,6 +144,10 @@ DEFAULT_VARS_PLUGIN_PATH = get_config(p, DEFAULTS, 'vars_plugins', ' DEFAULT_FILTER_PLUGIN_PATH = get_config(p, DEFAULTS, 'filter_plugins', 'ANSIBLE_FILTER_PLUGINS', '/usr/share/ansible_plugins/filter_plugins') DEFAULT_LOG_PATH = shell_expand_path(get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', '')) +# URL Arguments for generic module urllib2 use +DEFAULT_HTTP_USER_AGENT = get_config(p, DEFAULTS, 'http_user_agent', 'ANSIBLE_HTTP_USER_AGENT', 'ansible-agent') +DEFAULT_CA_FILE_PATH = shell_expand_path(get_config(p, DEFAULTS, 'ca_file_path', 'ANSIBLE_CA_FILE_PATH', '')) + ANSIBLE_NOCOLOR = get_config(p, DEFAULTS, 'nocolor', 'ANSIBLE_NOCOLOR', None, boolean=True) ANSIBLE_NOCOWS = get_config(p, DEFAULTS, 'nocows', 'ANSIBLE_NOCOWS', None, boolean=True) DISPLAY_SKIPPED_HOSTS = get_config(p, DEFAULTS, 'display_skipped_hosts', 'DISPLAY_SKIPPED_HOSTS', True, boolean=True) diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py index c2be621d4b..fd0b2edfc3 100644 --- a/lib/ansible/module_utils/basic.py +++ b/lib/ansible/module_utils/basic.py @@ -59,6 +59,7 @@ import grp import pwd import platform import errno +import tempfile try: import json @@ -114,6 +115,7 @@ FILE_COMMON_ARGUMENTS=dict( remote_src = dict(), # used by assemble ) + def get_platform(): ''' what's the platform? example: Linux is a platform. ''' return platform.system() @@ -188,7 +190,7 @@ class AnsibleModule(object): os.environ['LANG'] = MODULE_LANG (self.params, self.args) = self._load_params() - self._legal_inputs = [ 'CHECKMODE', 'NO_LOG' ] + self._legal_inputs = ['CHECKMODE', 'NO_LOG'] self.aliases = self._handle_aliases() @@ -571,8 +573,9 @@ class AnsibleModule(object): def _check_invalid_arguments(self): for (k,v) in self.params.iteritems(): - if k in ('CHECKMODE', 'NO_LOG'): - continue + # these should be in legal inputs already + #if k in ('CHECKMODE', 'NO_LOG'): + # continue if k not in self._legal_inputs: self.fail_json(msg="unsupported parameter for module: %s" % k) @@ -1068,4 +1071,3 @@ class AnsibleModule(object): break return '%.2f %s' % (float(size)/ limit, suffix) - diff --git a/lib/ansible/module_utils/ec2.py b/lib/ansible/module_utils/ec2.py index 9156df766b..58291c2d5d 100644 --- a/lib/ansible/module_utils/ec2.py +++ b/lib/ansible/module_utils/ec2.py @@ -1,3 +1,31 @@ +# This code is part of Ansible, but is an independent component. +# This particular file snippet, and this file snippet only, is BSD licensed. +# Modules you write using this snippet, which is embedded dynamically by Ansible +# still belong to the author of the module, and may assign their own license +# to the complete work. 
+# +# Copyright (c), Michael DeHaan , 2012-2013 +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + try: from distutils.version import LooseVersion HAS_LOOSE_VERSION = True diff --git a/lib/ansible/module_utils/known_hosts.py b/lib/ansible/module_utils/known_hosts.py index 000db9d1e6..36f5b87fff 100644 --- a/lib/ansible/module_utils/known_hosts.py +++ b/lib/ansible/module_utils/known_hosts.py @@ -1,3 +1,31 @@ +# This code is part of Ansible, but is an independent component. +# This particular file snippet, and this file snippet only, is BSD licensed. +# Modules you write using this snippet, which is embedded dynamically by Ansible +# still belong to the author of the module, and may assign their own license +# to the complete work. +# +# Copyright (c), Michael DeHaan , 2012-2013 +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ def add_git_host_key(module, url, accept_hostkey=True): """ idempotently add a git url hostkey """ diff --git a/lib/ansible/module_utils/rax.py b/lib/ansible/module_utils/rax.py index 84e5686d24..98623c7d38 100644 --- a/lib/ansible/module_utils/rax.py +++ b/lib/ansible/module_utils/rax.py @@ -1,5 +1,32 @@ -import os +# This code is part of Ansible, but is an independent component. +# This particular file snippet, and this file snippet only, is BSD licensed. +# Modules you write using this snippet, which is embedded dynamically by Ansible +# still belong to the author of the module, and may assign their own license +# to the complete work. +# +# Copyright (c), Michael DeHaan , 2012-2013 +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +import os def rax_argument_spec(): return dict( diff --git a/lib/ansible/module_utils/urls.py b/lib/ansible/module_utils/urls.py new file mode 100644 index 0000000000..f251c6b407 --- /dev/null +++ b/lib/ansible/module_utils/urls.py @@ -0,0 +1,262 @@ +# This code is part of Ansible, but is an independent component. +# This particular file snippet, and this file snippet only, is BSD licensed. +# Modules you write using this snippet, which is embedded dynamically by Ansible +# still belong to the author of the module, and may assign their own license +# to the complete work. +# +# Copyright (c), Michael DeHaan , 2012-2013 +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
+# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +try: + import urllib + HAS_URLLIB = True +except: + HAS_URLLIB = False + +try: + import urllib2 + HAS_URLLIB2 = True +except: + HAS_URLLIB2 = False + +try: + import urlparse + HAS_URLPARSE = True +except: + HAS_URLPARSE = False + +try: + import ssl + HAS_SSL=True +except: + HAS_SSL=False + + +class RequestWithMethod(urllib2.Request): + ''' + Workaround for using DELETE/PUT/etc with urllib2 + Originally contained in library/net_infrastructure/dnsmadeeasy + ''' + + def __init__(self, url, method, data=None, headers={}): + self._method = method + urllib2.Request.__init__(self, url, data, headers) + + def get_method(self): + if self._method: + return self._method + else: + return urllib2.Request.get_method(self) + + +class SSLValidationHandler(urllib2.BaseHandler): + ''' + A custom handler class for SSL validation. + + Based on: + http://stackoverflow.com/questions/1087227/validate-ssl-certificates-with-python + http://techknack.net/python-urllib2-handlers/ + ''' + + def __init__(self, module, hostname, port, ca_cert=None): + self.module = module + self.hostname = hostname + self.port = port + self.ca_cert = ca_cert + + def get_ca_cert(self): + # tries to find a valid CA cert in one of the + # standard locations for the current distribution + + if self.ca_cert and os.path.exists(self.ca_cert): + # the user provided a custom CA cert (ie. one they + # uploaded themselves), so use it + return self.ca_cert + + ca_cert = None + platform = get_platform() + distribution = get_distribution() + if platform == 'Linux': + if distribution in ('Fedora',): + ca_cert = '/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem' + elif distribution in ('RHEL','CentOS','ScientificLinux'): + ca_cert = '/etc/pki/tls/certs/ca-bundle.crt' + elif distribution in ('Ubuntu','Debian'): + ca_cert = '/usr/share/ca-certificates/cacert.org/cacert.org.crt' + elif platform == 'FreeBSD': + ca_cert = '/usr/local/share/certs/ca-root.crt' + elif platform == 'OpenBSD': + ca_cert = '/etc/ssl/cert.pem' + elif platform == 'NetBSD': + ca_cert = '/etc/openssl/certs/ca-cert.pem' + elif platform == 'SunOS': + # FIXME? + pass + elif platform == 'AIX': + # FIXME? + pass + + if ca_cert and os.path.exists(ca_cert): + return ca_cert + elif os.path.exists('/etc/ansible/ca-cert.pem'): + # fall back to a user-deployed cert in a standard + # location if the OS platform one is not available + return '/etc/ansible/ca-cert.pem' + else: + # CA cert isn't available, no validation + return None + + def http_request(self, req): + try: + server_cert = ssl.get_server_certificate((self.hostname, self.port), ca_certs=self.get_ca_cert()) + except ssl.SSLError: + self.module.fail_json(msg='failed to validate the SSL certificate for %s:%s. 
You can use validate_certs=no, however this is unsafe and not recommended' % (self.hostname, self.port)) + return req + + https_request = http_request + + +def url_argument_spec(): + ''' + Creates an argument spec that can be used with any module + that will be requesting content via urllib/urllib2 + ''' + return dict( + url = dict(), + force = dict(default='no', aliases=['thirsty'], type='bool'), + http_agent = dict(default='ansible-httpget'), + use_proxy = dict(default='yes', type='bool'), + validate_certs = dict(default='yes', type='bool'), + ) + + +def fetch_url(module, url, data=None, headers=None, method=None, + use_proxy=False, validate_certs=True, force=False, last_mod_time=None, timeout=10): + ''' + Fetches a file from an HTTP/FTP server using urllib2 + ''' + + if not HAS_URLLIB: + module.fail_json(msg='urllib is not installed') + if not HAS_URLLIB2: + module.fail_json(msg='urllib2 is not installed') + elif not HAS_URLPARSE: + module.fail_json(msg='urlparse is not installed') + + r = None + handlers = [] + info = dict(url=url) + + parsed = urlparse.urlparse(url) + if parsed[0] == 'https': + if not HAS_SSL and validate_certs: + module.fail_json(msg='SSL validation is not available in your version of python. You can use validate_certs=no, however this is unsafe and not recommended') + elif validate_certs: + # do the cert validation + netloc = parsed[1] + if '@' in netloc: + netloc = netloc.split('@', 1)[1] + if ':' in netloc: + hostname, port = netloc.split(':', 1) + else: + hostname = netloc + port = 443 + # create the SSL validation handler and + # add it to the list of handlers + ssl_handler = SSLValidationHandler(module, hostname, port) + handlers.append(ssl_handler) + + if '@' in parsed[1]: + credentials, netloc = parsed[1].split('@', 1) + if ':' in credentials: + username, password = credentials.split(':', 1) + else: + username = credentials + password = '' + parsed = list(parsed) + parsed[1] = netloc + + passman = urllib2.HTTPPasswordMgrWithDefaultRealm() + # this creates a password manager + passman.add_password(None, netloc, username, password) + # because we have put None at the start it will always + # use this username/password combination for urls + # for which `theurl` is a super-url + + authhandler = urllib2.HTTPBasicAuthHandler(passman) + # create the AuthHandler + handlers.append(authhandler) + + #reconstruct url without credentials + url = urlparse.urlunparse(parsed) + + if not use_proxy: + proxyhandler = urllib2.ProxyHandler({}) + handlers.append(proxyhandler) + + opener = urllib2.build_opener(*handlers) + urllib2.install_opener(opener) + + if method: + if method.upper() not in ('OPTIONS','GET','HEAD','POST','PUT','DELETE','TRACE','CONNECT'): + module.fail_json(msg='invalid HTTP request method; %s' % method.upper()) + request = RequestWithMethod(url, method.upper(), data) + else: + request = urllib2.Request(url, data) + + # add the custom agent header, to help prevent issues + # with sites that block the default urllib agent string + request.add_header('User-agent', module.params.get('http_agent')) + + # if we're ok with getting a 304, set the timestamp in the + # header, otherwise make sure we don't get a cached copy + if last_mod_time and not force: + tstamp = last_mod_time.strftime('%a, %d %b %Y %H:%M:%S +0000') + request.add_header('If-Modified-Since', tstamp) + else: + request.add_header('cache-control', 'no-cache') + + # user defined headers now, which may override things we've set above + if headers: + if not isinstance(headers, dict): + 
module.fail_json("headers provided to fetch_url() must be a dict") + for header in headers: + request.add_header(header, headers[header]) + + try: + if sys.version_info < (2,6,0): + # urlopen in python prior to 2.6.0 did not + # have a timeout parameter + r = urllib2.urlopen(request, None) + else: + r = urllib2.urlopen(request, None, timeout) + info.update(r.info()) + info['url'] = r.geturl() # The URL goes in too, because of redirects. + info.update(dict(msg="OK (%s bytes)" % r.headers.get('Content-Length', 'unknown'), status=200)) + except urllib2.HTTPError, e: + info.update(dict(msg=str(e), status=e.code)) + except urllib2.URLError, e: + code = int(getattr(e, 'code', -1)) + info.update(dict(msg="Request failed: %s" % str(e), status=code)) + + return r, info + diff --git a/library/cloud/ec2_facts b/library/cloud/ec2_facts index 1c17fa5b71..c6a6670a58 100644 --- a/library/cloud/ec2_facts +++ b/library/cloud/ec2_facts @@ -41,7 +41,6 @@ EXAMPLES = ''' when: ansible_ec2_instance_type == "t1.micro" ''' -import urllib2 import socket import re @@ -62,7 +61,8 @@ class Ec2Metadata(object): 'us-west-1', 'us-west-2') - def __init__(self, ec2_metadata_uri=None, ec2_sshdata_uri=None, ec2_userdata_uri=None): + def __init__(self, module, ec2_metadata_uri=None, ec2_sshdata_uri=None, ec2_userdata_uri=None): + self.module = module self.uri_meta = ec2_metadata_uri or self.ec2_metadata_uri self.uri_user = ec2_userdata_uri or self.ec2_userdata_uri self.uri_ssh = ec2_sshdata_uri or self.ec2_sshdata_uri @@ -70,12 +70,9 @@ class Ec2Metadata(object): self._prefix = 'ansible_ec2_%s' def _fetch(self, url): - try: - return urllib2.urlopen(url).read() - except urllib2.HTTPError: - return - except urllib2.URLError: - return + self.module.fail_json(msg="url is %s" % url) + (response, info) = fetch_url(self.module, url, force=True) + return response.read() def _mangle_fields(self, fields, uri, filter_patterns=['public-keys-0']): new_fields = {} @@ -150,17 +147,20 @@ class Ec2Metadata(object): return data def main(): - - ec2_facts = Ec2Metadata().run() - ec2_facts_result = dict(changed=False, ansible_facts=ec2_facts) + argument_spec = url_argument_spec() module = AnsibleModule( - argument_spec = dict(), + argument_spec = argument_spec, supports_check_mode = True, ) + + ec2_facts = Ec2Metadata(module).run() + ec2_facts_result = dict(changed=False, ansible_facts=ec2_facts) + module.exit_json(**ec2_facts_result) # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.urls import * main() diff --git a/library/database/riak b/library/database/riak index 53faba6e98..e0a7552f0a 100644 --- a/library/database/riak +++ b/library/database/riak @@ -138,24 +138,13 @@ def main(): while True: if time.time() > timeout: module.fail_json(msg='Timeout, could not fetch Riak stats.') - try: - if sys.version_info<(2,6,0): - stats_raw = urllib2.urlopen( - 'http://%s/stats' % (http_conn), None).read() - else: - stats_raw = urllib2.urlopen( - 'http://%s/stats' % (http_conn), None, 5).read() + (response, info) = fetch_url(module, 'http://%s/stats' % (http_conn), force=True, timeout=5) + if info['status'] == 200: + stats_raw = response.read() break - except urllib2.HTTPError, e: - time.sleep(5) - except urllib2.URLError, e: - time.sleep(5) - except socket.timeout: - time.sleep(5) - except Exception, e: - module.fail_json(msg='Could not fetch Riak stats: %s' % e) + time.sleep(5) -# here we attempt to load those stats, + # here we attempt to load those stats, try: stats = json.loads(stats_raw) except: diff --git 
a/library/monitoring/airbrake_deployment b/library/monitoring/airbrake_deployment index 8a4a834be7..6a83459906 100644 --- a/library/monitoring/airbrake_deployment +++ b/library/monitoring/airbrake_deployment @@ -52,6 +52,13 @@ options: - Optional URL to submit the notification to. Use to send notifications to Airbrake-compliant tools like Errbit. required: false default: https://airbrake.io/deploys + validate_certs: + description: + - If C(no), SSL certificates for the target url will not be validated. This should only be used + on personally controlled sites using self-signed certificates. + required: false + default: 'yes' + choices: ['yes', 'no'] # informational: requirements for nodes requirements: [ urllib, urllib2 ] @@ -64,29 +71,12 @@ EXAMPLES = ''' revision=4.2 ''' -HAS_URLLIB = True -try: - import urllib -except ImportError: - HAS_URLLIB = False - -HAS_URLLIB2 = True -try: - import urllib2 -except ImportError: - HAS_URLLIB2 = False - # =========================================== # Module execution. # def main(): - if not HAS_URLLIB: - module.fail_json(msg="urllib is not installed") - if not HAS_URLLIB2: - module.fail_json(msg="urllib2 is not installed") - module = AnsibleModule( argument_spec=dict( token=dict(required=True), @@ -95,6 +85,7 @@ def main(): repo=dict(required=False), revision=dict(required=False), url=dict(required=False, default='https://api.airbrake.io/deploys.txt') + validate_certs=dict(default='yes', type='bool'), ), supports_check_mode=True ) @@ -123,18 +114,16 @@ def main(): module.exit_json(changed=True) # Send the data to airbrake - try: - req = urllib2.Request(url, urllib.urlencode(params)) - result=urllib2.urlopen(req) - except Exception, e: - module.fail_json(msg="unable to update airbrake via %s?%s : %s" % (url, urllib.urlencode(params), e)) + data = urllib.urlencode(params) + response, info = fetch_url(module, url, data=data, validate_certs=module.params['validate_certs']) + if info['status'] == 200: + module.exit_json(changed=True) else: - if result.code == 200: - module.exit_json(changed=True) - else: - module.fail_json(msg="HTTP result code: %d connecting to %s" % (result.code, url)) + module.fail_json(msg="HTTP result code: %d connecting to %s" % (info['status'], url)) # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.urls import * + main() diff --git a/library/monitoring/boundary_meter b/library/monitoring/boundary_meter index 202dfd03ae..3c9f90a4ce 100644 --- a/library/monitoring/boundary_meter +++ b/library/monitoring/boundary_meter @@ -24,7 +24,6 @@ along with Ansible. If not, see . 
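The modules converted above (ec2_facts, riak, airbrake_deployment) all converge on the same fetch_url calling convention from ansible.module_utils.urls. A minimal sketch of that pattern, assuming the module was built from url_argument_spec() so that use_proxy and validate_certs already sit in module.params; the helper name and URL handling below are illustrative only, not part of the patch:

    def example_get(module, url):
        # fetch_url returns (response, info); info carries 'status' and 'msg'
        response, info = fetch_url(module, url,
                                   validate_certs=module.params['validate_certs'])
        if info['status'] != 200:
            module.fail_json(msg="request to %s failed: %s" % (url, info['msg']))
        return response.read()

    # import module snippets, as the converted modules do at the bottom of the file
    from ansible.module_utils.basic import *
    from ansible.module_utils.urls import *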
import json import datetime -import urllib2 import base64 import os @@ -74,12 +73,6 @@ EXAMPLES=''' ''' -try: - import urllib2 - HAS_URLLIB2 = True -except ImportError: - HAS_URLLIB2 = False - api_host = "api.boundary.com" config_directory = "/etc/bprobe" @@ -101,7 +94,7 @@ def build_url(name, apiid, action, meter_id=None, cert_type=None): elif action == "delete": return "https://%s/%s/meters/%s" % (api_host, apiid, meter_id) -def http_request(name, apiid, apikey, action, meter_id=None, cert_type=None): +def http_request(module, name, apiid, apikey, action, data=None, meter_id=None, cert_type=None): if meter_id is None: url = build_url(name, apiid, action) @@ -111,11 +104,11 @@ def http_request(name, apiid, apikey, action, meter_id=None, cert_type=None): else: url = build_url(name, apiid, action, meter_id, cert_type) - auth = auth_encode(apikey) - request = urllib2.Request(url) - request.add_header("Authorization", "Basic %s" % (auth)) - request.add_header("Content-Type", "application/json") - return request + headers = dict() + headers["Authorization"] = "Basic %s" % auth_encode(apikey) + headers["Content-Type"] = "application/json" + + return fetch_url(module, url, data=data, headers=headers) def create_meter(module, name, apiid, apikey): @@ -126,14 +119,10 @@ def create_meter(module, name, apiid, apikey): module.exit_json(status="Meter " + name + " already exists",changed=False) else: # If it doesn't exist, create it - request = http_request(name, apiid, apikey, action="create") - # A create request seems to need a json body with the name of the meter in it body = '{"name":"' + name + '"}' - request.add_data(body) + response, info = http_request(module, name, apiid, apikey, data=body, action="create") - try: - result = urllib2.urlopen(request) - except urllib2.URLError, e: + if info['status'] != 200: module.fail_json(msg="Failed to connect to api host to create meter") # If the config directory doesn't exist, create it @@ -160,15 +149,13 @@ def create_meter(module, name, apiid, apikey): def search_meter(module, name, apiid, apikey): - request = http_request(name, apiid, apikey, action="search") + response, info = http_request(module, name, apiid, apikey, action="search") - try: - result = urllib2.urlopen(request) - except urllib2.URLError, e: + if info['status'] != 200: module.fail_json("Failed to connect to api host to search for meter") # Return meters - return json.loads(result.read()) + return json.loads(response.read()) def get_meter_id(module, name, apiid, apikey): # In order to delete the meter we need its id @@ -186,16 +173,9 @@ def delete_meter(module, name, apiid, apikey): if meter_id is None: return 1, "Meter does not exist, so can't delete it" else: - action = "delete" - request = http_request(name, apiid, apikey, action, meter_id) - # See http://stackoverflow.com/questions/4511598/how-to-make-http-delete-method-using-urllib2 - # urllib2 only does GET or POST I believe, but here we need delete - request.get_method = lambda: 'DELETE' - - try: - result = urllib2.urlopen(request) - except urllib2.URLError, e: - module.fail_json("Failed to connect to api host to delete meter") + response, info = http_request(module, name, apiid, apikey, action, meter_id) + if info['status'] != 200: + module.fail_json("Failed to delete meter") # Each new meter gets a new key.pem and ca.pem file, so they should be deleted types = ['cert', 'key'] @@ -214,17 +194,14 @@ def download_request(module, name, apiid, apikey, cert_type): if meter_id is not None: action = "certificates" - request = 
http_request(name, apiid, apikey, action, meter_id, cert_type) - - try: - result = urllib2.urlopen(request) - except urllib2.URLError, e: + response, info = http_request(module, name, apiid, apikey, action, meter_id, cert_type) + if info['status'] != 200: module.fail_json("Failed to connect to api host to download certificate") if result: try: cert_file_path = '%s/%s.pem' % (config_directory,cert_type) - body = result.read() + body = response.read() cert_file = open(cert_file_path, 'w') cert_file.write(body) cert_file.close @@ -238,9 +215,6 @@ def download_request(module, name, apiid, apikey, cert_type): def main(): - if not HAS_URLLIB2: - module.fail_json(msg="urllib2 is not installed") - module = AnsibleModule( argument_spec=dict( state=dict(required=True, choices=['present', 'absent']), @@ -268,5 +242,6 @@ def main(): # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.urls import * main() diff --git a/library/monitoring/datadog_event b/library/monitoring/datadog_event index 629e86e98a..878aee6d34 100644 --- a/library/monitoring/datadog_event +++ b/library/monitoring/datadog_event @@ -67,7 +67,6 @@ datadog_event: title="Testing from ansible" text="Test!" ''' import socket -from urllib2 import urlopen, Request, URLError def main(): module = AnsibleModule( @@ -97,8 +96,7 @@ def main(): post_event(module) def post_event(module): - uri = "https://app.datadoghq.com/api/v1/events?api_key=" + \ - module.params['api_key'] + uri = "https://app.datadoghq.com/api/v1/events?api_key=%s" % module.params['api_key'] body = dict( title=module.params['title'], @@ -117,22 +115,20 @@ def post_event(module): json_body = module.jsonify(body) headers = {"Content-Type": "application/json"} - request = Request(uri, json_body, headers, unverifiable=True) - try: - response = urlopen(request) + (response, info) = fetch_url(module, uri, data=json_body, headers=headers) + if info['status'] == 200: response_body = response.read() response_json = module.from_json(response_body) if response_json['status'] == 'ok': module.exit_json(changed=True) else: module.fail_json(msg=response) - - except URLError, e: - module.fail_json(msg="URL error: %s." % e) - except socket.error, e: - module.fail_json(msg="Socket error: %s to %s" % (e, uri)) + else: + module.fail_json(**info) # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.urls import * + main() diff --git a/library/monitoring/newrelic_deployment b/library/monitoring/newrelic_deployment index de64651969..08132722e1 100644 --- a/library/monitoring/newrelic_deployment +++ b/library/monitoring/newrelic_deployment @@ -75,29 +75,12 @@ EXAMPLES = ''' revision=1.0 ''' -HAS_URLLIB = True -try: - import urllib -except ImportError: - HAS_URLLIB = False - -HAS_URLLIB2 = True -try: - import urllib2 -except ImportError: - HAS_URLLIB2 = False - # =========================================== # Module execution. # def main(): - if not HAS_URLLIB: - module.fail_json(msg="urllib is not installed") - if not HAS_URLLIB2: - module.fail_json(msg="urllib2 is not installed") - module = AnsibleModule( argument_spec=dict( token=dict(required=True), @@ -134,29 +117,20 @@ def main(): module.exit_json(changed=True) # Send the data to NewRelic - try: - req = urllib2.Request("https://rpm.newrelic.com/deployments.xml", urllib.urlencode(params)) - req.add_header('x-api-key',module.params["token"]) - result=urllib2.urlopen(req) - # urlopen behaves differently in python 2.4 and 2.6 so we handle - # both cases here. 
In python 2.4 it throws an exception if the - # return code is anything other than a 200. In python 2.6 it - # doesn't throw an exception for any 2xx return codes. In both - # cases we expect newrelic should return a 201 on success. So - # to handle both cases, both the except & else cases below are - # effectively identical. - except Exception, e: - if e.code == 201: - module.exit_json(changed=True) - else: - module.fail_json(msg="unable to update newrelic: %s" % e) + url = "https://rpm.newrelic.com/deployments.xml" + data = urllib.urlencode(params) + headers = { + 'x-api-key': module.params["token"], + } + response, info = fetch_url(module, url, data=data, headers=headers) + if info['status'] in (200, 201): + module.exit_json(changed=True) else: - if result.code == 201: - module.exit_json(changed=True) - else: - module.fail_json(msg="result code: %d" % result.code) + module.fail_json(msg="unable to update newrelic: %s" % info['msg']) # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.urls import * + main() diff --git a/library/monitoring/pagerduty b/library/monitoring/pagerduty index bfd0573f4d..9a7f21d077 100644 --- a/library/monitoring/pagerduty +++ b/library/monitoring/pagerduty @@ -87,24 +87,23 @@ EXAMPLES=''' import json import datetime -import urllib2 import base64 -def ongoing(name, user, passwd): +def ongoing(module, name, user, passwd): url = "https://" + name + ".pagerduty.com/api/v1/maintenance_windows/ongoing" auth = base64.encodestring('%s:%s' % (user, passwd)).replace('\n', '') + headers = {"Authorization": "Basic %s" % auth} - req = urllib2.Request(url) - req.add_header("Authorization", "Basic %s" % auth) - res = urllib2.urlopen(req) - out = res.read() + response, info = fetch_url(module, url, headers=headers) + if info['status'] != 200: + module.fail_json(msg="failed to lookup the ongoing window: %s" % info['msg']) - return False, out + return False, response.read() -def create(name, user, passwd, service, hours, desc): +def create(module, name, user, passwd, service, hours, desc): now = datetime.datetime.utcnow() later = now + datetime.timedelta(hours=int(hours)) @@ -113,15 +112,17 @@ def create(name, user, passwd, service, hours, desc): url = "https://" + name + ".pagerduty.com/api/v1/maintenance_windows" auth = base64.encodestring('%s:%s' % (user, passwd)).replace('\n', '') + headers = { + 'Authorization': 'Basic %s' % auth, + 'Content-Type' : 'application/json', + } data = json.dumps({'maintenance_window': {'start_time': start, 'end_time': end, 'description': desc, 'service_ids': [service]}}) - req = urllib2.Request(url, data) - req.add_header("Authorization", "Basic %s" % auth) - req.add_header('Content-Type', 'application/json') - res = urllib2.urlopen(req) - out = res.read() + response, info = fetch_url(module, url, data=data, headers=headers, method='POST') + if info['status'] != 200: + module.fail_json(msg="failed to create the window: %s" % info['msg']) - return False, out + return False, response.read() def main(): @@ -149,10 +150,10 @@ def main(): if state == "running" or state == "started": if not service: module.fail_json(msg="service not specified") - (rc, out) = create(name, user, passwd, service, hours, desc) + (rc, out) = create(module, name, user, passwd, service, hours, desc) if state == "ongoing": - (rc, out) = ongoing(name, user, passwd) + (rc, out) = ongoing(module, name, user, passwd) if rc != 0: module.fail_json(msg="failed", result=out) @@ -161,4 +162,6 @@ def main(): # import module snippets from 
ansible.module_utils.basic import * +from ansible.module_utils.urls import * + main() diff --git a/library/net_infrastructure/dnsmadeeasy b/library/net_infrastructure/dnsmadeeasy index d4af13e884..9e2c14480e 100644 --- a/library/net_infrastructure/dnsmadeeasy +++ b/library/net_infrastructure/dnsmadeeasy @@ -106,8 +106,6 @@ EXAMPLES = ''' IMPORT_ERROR = None try: - import urllib - import urllib2 import json from time import strftime, gmtime import hashlib @@ -115,22 +113,6 @@ try: except ImportError, e: IMPORT_ERROR = str(e) - -class RequestWithMethod(urllib2.Request): - - """Workaround for using DELETE/PUT/etc with urllib2""" - - def __init__(self, url, method, data=None, headers={}): - self._method = method - urllib2.Request.__init__(self, url, data, headers) - - def get_method(self): - if self._method: - return self._method - else: - return urllib2.Request.get_method(self) - - class DME2: def __init__(self, apikey, secret, domain, module): @@ -169,16 +151,10 @@ class DME2: url = self.baseurl + resource if data and not isinstance(data, basestring): data = urllib.urlencode(data) - request = RequestWithMethod(url, method, data, self._headers()) - try: - response = urllib2.urlopen(request) - except urllib2.HTTPError, e: - self.module.fail_json( - msg="%s returned %s, with body: %s" % (url, e.code, e.read())) - except Exception, e: - self.module.fail_json( - msg="Failed contacting: %s : Exception %s" % (url, e.message())) + response, info = fetch_url(self.module, url, data=data, method=method) + if info['status'] not in (200, 201, 204): + self.module.fail_json(msg="%s returned %s, with body: %s" % (url, info['status'], info['msg'])) try: return json.load(response) @@ -338,4 +314,6 @@ def main(): # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.urls import * + main() diff --git a/library/net_infrastructure/netscaler b/library/net_infrastructure/netscaler index 1aa370895d..4756d90abd 100644 --- a/library/net_infrastructure/netscaler +++ b/library/net_infrastructure/netscaler @@ -73,6 +73,14 @@ options: default: server choices: ["server", "service"] aliases: [] + validate_certs: + description: + - If C(no), SSL certificates for the target url will not be validated. This should only be used + on personally controlled sites using self-signed certificates. 
+ required: false + default: 'yes' + choices: ['yes', 'no'] + requirements: [ "urllib", "urllib2" ] author: Nandor Sivok ''' @@ -90,8 +98,6 @@ ansible host -m netscaler -a "nsc_host=nsc.example.com user=apiuser password=api import json -import urllib -import urllib2 import base64 import socket @@ -100,23 +106,25 @@ class netscaler(object): _nitro_base_url = '/nitro/v1/' + def __init__(self, module): + self.module = module + def http_request(self, api_endpoint, data_json={}): request_url = self._nsc_protocol + '://' + self._nsc_host + self._nitro_base_url + api_endpoint + data_json = urllib.urlencode(data_json) + if not len(data_json): + data_json = None - if len(data_json): - req = urllib2.Request(request_url, data_json) - req.add_header('Content-Type', 'application/x-www-form-urlencoded') - else: - req = urllib2.Request(request_url) + auth = base64.encodestring('%s:%s' % (self._nsc_user, self._nsc_pass)).replace('\n', '').strip() + headers = { + 'Authorization': 'Basic %s' % auth, + 'Content-Type' : 'application/x-www-form-urlencoded', + } - base64string = base64.encodestring('%s:%s' % (self._nsc_user, self._nsc_pass)).replace('\n', '').strip() - req.add_header('Authorization', "Basic %s" % base64string) + response, info = fetch_url(self.module, request_url, data=data_json, validate_certs=self.module.params['validate_certs']) - resp = urllib2.urlopen(req) - resp = json.load(resp) - - return resp + return json.load(response.read()) def prepare_request(self, action): resp = self.http_request( @@ -134,7 +142,7 @@ class netscaler(object): def core(module): - n = netscaler() + n = netscaler(module) n._nsc_host = module.params.get('nsc_host') n._nsc_user = module.params.get('user') n._nsc_pass = module.params.get('password') @@ -158,7 +166,8 @@ def main(): password = dict(required=True), action = dict(default='enable', choices=['enable','disable']), name = dict(default=socket.gethostname()), - type = dict(default='server', choices=['service', 'server']) + type = dict(default='server', choices=['service', 'server']), + validate_certs=dict(default='yes', type='bool'), ) ) @@ -177,4 +186,5 @@ def main(): # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.urls import * main() diff --git a/library/network/get_url b/library/network/get_url index 9704b8dbad..c249c44049 100644 --- a/library/network/get_url +++ b/library/network/get_url @@ -83,6 +83,13 @@ options: required: false default: 'yes' choices: ['yes', 'no'] + validate_certs: + description: + - If C(no), SSL certificates will not be validated. This should only be used + on personally controlled sites using self-signed certificates. 
+ required: false + default: 'yes' + choices: ['yes', 'no'] others: description: - all arguments accepted by the M(file) module also work here @@ -108,19 +115,6 @@ try: except ImportError: HAS_HASHLIB=False -try: - import urllib2 - HAS_URLLIB2 = True -except ImportError: - HAS_URLLIB2 = False - -try: - import urlparse - import socket - HAS_URLPARSE = True -except ImportError: - HAS_URLPARSE=False - # ============================================================== # url handling @@ -130,80 +124,14 @@ def url_filename(url): return 'index.html' return fn -def url_do_get(module, url, dest, use_proxy, last_mod_time, force): - """ - Get url and return request and info - Credits: http://stackoverflow.com/questions/7006574/how-to-download-file-from-ftp - """ - - USERAGENT = 'ansible-httpget' - info = dict(url=url, dest=dest) - r = None - handlers = [] - - parsed = urlparse.urlparse(url) - - if '@' in parsed[1]: - credentials, netloc = parsed[1].split('@', 1) - if ':' in credentials: - username, password = credentials.split(':', 1) - else: - username = credentials - password = '' - parsed = list(parsed) - parsed[1] = netloc - - passman = urllib2.HTTPPasswordMgrWithDefaultRealm() - # this creates a password manager - passman.add_password(None, netloc, username, password) - # because we have put None at the start it will always - # use this username/password combination for urls - # for which `theurl` is a super-url - - authhandler = urllib2.HTTPBasicAuthHandler(passman) - # create the AuthHandler - handlers.append(authhandler) - - #reconstruct url without credentials - url = urlparse.urlunparse(parsed) - - if not use_proxy: - proxyhandler = urllib2.ProxyHandler({}) - handlers.append(proxyhandler) - - opener = urllib2.build_opener(*handlers) - urllib2.install_opener(opener) - request = urllib2.Request(url) - request.add_header('User-agent', USERAGENT) - - if last_mod_time and not force: - tstamp = last_mod_time.strftime('%a, %d %b %Y %H:%M:%S +0000') - request.add_header('If-Modified-Since', tstamp) - else: - request.add_header('cache-control', 'no-cache') - - try: - r = urllib2.urlopen(request) - info.update(r.info()) - info['url'] = r.geturl() # The URL goes in too, because of redirects. - info.update(dict(msg="OK (%s bytes)" % r.headers.get('Content-Length', 'unknown'), status=200)) - except urllib2.HTTPError, e: - # Must not fail_json() here so caller can handle HTTP 304 unmodified - info.update(dict(msg=str(e), status=e.code)) - except urllib2.URLError, e: - code = getattr(e, 'code', -1) - module.fail_json(msg="Request failed: %s" % str(e), status_code=code) - - return r, info - -def url_get(module, url, dest, use_proxy, last_mod_time, force): +def url_get(module, url, dest, use_proxy, last_mod_time, force, validate_certs): """ Download data from the url and store in a temporary file. 
Return (tempfile, info about the request) """ - req, info = url_do_get(module, url, dest, use_proxy, last_mod_time, force) + rsp, info = fetch_url(module, url, use_proxy=use_proxy, force=force, last_mod_time=last_mod_time, validate_certs=validate_certs) if info['status'] == 304: module.exit_json(url=url, dest=dest, changed=False, msg=info.get('msg', '')) @@ -215,12 +143,12 @@ def url_get(module, url, dest, use_proxy, last_mod_time, force): fd, tempname = tempfile.mkstemp() f = os.fdopen(fd, 'wb') try: - shutil.copyfileobj(req, f) + shutil.copyfileobj(rsp, f) except Exception, err: os.remove(tempname) module.fail_json(msg="failed to create temporary content file: %s" % str(err)) f.close() - req.close() + rsp.close() return tempname, info def extract_filename_from_headers(headers): @@ -247,21 +175,15 @@ def extract_filename_from_headers(headers): def main(): - # does this really happen on non-ancient python? - if not HAS_URLLIB2: - module.fail_json(msg="urllib2 is not installed") - if not HAS_URLPARSE: - module.fail_json(msg="urlparse is not installed") + argument_spec = url_argument_spec() + argument_spec.update( + dest = dict(required=True), + sha256sum = dict(default=''), + ) module = AnsibleModule( # not checking because of daisy chain to file module - argument_spec = dict( - url = dict(required=True), - dest = dict(required=True), - force = dict(default='no', aliases=['thirsty'], type='bool'), - sha256sum = dict(default=''), - use_proxy = dict(default='yes', type='bool') - ), + argument_spec = argument_spec, add_file_common_args=True ) @@ -270,6 +192,7 @@ def main(): force = module.params['force'] sha256sum = module.params['sha256sum'] use_proxy = module.params['use_proxy'] + validate_certs = module.params['validate_certs'] dest_is_dir = os.path.isdir(dest) last_mod_time = None @@ -284,7 +207,7 @@ def main(): last_mod_time = datetime.datetime.utcfromtimestamp(mtime) # download to tmpsrc - tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force) + tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force, validate_certs) # Now the request has completed, we can finally generate the final # destination file name from the info dict. @@ -366,4 +289,5 @@ def main(): # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.urls import * main() diff --git a/library/notification/flowdock b/library/notification/flowdock index a5be40d1f1..32817d756d 100644 --- a/library/notification/flowdock +++ b/library/notification/flowdock @@ -96,31 +96,12 @@ EXAMPLES = ''' tags=tag1,tag2,tag3 ''' -HAS_URLLIB = True -try: - import urllib -except ImportError: - HAS_URLLIB = False - -HAS_URLLIB2 = True -try: - import urllib2 -except ImportError: - HAS_URLLIB2 = False - - - # =========================================== # Module execution. 
# def main(): - if not HAS_URLLIB: - module.fail_json(msg="urllib is not installed") - if not HAS_URLLIB2: - module.fail_json(msg="urllib2 is not installed") - module = AnsibleModule( argument_spec=dict( token=dict(required=True), @@ -187,14 +168,16 @@ def main(): module.exit_json(changed=False) # Send the data to Flowdock - try: - response = urllib2.urlopen(url, urllib.urlencode(params)) - except Exception, e: - module.fail_json(msg="unable to send msg: %s" % e) + data = urllib.urlencode(params) + response, info = fetch_url(module, url, data=data) + if info['status'] != 200: + module.fail_json(msg="unable to send msg: %s" % info['msg']) - module.exit_json(changed=False, msg=module.params["msg"]) + module.exit_json(changed=True, msg=module.params["msg"]) # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.urls import * + main() diff --git a/library/notification/grove b/library/notification/grove index b759f025e2..1e2132cfb7 100644 --- a/library/notification/grove +++ b/library/notification/grove @@ -41,8 +41,6 @@ EXAMPLES = ''' message=deployed {{ target }} ''' -import urllib - BASE_URL = 'https://grove.io/api/notice/%s/' # ============================================================== @@ -57,7 +55,10 @@ def do_notify_grove(module, channel_token, service, message, url=None, icon_url= if icon_url is not None: my_data['icon_url'] = icon_url - urllib.urlopen(my_url, urllib.urlencode(my_data)) + data = urllib.urlencode(my_data) + response, info = fetch_url(module, my_url, data=data) + if info['status'] != 200: + module.fail_json(msg="failed to send notification: %s" % info['msg']) # ============================================================== # main diff --git a/library/notification/hipchat b/library/notification/hipchat index eec2b8c361..c4b36d64ce 100644 --- a/library/notification/hipchat +++ b/library/notification/hipchat @@ -60,22 +60,10 @@ EXAMPLES = ''' # HipChat module specific support methods. # -HAS_URLLIB = True -try: - import urllib -except ImportError: - HAS_URLLIB = False - -HAS_URLLIB2 = True -try: - import urllib2 -except ImportError: - HAS_URLLIB2 = False - MSG_URI = "https://api.hipchat.com/v1/rooms/message?" 
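flowdock, grove, and hipchat now push their form-encoded payloads through fetch_url instead of urllib2.urlopen. A minimal sketch of that POST shape, assuming urllib is importable for urlencode and using a placeholder params dict and url:

    data = urllib.urlencode(params)
    response, info = fetch_url(module, url, data=data)
    if info['status'] != 200:
        module.fail_json(msg="unable to send msg: %s" % info['msg'])
    module.exit_json(changed=True)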
-def send_msg(token, room, msg_from, msg, msg_format='text', +def send_msg(module, token, room, msg_from, msg, msg_format='text', color='yellow', notify=False): '''sending message to hipchat''' @@ -92,8 +80,12 @@ def send_msg(token, room, msg_from, msg, msg_format='text', params['notify'] = 0 url = MSG_URI + "auth_token=%s" % (token) - response = urllib2.urlopen(url, urllib.urlencode(params)) - return response.read() + data = urllib.urlencode(params) + response, info = fetch_url(module, url, data=data) + if info['status'] == 200: + return response.read() + else: + module.fail_json(msg="failed to send message, return status=%s" % str(info['status'])) # =========================================== @@ -102,11 +94,6 @@ def send_msg(token, room, msg_from, msg, msg_format='text', def main(): - if not HAS_URLLIB: - module.fail_json(msg="urllib is not installed") - if not HAS_URLLIB2: - module.fail_json(msg="urllib2 is not installed") - module = AnsibleModule( argument_spec=dict( token=dict(required=True), @@ -130,15 +117,15 @@ def main(): notify = module.params["notify"] try: - send_msg(token, room, msg_from, msg, msg_format, - color, notify) + send_msg(module, token, room, msg_from, msg, msg_format, color, notify) except Exception, e: module.fail_json(msg="unable to sent msg: %s" % e) changed = True - module.exit_json(changed=changed, room=room, msg_from=msg_from, - msg=msg) + module.exit_json(changed=changed, room=room, msg_from=msg_from, msg=msg) # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.urls import * + main() diff --git a/library/packaging/apt_key b/library/packaging/apt_key index eee8633702..ff05bb93d1 100644 --- a/library/packaging/apt_key +++ b/library/packaging/apt_key @@ -64,6 +64,14 @@ options: default: present description: - used to specify if key is being added or revoked + validate_certs: + description: + - If C(no), SSL certificates for the target url will not be validated. This should only be used + on personally controlled sites using self-signed certificates. 
+ required: false + default: 'yes' + choices: ['yes', 'no'] + ''' EXAMPLES = ''' @@ -88,7 +96,6 @@ EXAMPLES = ''' # FIXME: standardize into module_common -from urllib2 import urlopen, URLError from traceback import format_exc from re import compile as re_compile # FIXME: standardize into module_common @@ -133,11 +140,8 @@ def download_key(module, url): if url is None: module.fail_json(msg="needed a URL but was not specified") try: - connection = urlopen(url) - if connection is None: - module.fail_json("error connecting to download key from url") - data = connection.read() - return data + rsp, info = fetch_url(module, url, validate_certs=module.params['validate_certs']) + return rsp.read() except Exception: module.fail_json(msg="error getting key id from url", traceback=format_exc()) @@ -175,7 +179,8 @@ def main(): file=dict(required=False), key=dict(required=False), keyring=dict(required=False), - state=dict(required=False, choices=['present', 'absent'], default='present') + state=dict(required=False, choices=['present', 'absent'], default='present'), + validate_certs=dict(default='yes', type='bool'), ), supports_check_mode=True ) @@ -240,4 +245,5 @@ def main(): # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.urls import * main() diff --git a/library/packaging/rpm_key b/library/packaging/rpm_key index 8253247734..9d85f30ac8 100644 --- a/library/packaging/rpm_key +++ b/library/packaging/rpm_key @@ -42,6 +42,14 @@ options: choices: [present, absent] description: - Wheather the key will be imported or removed from the rpm db. + validate_certs: + description: + - If C(no) and the C(key) is a url starting with https, SSL certificates will not be validated. This should only be used + on personally controlled sites using self-signed certificates. + required: false + default: 'yes' + choices: ['yes', 'no'] + ''' EXAMPLES = ''' @@ -57,7 +65,6 @@ EXAMPLES = ''' import syslog import os.path import re -import urllib2 import tempfile # Attempt to download at most 8192 bytes. @@ -116,8 +123,8 @@ class RpmKey: def fetch_key(self, url, maxbytes=MAXBYTES): """Downloads a key from url, returns a valid path to a gpg key""" try: - fd = urllib2.urlopen(url) - key = fd.read(maxbytes) + rsp, info = fetch_url(self.module, url, validate_certs=self.module.params['validate_certs']) + key = rsp.read(maxbytes) if not is_pubkey(key): self.module.fail_json(msg="Not a public key: %s" % url) tmpfd, tmpname = tempfile.mkstemp() @@ -187,7 +194,8 @@ def main(): module = AnsibleModule( argument_spec = dict( state=dict(default='present', choices=['present', 'absent'], type='str'), - key=dict(required=True, type='str') + key=dict(required=True, type='str'), + validate_certs=dict(default='yes', type='bool'), ), supports_check_mode=True ) @@ -198,4 +206,5 @@ def main(): # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.urls import * main() diff --git a/library/source_control/github_hooks b/library/source_control/github_hooks index 55eb8d3c8d..c5c5b648c7 100644 --- a/library/source_control/github_hooks +++ b/library/source_control/github_hooks @@ -19,7 +19,6 @@ # along with Ansible. If not, see . import json -import urllib2 import base64 DOCUMENTATION = ''' @@ -51,6 +50,14 @@ options: - This tells the githooks module what you want it to do. required: true choices: [ "create", "cleanall" ] + validate_certs: + description: + - If C(no), SSL certificates for the target repo will not be validated. 
This should only be used + on personally controlled sites using self-signed certificates. + required: false + default: 'yes' + choices: ['yes', 'no'] + author: Phillip Gentry, CX Inc ''' @@ -62,16 +69,19 @@ EXAMPLES = ''' - local_action: github_hooks action=cleanall user={{ gituser }} oauthkey={{ oauthkey }} repo={{ repo }} ''' -def list(hookurl, oauthkey, repo, user): +def list(module, hookurl, oauthkey, repo, user): url = "%s/hooks" % repo auth = base64.encodestring('%s:%s' % (user, oauthkey)).replace('\n', '') - req = urllib2.Request(url) - req.add_header("Authorization", "Basic %s" % auth) - res = urllib2.urlopen(req) - out = res.read() - return False, out + headers = { + 'Authorization': 'Basic %s' % auth, + } + response, info = fetch_url(module, url, headers=headers, validate_certs=module.params['validate_certs']) + if info['status'] != 200: + return False, '' + else: + return False, response.read() -def clean504(hookurl, oauthkey, repo, user): +def clean504(module, hookurl, oauthkey, repo, user): current_hooks = list(hookurl, oauthkey, repo, user)[1] decoded = json.loads(current_hooks) @@ -79,11 +89,11 @@ def clean504(hookurl, oauthkey, repo, user): if hook['last_response']['code'] == 504: # print "Last response was an ERROR for hook:" # print hook['id'] - delete(hookurl, oauthkey, repo, user, hook['id']) + delete(module, hookurl, oauthkey, repo, user, hook['id']) return 0, current_hooks -def cleanall(hookurl, oauthkey, repo, user): +def cleanall(module, hookurl, oauthkey, repo, user): current_hooks = list(hookurl, oauthkey, repo, user)[1] decoded = json.loads(current_hooks) @@ -91,11 +101,11 @@ def cleanall(hookurl, oauthkey, repo, user): if hook['last_response']['code'] != 200: # print "Last response was an ERROR for hook:" # print hook['id'] - delete(hookurl, oauthkey, repo, user, hook['id']) + delete(module, hookurl, oauthkey, repo, user, hook['id']) return 0, current_hooks -def create(hookurl, oauthkey, repo, user): +def create(module, hookurl, oauthkey, repo, user): url = "%s/hooks" % repo values = { "active": True, @@ -107,29 +117,23 @@ def create(hookurl, oauthkey, repo, user): } data = json.dumps(values) auth = base64.encodestring('%s:%s' % (user, oauthkey)).replace('\n', '') - out='[]' - try : - req = urllib2.Request(url) - req.add_data(data) - req.add_header("Authorization", "Basic %s" % auth) - res = urllib2.urlopen(req) - out = res.read() - return 0, out - except urllib2.HTTPError, e : - if e.code == 422 : - return 0, out + headers = { + 'Authorization': 'Basic %s' % auth, + } + response, info = fetch_url(module, url, data=data, headers=headers, validate_certs=module.params['validate_certs']) + if info['status'] != 200: + return 0, '[]' + else: + return 0, response.read() -def delete(hookurl, oauthkey, repo, user, hookid): +def delete(module, hookurl, oauthkey, repo, user, hookid): url = "%s/hooks/%s" % (repo, hookid) auth = base64.encodestring('%s:%s' % (user, oauthkey)).replace('\n', '') - req = urllib2.Request(url) - req.get_method = lambda: 'DELETE' - req.add_header("Authorization", "Basic %s" % auth) - # req.add_header('Content-Type', 'application/xml') - # req.add_header('Accept', 'application/xml') - res = urllib2.urlopen(req) - out = res.read() - return out + headers = { + 'Authorization': 'Basic %s' % auth, + } + response, info = fetch_url(module, url, data=data, headers=headers, method='DELETE', validate_certs=module.params['validate_certs']) + return response.read() def main(): module = AnsibleModule( @@ -139,6 +143,7 @@ def main(): 
oauthkey=dict(required=True), repo=dict(required=True), user=dict(required=True), + validate_certs=dict(default='yes', type='bool'), ) ) @@ -149,16 +154,16 @@ def main(): user = module.params['user'] if action == "list": - (rc, out) = list(hookurl, oauthkey, repo, user) + (rc, out) = list(module, hookurl, oauthkey, repo, user) if action == "clean504": - (rc, out) = clean504(hookurl, oauthkey, repo, user) + (rc, out) = clean504(module, hookurl, oauthkey, repo, user) if action == "cleanall": - (rc, out) = cleanall(hookurl, oauthkey, repo, user) + (rc, out) = cleanall(module, hookurl, oauthkey, repo, user) if action == "create": - (rc, out) = create(hookurl, oauthkey, repo, user) + (rc, out) = create(module, hookurl, oauthkey, repo, user) if rc != 0: module.fail_json(msg="failed", result=out) @@ -168,4 +173,6 @@ def main(): # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.urls import * + main() From a02641c0202a643c9d70475789e3f0d8261ae2ee Mon Sep 17 00:00:00 2001 From: James Tanner Date: Mon, 10 Mar 2014 16:11:24 -0500 Subject: [PATCH 26/29] Force command action to not be executed by the shell unless specifically enabled --- lib/ansible/module_utils/basic.py | 50 +++-- lib/ansible/module_utils/redhat.py | 252 ++++++++++++++++++++++ lib/ansible/runner/lookup_plugins/pipe.py | 2 +- library/cloud/virt | 18 +- library/commands/command | 2 +- library/files/synchronize | 9 +- library/notification/osx_say | 2 - library/packaging/easy_install | 4 +- library/packaging/npm | 5 +- library/packaging/pacman | 12 +- library/packaging/pip | 13 +- library/packaging/redhat_subscription | 81 +++---- library/packaging/rhn_register | 81 +------ library/packaging/urpmi | 13 +- library/source_control/bzr | 29 ++- library/source_control/git | 67 +++--- library/system/service | 4 +- library/system/setup | 25 ++- library/web_infrastructure/django_manage | 3 +- 19 files changed, 427 insertions(+), 245 deletions(-) create mode 100644 lib/ansible/module_utils/redhat.py diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py index fd0b2edfc3..67ceb3b605 100644 --- a/lib/ansible/module_utils/basic.py +++ b/lib/ansible/module_utils/basic.py @@ -990,12 +990,13 @@ class AnsibleModule(object): # rename might not preserve context self.set_context_if_different(dest, context, False) - def run_command(self, args, check_rc=False, close_fds=False, executable=None, data=None, binary_data=False, path_prefix=None): + def run_command(self, args, check_rc=False, close_fds=False, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False): ''' Execute a command, returns rc, stdout, and stderr. args is the command to run If args is a list, the command will be run with shell=False. - Otherwise, the command will be run with shell=True when args is a string. + If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False + If args is a string and use_unsafe_shell=True it run with shell=True. Other arguments: - check_rc (boolean) Whether to call fail_json in case of non zero RC. Default is False. @@ -1004,13 +1005,18 @@ class AnsibleModule(object): - executable (string) See documentation for subprocess.Popen(). Default is None. 
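+        - cwd (string) See documentation for subprocess.Popen(). Run the command
+          from this working directory. Default is None.
+        - use_unsafe_shell (boolean) Whether a string args is handed to the shell
+          unmodified (shell=True). Default is False.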
''' + + shell = False if isinstance(args, list): - shell = False - elif isinstance(args, basestring): + pass + elif isinstance(args, basestring) and use_unsafe_shell: shell = True + elif isinstance(args, basestring): + args = shlex.split(args) else: msg = "Argument 'args' to run_command must be list or string" self.fail_json(rc=257, cmd=args, msg=msg) + rc = 0 msg = None st_in = None @@ -1022,25 +1028,25 @@ class AnsibleModule(object): if data: st_in = subprocess.PIPE + + kwargs = dict( + executable=executable, + shell=shell, + close_fds=close_fds, + stdin= st_in, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE + ) + + if path_prefix: + kwargs['env'] = env + if cwd: + kwargs['cwd'] = cwd + + try: - if path_prefix is not None: - cmd = subprocess.Popen(args, - executable=executable, - shell=shell, - close_fds=close_fds, - stdin=st_in, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env=env) - else: - cmd = subprocess.Popen(args, - executable=executable, - shell=shell, - close_fds=close_fds, - stdin=st_in, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - + cmd = subprocess.Popen(args, **kwargs) + if data: if not binary_data: data += '\\n' diff --git a/lib/ansible/module_utils/redhat.py b/lib/ansible/module_utils/redhat.py new file mode 100644 index 0000000000..a1081f9c8c --- /dev/null +++ b/lib/ansible/module_utils/redhat.py @@ -0,0 +1,252 @@ +import os +import re +import types +import ConfigParser +import shlex + + +class RegistrationBase(object): + def __init__(self, module, username=None, password=None): + self.module = module + self.username = username + self.password = password + + def configure(self): + raise NotImplementedError("Must be implemented by a sub-class") + + def enable(self): + # Remove any existing redhat.repo + redhat_repo = '/etc/yum.repos.d/redhat.repo' + if os.path.isfile(redhat_repo): + os.unlink(redhat_repo) + + def register(self): + raise NotImplementedError("Must be implemented by a sub-class") + + def unregister(self): + raise NotImplementedError("Must be implemented by a sub-class") + + def unsubscribe(self): + raise NotImplementedError("Must be implemented by a sub-class") + + def update_plugin_conf(self, plugin, enabled=True): + plugin_conf = '/etc/yum/pluginconf.d/%s.conf' % plugin + if os.path.isfile(plugin_conf): + cfg = ConfigParser.ConfigParser() + cfg.read([plugin_conf]) + if enabled: + cfg.set('main', 'enabled', 1) + else: + cfg.set('main', 'enabled', 0) + fd = open(plugin_conf, 'rwa+') + cfg.write(fd) + fd.close() + + def subscribe(self, **kwargs): + raise NotImplementedError("Must be implemented by a sub-class") + + +class Rhsm(RegistrationBase): + def __init__(self, module, username=None, password=None): + RegistrationBase.__init__(self, module, username, password) + self.config = self._read_config() + self.module = module + + def _read_config(self, rhsm_conf='/etc/rhsm/rhsm.conf'): + ''' + Load RHSM configuration from /etc/rhsm/rhsm.conf. + Returns: + * ConfigParser object + ''' + + # Read RHSM defaults ... + cp = ConfigParser.ConfigParser() + cp.read(rhsm_conf) + + # Add support for specifying a default value w/o having to standup some configuration + # Yeah, I know this should be subclassed ... 
but, oh well + def get_option_default(self, key, default=''): + sect, opt = key.split('.', 1) + if self.has_section(sect) and self.has_option(sect, opt): + return self.get(sect, opt) + else: + return default + + cp.get_option = types.MethodType(get_option_default, cp, ConfigParser.ConfigParser) + + return cp + + def enable(self): + ''' + Enable the system to receive updates from subscription-manager. + This involves updating affected yum plugins and removing any + conflicting yum repositories. + ''' + RegistrationBase.enable(self) + self.update_plugin_conf('rhnplugin', False) + self.update_plugin_conf('subscription-manager', True) + + def configure(self, **kwargs): + ''' + Configure the system as directed for registration with RHN + Raises: + * Exception - if error occurs while running command + ''' + args = ['subscription-manager', 'config'] + + # Pass supplied **kwargs as parameters to subscription-manager. Ignore + # non-configuration parameters and replace '_' with '.'. For example, + # 'server_hostname' becomes '--system.hostname'. + for k,v in kwargs.items(): + if re.search(r'^(system|rhsm)_', k): + args.append('--%s=%s' % (k.replace('_','.'), v)) + + self.module.run_command(args, check_rc=True) + + @property + def is_registered(self): + ''' + Determine whether the current system + Returns: + * Boolean - whether the current system is currently registered to + RHN. + ''' + # Quick version... + if False: + return os.path.isfile('/etc/pki/consumer/cert.pem') and \ + os.path.isfile('/etc/pki/consumer/key.pem') + + args = ['subscription-manager', 'identity'] + rc, stdout, stderr = self.module.run_command(args, check_rc=False) + if rc == 0: + return True + else: + return False + + def register(self, username, password, autosubscribe, activationkey): + ''' + Register the current system to the provided RHN server + Raises: + * Exception - if error occurs while running command + ''' + args = ['subscription-manager', 'register'] + + # Generate command arguments + if activationkey: + args.append('--activationkey "%s"' % activationkey) + else: + if autosubscribe: + args.append('--autosubscribe') + if username: + args.extend(['--username', username]) + if password: + args.extend(['--password', password]) + + # Do the needful... 
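+        # run_command returns a (rc, stdout, stderr) tuple; with check_rc=True it
+        # calls fail_json itself whenever rc is non-zero.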
+ rc, stderr, stdout = self.module.run_command(args, check_rc=True) + + def unsubscribe(self): + ''' + Unsubscribe a system from all subscribed channels + Raises: + * Exception - if error occurs while running command + ''' + args = ['subscription-manager', 'unsubscribe', '--all'] + rc, stderr, stdout = self.module.run_command(args, check_rc=True) + + def unregister(self): + ''' + Unregister a currently registered system + Raises: + * Exception - if error occurs while running command + ''' + args = ['subscription-manager', 'unregister'] + rc, stderr, stdout = self.module.run_command(args, check_rc=True) + + def subscribe(self, regexp): + ''' + Subscribe current system to available pools matching the specified + regular expression + Raises: + * Exception - if error occurs while running command + ''' + + # Available pools ready for subscription + available_pools = RhsmPools(self.module) + + for pool in available_pools.filter(regexp): + pool.subscribe() + + +class RhsmPool(object): + ''' + Convenience class for housing subscription information + ''' + + def __init__(self, module, **kwargs): + self.module = module + for k,v in kwargs.items(): + setattr(self, k, v) + + def __str__(self): + return str(self.__getattribute__('_name')) + + def subscribe(self): + args = "subscription-manager subscribe --pool %s" % self.PoolId + rc, stdout, stderr = self.module.run_command(args, check_rc=True) + if rc == 0: + return True + else: + return False + + +class RhsmPools(object): + """ + This class is used for manipulating pools subscriptions with RHSM + """ + def __init__(self, module): + self.module = module + self.products = self._load_product_list() + + def __iter__(self): + return self.products.__iter__() + + def _load_product_list(self): + """ + Loads list of all availaible pools for system in data structure + """ + args = "subscription-manager list --available" + rc, stdout, stderr = self.module.run_command(args, check_rc=True) + + products = [] + for line in stdout.split('\n'): + # Remove leading+trailing whitespace + line = line.strip() + # An empty line implies the end of a output group + if len(line) == 0: + continue + # If a colon ':' is found, parse + elif ':' in line: + (key, value) = line.split(':',1) + key = key.strip().replace(" ", "") # To unify + value = value.strip() + if key in ['ProductName', 'SubscriptionName']: + # Remember the name for later processing + products.append(RhsmPool(self.module, _name=value, key=value)) + elif products: + # Associate value with most recently recorded product + products[-1].__setattr__(key, value) + # FIXME - log some warning? 
+ #else: + # warnings.warn("Unhandled subscription key/value: %s/%s" % (key,value)) + return products + + def filter(self, regexp='^$'): + ''' + Return a list of RhsmPools whose name matches the provided regular expression + ''' + r = re.compile(regexp) + for product in self.products: + if r.search(product._name): + yield product + diff --git a/lib/ansible/runner/lookup_plugins/pipe.py b/lib/ansible/runner/lookup_plugins/pipe.py index 4205b887ff..62ec7e129e 100644 --- a/lib/ansible/runner/lookup_plugins/pipe.py +++ b/lib/ansible/runner/lookup_plugins/pipe.py @@ -32,7 +32,7 @@ class LookupModule(object): ret = [] for term in terms: - p = subprocess.Popen(term, cwd=self.basedir, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE) + p = subprocess.Popen(term, cwd=self.basedir, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE) (stdout, stderr) = p.communicate() if p.returncode == 0: ret.append(stdout.decode("utf-8").rstrip()) diff --git a/library/cloud/virt b/library/cloud/virt index 42e99209b1..8cbf7fc895 100644 --- a/library/cloud/virt +++ b/library/cloud/virt @@ -113,13 +113,14 @@ class VMNotFound(Exception): class LibvirtConnection(object): - def __init__(self, uri): + def __init__(self, uri, module): - cmd = subprocess.Popen("uname -r", shell=True, stdout=subprocess.PIPE, - close_fds=True) - output = cmd.communicate()[0] + self.module = module - if output.find("xen") != -1: + cmd = "uname -r" + rc, stdout, stderr = self.module.run_command(cmd) + + if stdout.find("xen") != -1: conn = libvirt.open(None) else: conn = libvirt.open(uri) @@ -221,11 +222,12 @@ class LibvirtConnection(object): class Virt(object): - def __init__(self, uri): + def __init__(self, uri, module): + self.module = module self.uri = uri def __get_conn(self): - self.conn = LibvirtConnection(self.uri) + self.conn = LibvirtConnection(self.uri, self.module) return self.conn def get_vm(self, vmid): @@ -399,7 +401,7 @@ def core(module): uri = module.params.get('uri', None) xml = module.params.get('xml', None) - v = Virt(uri) + v = Virt(uri, module) res = {} if state and command=='list_vms': diff --git a/library/commands/command b/library/commands/command index 76d2f828d0..ba9ae30a7f 100644 --- a/library/commands/command +++ b/library/commands/command @@ -136,7 +136,7 @@ def main(): args = shlex.split(args) startd = datetime.datetime.now() - rc, out, err = module.run_command(args, executable=executable) + rc, out, err = module.run_command(args, executable=executable, use_unsafe_shell=shell) endd = datetime.datetime.now() delta = endd - startd diff --git a/library/files/synchronize b/library/files/synchronize index 493322393b..eb556c30f5 100644 --- a/library/files/synchronize +++ b/library/files/synchronize @@ -16,8 +16,6 @@ # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . 
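The command and virt changes above lean on the hardened run_command: argument lists, and plain strings split via shlex, never touch a shell, and shell=True now requires an explicit opt-in. A short sketch of the distinction, using a throwaway command purely for illustration:

    # a list is executed directly, shell=False
    rc, out, err = module.run_command(['uname', '-r'])

    # a plain string is shlex.split() into a list and still runs with shell=False
    rc, out, err = module.run_command("uname -r")

    # only an explicit opt-in goes through the shell (needed for pipes, redirects, globs)
    rc, out, err = module.run_command("uname -r | tee /tmp/kernel", use_unsafe_shell=True)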
-import subprocess - DOCUMENTATION = ''' --- module: synchronize @@ -272,6 +270,13 @@ def main(): cmd = cmd + " --rsync-path '%s'" %(rsync_path) changed_marker = '<>' cmd = cmd + " --out-format='" + changed_marker + "%i %n%L'" + + # expand the paths + if '@' not in source: + source = os.path.expanduser(source) + if '@' not in dest: + dest = os.path.expanduser(dest) + cmd = ' '.join([cmd, source, dest]) cmdstr = cmd (rc, out, err) = module.run_command(cmd) diff --git a/library/notification/osx_say b/library/notification/osx_say index de5d1917c5..39e3da88c1 100644 --- a/library/notification/osx_say +++ b/library/notification/osx_say @@ -44,8 +44,6 @@ EXAMPLES = ''' - local_action: osx_say msg="{{inventory_hostname}} is all done" voice=Zarvox ''' -import subprocess - DEFAULT_VOICE='Trinoids' def say(module, msg, voice): diff --git a/library/packaging/easy_install b/library/packaging/easy_install index bdacf8e464..889a81f025 100644 --- a/library/packaging/easy_install +++ b/library/packaging/easy_install @@ -151,8 +151,8 @@ def main(): command = '%s %s' % (virtualenv, env) if site_packages: command += ' --system-site-packages' - os.chdir(tempfile.gettempdir()) - rc_venv, out_venv, err_venv = module.run_command(command) + cwd = tempfile.gettempdir() + rc_venv, out_venv, err_venv = module.run_command(command, cwd=cwd) rc += rc_venv out += out_venv diff --git a/library/packaging/npm b/library/packaging/npm index 62179c373a..c623b6f7e6 100644 --- a/library/packaging/npm +++ b/library/packaging/npm @@ -125,10 +125,11 @@ class Npm(object): cmd.append(self.name_version) #If path is specified, cd into that path and run the command. + cwd = None if self.path: - os.chdir(self.path) + cwd = self.path - rc, out, err = self.module.run_command(cmd, check_rc=check_rc) + rc, out, err = self.module.run_command(cmd, check_rc=check_rc, cwd=cwd) return out return '' diff --git a/library/packaging/pacman b/library/packaging/pacman index 3080cb4a60..a4a24ca5fd 100644 --- a/library/packaging/pacman +++ b/library/packaging/pacman @@ -90,7 +90,8 @@ def query_package(module, name, state="installed"): # pacman -Q returns 0 if the package is installed, # 1 if it is not installed if state == "installed": - rc = os.system("pacman -Q %s" % (name)) + cmd = "pacman -Q %s" % (name) + rc, stdout, stderr = module.run_command(cmd, check_rc=False) if rc == 0: return True @@ -99,7 +100,8 @@ def query_package(module, name, state="installed"): def update_package_db(module): - rc = os.system("pacman -Syy > /dev/null") + cmd = "pacman -Syy > /dev/null" + rc, stdout, stderr = module.run_command(cmd, check_rc=False) if rc != 0: module.fail_json(msg="could not update package db") @@ -118,7 +120,8 @@ def remove_packages(module, packages): if not query_package(module, package): continue - rc = os.system("pacman -%s %s --noconfirm > /dev/null" % (args, package)) + cmd = "pacman -%s %s --noconfirm > /dev/null" % (args, package) + rc, stdout, stderr = module.run_command(cmd, check_rc=False) if rc != 0: module.fail_json(msg="failed to remove %s" % (package)) @@ -145,7 +148,8 @@ def install_packages(module, packages, package_files): else: params = '-S %s' % package - rc = os.system("pacman %s --noconfirm > /dev/null" % (params)) + cmd = "pacman %s --noconfirm > /dev/null" % (params) + rc, stdout, stderr = module.run_command(cmd, check_rc=False) if rc != 0: module.fail_json(msg="failed to install %s" % (package)) diff --git a/library/packaging/pip b/library/packaging/pip index 35487c3296..aa55bf8ba0 100644 --- a/library/packaging/pip +++ 
b/library/packaging/pip @@ -253,10 +253,10 @@ def main(): cmd = '%s --no-site-packages %s' % (virtualenv, env) else: cmd = '%s %s' % (virtualenv, env) - os.chdir(tempfile.gettempdir()) + this_dir = tempfile.gettempdir() if chdir: - os.chdir(chdir) - rc, out_venv, err_venv = module.run_command(cmd) + this_dir = os.path.join(this_dir, chdir) + rc, out_venv, err_venv = module.run_command(cmd, cwd=this_dir) out += out_venv err += err_venv if rc != 0: @@ -298,10 +298,11 @@ def main(): if module.check_mode: module.exit_json(changed=True) - os.chdir(tempfile.gettempdir()) + this_dir = tempfile.gettempdir() if chdir: - os.chdir(chdir) - rc, out_pip, err_pip = module.run_command(cmd, path_prefix=path_prefix) + this_dir = os.path.join(this_dir, chdir) + + rc, out_pip, err_pip = module.run_command(cmd, path_prefix=path_prefix, cwd=this_dir) out += out_pip err += err_pip if rc == 1 and state == 'absent' and 'not installed' in out_pip: diff --git a/library/packaging/redhat_subscription b/library/packaging/redhat_subscription index e363aa0946..bb5d655a52 100644 --- a/library/packaging/redhat_subscription +++ b/library/packaging/redhat_subscription @@ -75,39 +75,13 @@ EXAMPLES = ''' import os import re import types -import subprocess import ConfigParser import shlex -class CommandException(Exception): - pass - - -def run_command(args): - ''' - Convenience method to run a command, specified as a list of arguments. - Returns: - * tuple - (stdout, stder, retcode) - ''' - - # Coerce into a string - if isinstance(args, str): - args = shlex.split(args) - - # Run desired command - proc = subprocess.Popen(args, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - (stdout, stderr) = proc.communicate() - returncode = proc.poll() - if returncode != 0: - cmd = ' '.join(args) - raise CommandException("Command failed (%s): %s\n%s" % (returncode, cmd, stdout)) - return (stdout, stderr, returncode) - - -class RegistrationBase (object): - def __init__(self, username=None, password=None): +class RegistrationBase(object): + def __init__(self, module, username=None, password=None): + self.module = module self.username = username self.password = password @@ -147,9 +121,10 @@ class RegistrationBase (object): class Rhsm(RegistrationBase): - def __init__(self, username=None, password=None): - RegistrationBase.__init__(self, username, password) + def __init__(self, module, username=None, password=None): + RegistrationBase.__init__(self, module, username, password) self.config = self._read_config() + self.module = module def _read_config(self, rhsm_conf='/etc/rhsm/rhsm.conf'): ''' @@ -199,8 +174,8 @@ class Rhsm(RegistrationBase): for k,v in kwargs.items(): if re.search(r'^(system|rhsm)_', k): args.append('--%s=%s' % (k.replace('_','.'), v)) - - run_command(args) + + self.module.run_command(args, check_rc=True) @property def is_registered(self): @@ -216,13 +191,11 @@ class Rhsm(RegistrationBase): os.path.isfile('/etc/pki/consumer/key.pem') args = ['subscription-manager', 'identity'] - try: - (stdout, stderr, retcode) = run_command(args) - except CommandException, e: - return False - else: - # Display some debug output + rc, stdout, stderr = self.module.run_command(args, check_rc=False) + if rc == 0: return True + else: + return False def register(self, username, password, autosubscribe, activationkey): ''' @@ -244,7 +217,7 @@ class Rhsm(RegistrationBase): args.extend(['--password', password]) # Do the needful... 
- run_command(args) + rc, stderr, stdout = self.module.run_command(args, check_rc=True) def unsubscribe(self): ''' @@ -253,7 +226,7 @@ class Rhsm(RegistrationBase): * Exception - if error occurs while running command ''' args = ['subscription-manager', 'unsubscribe', '--all'] - run_command(args) + rc, stderr, stdout = self.module.run_command(args, check_rc=True) def unregister(self): ''' @@ -262,7 +235,7 @@ class Rhsm(RegistrationBase): * Exception - if error occurs while running command ''' args = ['subscription-manager', 'unregister'] - run_command(args) + rc, stderr, stdout = self.module.run_command(args, check_rc=True) def subscribe(self, regexp): ''' @@ -273,7 +246,7 @@ class Rhsm(RegistrationBase): ''' # Available pools ready for subscription - available_pools = RhsmPools() + available_pools = RhsmPools(self.module) for pool in available_pools.filter(regexp): pool.subscribe() @@ -284,7 +257,8 @@ class RhsmPool(object): Convenience class for housing subscription information ''' - def __init__(self, **kwargs): + def __init__(self, module, **kwargs): + self.module = module for k,v in kwargs.items(): setattr(self, k, v) @@ -292,15 +266,20 @@ class RhsmPool(object): return str(self.__getattribute__('_name')) def subscribe(self): - (stdout, stderr, retcode) = run_command("subscription-manager subscribe --pool %s" % self.PoolId) - return True + args = "subscription-manager subscribe --pool %s" % self.PoolId + rc, stdout, stderr = self.module.run_command(args, check_rc=True) + if rc == 0: + return True + else: + return False class RhsmPools(object): """ This class is used for manipulating pools subscriptions with RHSM """ - def __init__(self): + def __init__(self, module): + self.module = module self.products = self._load_product_list() def __iter__(self): @@ -310,7 +289,8 @@ class RhsmPools(object): """ Loads list of all availaible pools for system in data structure """ - (stdout, stderr, retval) = run_command("subscription-manager list --available") + args = "subscription-manager list --available" + rc, stdout, stderr = self.module.run_command(args, check_rc=True) products = [] for line in stdout.split('\n'): @@ -326,7 +306,7 @@ class RhsmPools(object): value = value.strip() if key in ['ProductName', 'SubscriptionName']: # Remember the name for later processing - products.append(RhsmPool(_name=value, key=value)) + products.append(RhsmPool(self.module, _name=value, key=value)) elif products: # Associate value with most recently recorded product products[-1].__setattr__(key, value) @@ -348,7 +328,7 @@ class RhsmPools(object): def main(): # Load RHSM configuration from file - rhn = Rhsm() + rhn = Rhsm(AnsibleModule()) module = AnsibleModule( argument_spec = dict( @@ -364,6 +344,7 @@ def main(): ) ) + rhn.module = module state = module.params['state'] username = module.params['username'] password = module.params['password'] diff --git a/library/packaging/rhn_register b/library/packaging/rhn_register index 5e8c3718f9..28d91a6a02 100644 --- a/library/packaging/rhn_register +++ b/library/packaging/rhn_register @@ -72,12 +72,7 @@ EXAMPLES = ''' ''' import sys -import os -import re import types -import subprocess -import ConfigParser -import shlex import xmlrpclib import urlparse @@ -90,75 +85,9 @@ except ImportError, e: module.fail_json(msg="Unable to import up2date_client. Is 'rhn-client-tools' installed?\n%s" % e) -class CommandException(Exception): - pass - - -def run_command(args): - ''' - Convenience method to run a command, specified as a list of arguments. 
- Returns: - * tuple - (stdout, stder, retcode) - ''' - - # Coerce into a string - if isinstance(args, str): - args = shlex.split(args) - - # Run desired command - proc = subprocess.Popen(args, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - (stdout, stderr) = proc.communicate() - returncode = proc.poll() - if returncode != 0: - cmd = ' '.join(args) - raise CommandException("Command failed (%s): %s\n%s" % (returncode, cmd, stdout)) - return (stdout, stderr, returncode) - - -class RegistrationBase (object): - def __init__(self, username=None, password=None): - self.username = username - self.password = password - - def configure(self): - raise NotImplementedError("Must be implemented by a sub-class") - - def enable(self): - # Remove any existing redhat.repo - redhat_repo = '/etc/yum.repos.d/redhat.repo' - if os.path.isfile(redhat_repo): - os.unlink(redhat_repo) - - def register(self): - raise NotImplementedError("Must be implemented by a sub-class") - - def unregister(self): - raise NotImplementedError("Must be implemented by a sub-class") - - def unsubscribe(self): - raise NotImplementedError("Must be implemented by a sub-class") - - def update_plugin_conf(self, plugin, enabled=True): - plugin_conf = '/etc/yum/pluginconf.d/%s.conf' % plugin - if os.path.isfile(plugin_conf): - cfg = ConfigParser.ConfigParser() - cfg.read([plugin_conf]) - if enabled: - cfg.set('main', 'enabled', 1) - else: - cfg.set('main', 'enabled', 0) - fd = open(plugin_conf, 'rwa+') - cfg.write(fd) - fd.close() - - def subscribe(self, **kwargs): - raise NotImplementedError("Must be implemented by a sub-class") - - class Rhn(RegistrationBase): - def __init__(self, username=None, password=None): + def __init__(self, module, username=None, password=None): RegistrationBase.__init__(self, username, password) self.config = self.load_config() @@ -271,7 +200,7 @@ class Rhn(RegistrationBase): register_cmd += " --activationkey '%s'" % activationkey # FIXME - support --profilename # FIXME - support --systemorgid - run_command(register_cmd) + rc, stdout, stderr = self.module.run_command(register_command, check_rc=True) def api(self, method, *args): ''' @@ -309,14 +238,14 @@ class Rhn(RegistrationBase): Subscribe to requested yum repositories using 'rhn-channel' command ''' rhn_channel_cmd = "rhn-channel --user='%s' --password='%s'" % (self.username, self.password) - (stdout, stderr, rc) = run_command(rhn_channel_cmd + " --available-channels") + rc, stdout, stderr = self.module.run_command(rhn_channel_cmd + " --available-channels", check_rc=True) # Enable requested repoid's for wanted_channel in channels: # Each inserted repo regexp will be matched. If no match, no success. 
for availaible_channel in stdout.rstrip().split('\n'): # .rstrip() because of \n at the end -> empty string at the end if re.search(wanted_repo, available_channel): - run_command(rhn_channel_cmd + " --add --channel=%s" % available_channel) + rc, stdout, stderr = self.module.run_command(rhn_channel_cmd + " --add --channel=%s" % available_channel, check_rc=True) def main(): @@ -379,4 +308,6 @@ def main(): # import module snippets from ansible.module_utils.basic import * +from ansible.module_utils.redhat import * + main() diff --git a/library/packaging/urpmi b/library/packaging/urpmi index b001ed94de..72dfef0201 100644 --- a/library/packaging/urpmi +++ b/library/packaging/urpmi @@ -91,7 +91,8 @@ def query_package(module, name): # rpm -q returns 0 if the package is installed, # 1 if it is not installed - rc = os.system("rpm -q %s" % (name)) + cmd = "rpm -q %s" % (name) + rc, stdout, stderr = module.run_command(cmd, check_rc=False) if rc == 0: return True else: @@ -103,13 +104,14 @@ def query_package_provides(module, name): # rpm -q returns 0 if the package is installed, # 1 if it is not installed - rc = os.system("rpm -q --provides %s >/dev/null" % (name)) + cmd = "rpm -q --provides %s >/dev/null" % (name) + rc, stdout, stderr = module.run_command(cmd, check_rc=False) return rc == 0 def update_package_db(module): - rc = os.system("urpmi.update -a -q") - + cmd = "urpmi.update -a -q" + rc, stdout, stderr = module.run_command(cmd, check_rc=False) if rc != 0: module.fail_json(msg="could not update package db") @@ -123,7 +125,8 @@ def remove_packages(module, packages): if not query_package(module, package): continue - rc = os.system("%s --auto %s > /dev/null" % (URPME_PATH, package)) + cmd = "%s --auto %s > /dev/null" % (URPME_PATH, package) + rc, stdout, stderr = module.run_command(cmd, check_rc=False) if rc != 0: module.fail_json(msg="failed to remove %s" % (package)) diff --git a/library/source_control/bzr b/library/source_control/bzr index bc2dfc3089..5217e46990 100644 --- a/library/source_control/bzr +++ b/library/source_control/bzr @@ -75,16 +75,17 @@ class Bzr(object): self.version = version self.bzr_path = bzr_path - def _command(self, args_list, **kwargs): + def _command(self, args_list, cwd=None, **kwargs): (rc, out, err) = self.module.run_command( - [self.bzr_path] + args_list, **kwargs) + [self.bzr_path] + args_list, cwd=cwd, **kwargs) return (rc, out, err) def get_version(self): '''samples the version of the bzr branch''' - os.chdir(self.dest) + cmd = "%s revno" % self.bzr_path - revno = os.popen(cmd).read().strip() + rc, stdout, stderr = self.module.run_command(cmd, cwd=self.dest) + revno = stdout.strip() return revno def clone(self): @@ -94,17 +95,18 @@ class Bzr(object): os.makedirs(dest_dirname) except: pass - os.chdir(dest_dirname) if self.version.lower() != 'head': args_list = ["branch", "-r", self.version, self.parent, self.dest] else: args_list = ["branch", self.parent, self.dest] - return self._command(args_list, check_rc=True) + return self._command(args_list, check_rc=True, cwd=dest_dirname) def has_local_mods(self): - os.chdir(self.dest) + cmd = "%s status -S" % self.bzr_path - lines = os.popen(cmd).read().splitlines() + rc, stdout, stderr = self.module.run_command(cmd, cwd=self.dest) + lines = stdout.splitlines() + lines = filter(lambda c: not re.search('^\\?\\?.*$', c), lines) return len(lines) > 0 @@ -114,30 +116,27 @@ class Bzr(object): Discards any changes to tracked files in the working tree since that commit. 
''' - os.chdir(self.dest) if not force and self.has_local_mods(): self.module.fail_json(msg="Local modifications exist in branch (force=no).") - return self._command(["revert"], check_rc=True) + return self._command(["revert"], check_rc=True, cwd=self.dest) def fetch(self): '''updates branch from remote sources''' - os.chdir(self.dest) if self.version.lower() != 'head': - (rc, out, err) = self._command(["pull", "-r", self.version]) + (rc, out, err) = self._command(["pull", "-r", self.version], cwd=self.dest) else: - (rc, out, err) = self._command(["pull"]) + (rc, out, err) = self._command(["pull"], cwd=self.dest) if rc != 0: self.module.fail_json(msg="Failed to pull") return (rc, out, err) def switch_version(self): '''once pulled, switch to a particular revno or revid''' - os.chdir(self.dest) if self.version.lower() != 'head': args_list = ["revert", "-r", self.version] else: args_list = ["revert"] - return self._command(args_list, check_rc=True) + return self._command(args_list, check_rc=True, cwd=self.dest) # =========================================== diff --git a/library/source_control/git b/library/source_control/git index ca876c666b..4f885c9400 100644 --- a/library/source_control/git +++ b/library/source_control/git @@ -181,11 +181,12 @@ def set_git_ssh(ssh_wrapper, key_file, ssh_opts): if ssh_opts: os.environ["GIT_SSH_OPTS"] = ssh_opts -def get_version(git_path, dest, ref="HEAD"): +def get_version(module, git_path, dest, ref="HEAD"): ''' samples the version of the git repo ''' - os.chdir(dest) + cmd = "%s rev-parse %s" % (git_path, ref) - sha = os.popen(cmd).read().rstrip("\n") + rc, stdout, stderr = module.run_command(cmd, cwd=dest) + sha = stdout.rstrip('\n') return sha def clone(git_path, module, repo, dest, remote, depth, version, bare, reference): @@ -195,7 +196,6 @@ def clone(git_path, module, repo, dest, remote, depth, version, bare, reference) os.makedirs(dest_dirname) except: pass - os.chdir(dest_dirname) cmd = [ git_path, 'clone' ] if bare: cmd.append('--bare') @@ -209,19 +209,19 @@ def clone(git_path, module, repo, dest, remote, depth, version, bare, reference) if reference: cmd.extend([ '--reference', str(reference) ]) cmd.extend([ repo, dest ]) - module.run_command(cmd, check_rc=True) + module.run_command(cmd, check_rc=True, cwd=dest_dirname) if bare: - os.chdir(dest) if remote != 'origin': - module.run_command([git_path, 'remote', 'add', remote, repo], check_rc=True) + module.run_command([git_path, 'remote', 'add', remote, repo], check_rc=True, cwd=dest) -def has_local_mods(git_path, dest, bare): +def has_local_mods(module, git_path, dest, bare): if bare: return False - os.chdir(dest) - cmd = "%s status -s" % (git_path,) - lines = os.popen(cmd).read().splitlines() - lines = filter(lambda c: not re.search('^\\?\\?.*$', c), lines) + + cmd = "%s status -s" % (git_path) + rc, stdout, stderr = module.run_command(cmd, cwd=dest) + lines = stdout.splitlines() + return len(lines) > 0 def reset(git_path, module, dest): @@ -230,16 +230,16 @@ def reset(git_path, module, dest): Discards any changes to tracked files in working tree since that commit. 
''' - os.chdir(dest) cmd = "%s reset --hard HEAD" % (git_path,) - return module.run_command(cmd, check_rc=True) + return module.run_command(cmd, check_rc=True, cwd=dest) def get_remote_head(git_path, module, dest, version, remote, bare): cloning = False + cwd = None if remote == module.params['repo']: cloning = True else: - os.chdir(dest) + cwd = dest if version == 'HEAD': if cloning: # cloning the repo, just get the remote's HEAD version @@ -255,7 +255,7 @@ def get_remote_head(git_path, module, dest, version, remote, bare): # appears to be a sha1. return as-is since it appears # cannot check for a specific sha1 on remote return version - (rc, out, err) = module.run_command(cmd, check_rc=True ) + (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=cwd) if len(out) < 1: module.fail_json(msg="Could not determine remote revision for %s" % version) rev = out.split()[0] @@ -270,10 +270,9 @@ def is_remote_tag(git_path, module, dest, remote, version): return False def get_branches(git_path, module, dest): - os.chdir(dest) branches = [] cmd = '%s branch -a' % (git_path,) - (rc, out, err) = module.run_command(cmd) + (rc, out, err) = module.run_command(cmd, cwd=dest) if rc != 0: module.fail_json(msg="Could not determine branch data - received %s" % out) for line in out.split('\n'): @@ -281,10 +280,9 @@ def get_branches(git_path, module, dest): return branches def get_tags(git_path, module, dest): - os.chdir(dest) tags = [] cmd = '%s tag' % (git_path,) - (rc, out, err) = module.run_command(cmd) + (rc, out, err) = module.run_command(cmd, cwd=dest) if rc != 0: module.fail_json(msg="Could not determine tag data - received %s" % out) for line in out.split('\n'): @@ -352,18 +350,17 @@ def get_head_branch(git_path, module, dest, remote, bare=False): def fetch(git_path, module, repo, dest, version, remote, bare): ''' updates repo from remote sources ''' - os.chdir(dest) if bare: - (rc, out1, err1) = module.run_command([git_path, 'fetch', remote, '+refs/heads/*:refs/heads/*']) + (rc, out1, err1) = module.run_command([git_path, 'fetch', remote, '+refs/heads/*:refs/heads/*'], cwd=dest) else: - (rc, out1, err1) = module.run_command("%s fetch %s" % (git_path, remote)) + (rc, out1, err1) = module.run_command("%s fetch %s" % (git_path, remote), cwd=dest) if rc != 0: module.fail_json(msg="Failed to download remote objects and refs") if bare: - (rc, out2, err2) = module.run_command([git_path, 'fetch', remote, '+refs/tags/*:refs/tags/*']) + (rc, out2, err2) = module.run_command([git_path, 'fetch', remote, '+refs/tags/*:refs/tags/*'], cwd=dest) else: - (rc, out2, err2) = module.run_command("%s fetch --tags %s" % (git_path, remote)) + (rc, out2, err2) = module.run_command("%s fetch --tags %s" % (git_path, remote), cwd=dest) if rc != 0: module.fail_json(msg="Failed to download remote objects and refs") (rc, out3, err3) = submodule_update(git_path, module, dest) @@ -371,28 +368,26 @@ def fetch(git_path, module, repo, dest, version, remote, bare): def submodule_update(git_path, module, dest): ''' init and update any submodules ''' - os.chdir(dest) # skip submodule commands if .gitmodules is not present if not os.path.exists(os.path.join(dest, '.gitmodules')): return (0, '', '') cmd = [ git_path, 'submodule', 'sync' ] - (rc, out, err) = module.run_command(cmd, check_rc=True) + (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest) cmd = [ git_path, 'submodule', 'update', '--init', '--recursive' ] - (rc, out, err) = module.run_command(cmd) + (rc, out, err) = module.run_command(cmd, cwd=dest) if rc != 0: 
module.fail_json(msg="Failed to init/update submodules") return (rc, out, err) def switch_version(git_path, module, dest, remote, version): ''' once pulled, switch to a particular SHA, tag, or branch ''' - os.chdir(dest) cmd = '' if version != 'HEAD': if is_remote_branch(git_path, module, dest, remote, version): if not is_local_branch(git_path, module, dest, version): cmd = "%s checkout --track -b %s %s/%s" % (git_path, version, remote, version) else: - (rc, out, err) = module.run_command("%s checkout --force %s" % (git_path, version)) + (rc, out, err) = module.run_command("%s checkout --force %s" % (git_path, version), cwd=dest) if rc != 0: module.fail_json(msg="Failed to checkout branch %s" % version) cmd = "%s reset --hard %s/%s" % (git_path, remote, version) @@ -400,11 +395,11 @@ def switch_version(git_path, module, dest, remote, version): cmd = "%s checkout --force %s" % (git_path, version) else: branch = get_head_branch(git_path, module, dest, remote) - (rc, out, err) = module.run_command("%s checkout --force %s" % (git_path, branch)) + (rc, out, err) = module.run_command("%s checkout --force %s" % (git_path, branch), cwd=dest) if rc != 0: module.fail_json(msg="Failed to checkout branch %s" % branch) cmd = "%s reset --hard %s" % (git_path, remote) - (rc, out1, err1) = module.run_command(cmd) + (rc, out1, err1) = module.run_command(cmd, cwd=dest) if rc != 0: if version != 'HEAD': module.fail_json(msg="Failed to checkout %s" % (version)) @@ -484,12 +479,12 @@ def main(): # Just return having found a repo already in the dest path # this does no checking that the repo is the actual repo # requested. - before = get_version(git_path, dest) + before = get_version(module, git_path, dest) module.exit_json(changed=False, before=before, after=before) else: # else do a pull - local_mods = has_local_mods(git_path, dest, bare) - before = get_version(git_path, dest) + local_mods = has_local_mods(module, git_path, dest, bare) + before = get_version(module, git_path, dest) if local_mods: # failure should happen regardless of check mode if not force: @@ -519,7 +514,7 @@ def main(): switch_version(git_path, module, dest, remote, version) # determine if we changed anything - after = get_version(git_path, dest) + after = get_version(module, git_path, dest) changed = False if before != after or local_mods: diff --git a/library/system/service b/library/system/service index 2e26a47b63..5180a14d82 100644 --- a/library/system/service +++ b/library/system/service @@ -207,7 +207,9 @@ class Service(object): os._exit(0) # Start the command - p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, preexec_fn=lambda: os.close(pipe[1])) + if isinstance(cmd, basestring): + cmd = shlex.split(cmd) + p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, preexec_fn=lambda: os.close(pipe[1])) stdout = "" stderr = "" fds = [p.stdout, p.stderr] diff --git a/library/system/setup b/library/system/setup index 941a5dcd31..1c156f6ce3 100644 --- a/library/system/setup +++ b/library/system/setup @@ -29,7 +29,6 @@ import socket import struct import datetime import getpass -import subprocess import ConfigParser import StringIO @@ -1430,7 +1429,8 @@ class LinuxNetwork(Network): """ platform = 'Linux' - def __init__(self): + def __init__(self, module): + self.module = module Network.__init__(self) def populate(self): @@ -1616,12 +1616,15 @@ class LinuxNetwork(Network): ips['all_ipv6_addresses'].append(address) ip_path = module.get_bin_path("ip") - primary_data = 
subprocess.Popen( - [ip_path, 'addr', 'show', 'primary', device], - stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0] - secondary_data = subprocess.Popen( - [ip_path, 'addr', 'show', 'secondary', device], - stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0] + + args = [ip_path, 'addr', 'show', 'primary', device] + rc, stdout, stderr = self.module.run_command(args) + primary_data = stdout + + args = [ip_path, 'addr', 'show', 'secondary', device] + rc, stdout, stderr = self.module.run_command(args) + secondary_data = stdout + parse_ip_output(primary_data) parse_ip_output(secondary_data, secondary=True) @@ -2281,11 +2284,11 @@ def get_file_content(path, default=None): data = default return data -def ansible_facts(): +def ansible_facts(module): facts = {} facts.update(Facts().populate()) facts.update(Hardware().populate()) - facts.update(Network().populate()) + facts.update(Network(module).populate()) facts.update(Virtual().populate()) return facts @@ -2294,7 +2297,7 @@ def ansible_facts(): def run_setup(module): setup_options = {} - facts = ansible_facts() + facts = ansible_facts(module) for (k, v) in facts.items(): setup_options["ansible_%s" % k.replace('-', '_')] = v diff --git a/library/web_infrastructure/django_manage b/library/web_infrastructure/django_manage index 68eb92c1bf..b02a9398f5 100644 --- a/library/web_infrastructure/django_manage +++ b/library/web_infrastructure/django_manage @@ -232,7 +232,6 @@ def main(): _ensure_virtualenv(module) - os.chdir(app_path) cmd = "python manage.py %s" % (command, ) if command in noinput_commands: @@ -251,7 +250,7 @@ def main(): if module.params[param]: cmd = '%s %s' % (cmd, module.params[param]) - rc, out, err = module.run_command(cmd) + rc, out, err = module.run_command(cmd, cwd=app_path) if rc != 0: if command == 'createcachetable' and 'table' in err and 'already exists' in err: out = 'Already exists.' 
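
Most of the module patches above share one refactor: working-directory changes and raw os.popen()/os.system()/subprocess calls are replaced by AnsibleModule.run_command(), with the working directory passed via cwd= where one is needed, so a module never mutates its own process-wide current directory. A minimal sketch of that pattern follows; the module name, option and command are illustrative placeholders, not taken from any of the patched modules.

    # Sketch of the run_command/cwd pattern used throughout the patches above.
    # Real 1.x modules pull AnsibleModule in via the
    # "from ansible.module_utils.basic import *" snippet at the bottom of the
    # file; the explicit import here only keeps the sketch self-contained.
    from ansible.module_utils.basic import AnsibleModule

    def get_revision(module, repo_dir):
        # cwd= replaces the old os.chdir(repo_dir); check_rc=True makes
        # run_command call fail_json() itself on a non-zero exit status.
        rc, stdout, stderr = module.run_command('git rev-parse HEAD',
                                                cwd=repo_dir, check_rc=True)
        return stdout.strip()

    def main():
        module = AnsibleModule(argument_spec=dict(dest=dict(required=True)))
        sha = get_revision(module, module.params['dest'])
        module.exit_json(changed=False, sha=sha)

    main()
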
From 2d478b16279fa2d6eda1c8ebd5a1881b4172f69c Mon Sep 17 00:00:00 2001 From: James Tanner Date: Mon, 10 Mar 2014 16:15:44 -0500 Subject: [PATCH 27/29] Implement new default cipher class AES256 --- bin/ansible-vault | 8 +- lib/ansible/utils/vault.py | 247 +++++++++++++++--- test/units/TestVault.py | 56 ++-- test/units/TestVaultEditor.py | 141 ++++++++++ .../units/vault_test_data/foo-ansible-1.0.yml | 4 + .../units/vault_test_data/foo-ansible-1.1.yml | 6 + 6 files changed, 410 insertions(+), 52 deletions(-) create mode 100644 test/units/TestVaultEditor.py create mode 100644 test/units/vault_test_data/foo-ansible-1.0.yml create mode 100644 test/units/vault_test_data/foo-ansible-1.1.yml diff --git a/bin/ansible-vault b/bin/ansible-vault index 902653d40b..2c8094d13b 100755 --- a/bin/ansible-vault +++ b/bin/ansible-vault @@ -52,7 +52,7 @@ def build_option_parser(action): sys.exit() # options for all actions - #parser.add_option('-c', '--cipher', dest='cipher', default="AES", help="cipher to use") + #parser.add_option('-c', '--cipher', dest='cipher', default="AES256", help="cipher to use") parser.add_option('--debug', dest='debug', action="store_true", help="debug") parser.add_option('--vault-password-file', dest='password_file', help="vault password file") @@ -119,7 +119,7 @@ def execute_create(args, options, parser): else: password = _read_password(options.password_file) - cipher = 'AES' + cipher = 'AES256' if hasattr(options, 'cipher'): cipher = options.cipher @@ -133,7 +133,7 @@ def execute_decrypt(args, options, parser): else: password = _read_password(options.password_file) - cipher = 'AES' + cipher = 'AES256' if hasattr(options, 'cipher'): cipher = options.cipher @@ -169,7 +169,7 @@ def execute_encrypt(args, options, parser): else: password = _read_password(options.password_file) - cipher = 'AES' + cipher = 'AES256' if hasattr(options, 'cipher'): cipher = options.cipher diff --git a/lib/ansible/utils/vault.py b/lib/ansible/utils/vault.py index 9a43fee1b9..169dc8333b 100644 --- a/lib/ansible/utils/vault.py +++ b/lib/ansible/utils/vault.py @@ -30,6 +30,22 @@ from binascii import hexlify from binascii import unhexlify from ansible import constants as C +from Crypto.Hash import SHA256, HMAC + +# Counter import fails for 2.0.1, requires >= 2.6.1 from pip +try: + from Crypto.Util import Counter + HAS_COUNTER = True +except ImportError: + HAS_COUNTER = False + +# KDF import fails for 2.0.1, requires >= 2.6.1 from pip +try: + from Crypto.Protocol.KDF import PBKDF2 + HAS_PBKDF2 = True +except ImportError: + HAS_PBKDF2 = False + # AES IMPORTS try: from Crypto.Cipher import AES as AES @@ -37,15 +53,17 @@ try: except ImportError: HAS_AES = False +CRYPTO_UPGRADE = "ansible-vault requires a newer version of pycrypto than the one installed on your platform. 
You may fix this with OS-specific commands such as: rpm -e --nodeps python-crypto; pip install pycrypto" + HEADER='$ANSIBLE_VAULT' -CIPHER_WHITELIST=['AES'] +CIPHER_WHITELIST=['AES', 'AES256'] class VaultLib(object): def __init__(self, password): self.password = password self.cipher_name = None - self.version = '1.0' + self.version = '1.1' def is_encrypted(self, data): if data.startswith(HEADER): @@ -59,7 +77,8 @@ class VaultLib(object): raise errors.AnsibleError("data is already encrypted") if not self.cipher_name: - raise errors.AnsibleError("the cipher must be set before encrypting data") + self.cipher_name = "AES256" + #raise errors.AnsibleError("the cipher must be set before encrypting data") if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST: cipher = globals()['Vault' + self.cipher_name] @@ -67,13 +86,17 @@ class VaultLib(object): else: raise errors.AnsibleError("%s cipher could not be found" % self.cipher_name) + """ # combine sha + data this_sha = sha256(data).hexdigest() tmp_data = this_sha + "\n" + data + """ + # encrypt sha + data - tmp_data = this_cipher.encrypt(tmp_data, self.password) + enc_data = this_cipher.encrypt(data, self.password) + # add header - tmp_data = self._add_headers_and_hexify_encrypted_data(tmp_data) + tmp_data = self._add_header(enc_data) return tmp_data def decrypt(self, data): @@ -83,8 +106,9 @@ class VaultLib(object): if not self.is_encrypted(data): raise errors.AnsibleError("data is not encrypted") - # clean out header, hex and sha - data = self._split_headers_and_get_unhexified_data(data) + # clean out header + data = self._split_header(data) + # create the cipher object if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST: @@ -96,33 +120,26 @@ class VaultLib(object): # try to unencrypt data data = this_cipher.decrypt(data, self.password) - # split out sha and verify decryption - split_data = data.split("\n") - this_sha = split_data[0] - this_data = '\n'.join(split_data[1:]) - test_sha = sha256(this_data).hexdigest() - if this_sha != test_sha: - raise errors.AnsibleError("Decryption failed") + return data - return this_data + def _add_header(self, data): + # combine header and encrypted data in 80 char columns - def _add_headers_and_hexify_encrypted_data(self, data): - # combine header and hexlified encrypted data in 80 char columns - - tmpdata = hexlify(data) - tmpdata = [tmpdata[i:i+80] for i in range(0, len(tmpdata), 80)] + #tmpdata = hexlify(data) + tmpdata = [data[i:i+80] for i in range(0, len(data), 80)] if not self.cipher_name: raise errors.AnsibleError("the cipher must be set before adding a header") dirty_data = HEADER + ";" + str(self.version) + ";" + self.cipher_name + "\n" + for l in tmpdata: dirty_data += l + '\n' return dirty_data - def _split_headers_and_get_unhexified_data(self, data): + def _split_header(self, data): # used by decrypt tmpdata = data.split('\n') @@ -130,14 +147,22 @@ class VaultLib(object): self.version = str(tmpheader[1].strip()) self.cipher_name = str(tmpheader[2].strip()) - clean_data = ''.join(tmpdata[1:]) + clean_data = '\n'.join(tmpdata[1:]) + """ # strip out newline, join, unhex clean_data = [ x.strip() for x in clean_data ] clean_data = unhexlify(''.join(clean_data)) + """ return clean_data + def __enter__(self): + return self + + def __exit__(self, *err): + pass + class VaultEditor(object): # uses helper methods for write_file(self, filename, data) # to write a file so that code isn't duplicated for simple @@ -153,6 +178,9 @@ class 
VaultEditor(object): def create_file(self): """ create a new encrypted file """ + if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: + raise errors.AnsibleError(CRYPTO_UPGRADE) + if os.path.isfile(self.filename): raise errors.AnsibleError("%s exists, please use 'edit' instead" % self.filename) @@ -166,6 +194,10 @@ class VaultEditor(object): self.write_data(enc_data, self.filename) def decrypt_file(self): + + if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: + raise errors.AnsibleError(CRYPTO_UPGRADE) + if not os.path.isfile(self.filename): raise errors.AnsibleError("%s does not exist" % self.filename) @@ -179,6 +211,9 @@ class VaultEditor(object): def edit_file(self): + if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: + raise errors.AnsibleError(CRYPTO_UPGRADE) + # decrypt to tmpfile tmpdata = self.read_data(self.filename) this_vault = VaultLib(self.password) @@ -191,9 +226,11 @@ class VaultEditor(object): call([EDITOR, tmp_path]) new_data = self.read_data(tmp_path) - # create new vault and set cipher to old + # create new vault new_vault = VaultLib(self.password) - new_vault.cipher_name = this_vault.cipher_name + + # we want the cipher to default to AES256 + #new_vault.cipher_name = this_vault.cipher_name # encrypt new data a write out to tmp enc_data = new_vault.encrypt(new_data) @@ -203,6 +240,10 @@ class VaultEditor(object): self.shuffle_files(tmp_path, self.filename) def encrypt_file(self): + + if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: + raise errors.AnsibleError(CRYPTO_UPGRADE) + if not os.path.isfile(self.filename): raise errors.AnsibleError("%s does not exist" % self.filename) @@ -216,14 +257,20 @@ class VaultEditor(object): raise errors.AnsibleError("%s is already encrypted" % self.filename) def rekey_file(self, new_password): + + if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: + raise errors.AnsibleError(CRYPTO_UPGRADE) + # decrypt tmpdata = self.read_data(self.filename) this_vault = VaultLib(self.password) dec_data = this_vault.decrypt(tmpdata) - # create new vault, set cipher to old and password to new + # create new vault new_vault = VaultLib(new_password) - new_vault.cipher_name = this_vault.cipher_name + + # we want to force cipher to the default + #new_vault.cipher_name = this_vault.cipher_name # re-encrypt data and re-write file enc_data = new_vault.encrypt(dec_data) @@ -254,11 +301,14 @@ class VaultEditor(object): class VaultAES(object): + # this version has been obsoleted by the VaultAES256 class + # which uses encrypt-then-mac (fixing order) and also improving the KDF used + # code remains for upgrade purposes only # http://stackoverflow.com/a/16761459 def __init__(self): if not HAS_AES: - raise errors.AnsibleError("pycrypto is not installed. 
Fix this with your package manager, for instance, yum-install python-crypto OR (apt equivalent)") + raise errors.AnsibleError(CRYPTO_UPGRADE) def aes_derive_key_and_iv(self, password, salt, key_length, iv_length): @@ -278,7 +328,12 @@ class VaultAES(object): """ Read plaintext data from in_file and write encrypted to out_file """ - in_file = BytesIO(data) + + # combine sha + data + this_sha = sha256(data).hexdigest() + tmp_data = this_sha + "\n" + data + + in_file = BytesIO(tmp_data) in_file.seek(0) out_file = BytesIO() @@ -301,14 +356,21 @@ class VaultAES(object): out_file.write(cipher.encrypt(chunk)) out_file.seek(0) - return out_file.read() + enc_data = out_file.read() + tmp_data = hexlify(enc_data) + return tmp_data + + def decrypt(self, data, password, key_length=32): """ Read encrypted data from in_file and write decrypted to out_file """ # http://stackoverflow.com/a/14989032 + data = ''.join(data.split('\n')) + data = unhexlify(data) + in_file = BytesIO(data) in_file.seek(0) out_file = BytesIO() @@ -330,6 +392,129 @@ class VaultAES(object): # reset the stream pointer to the beginning out_file.seek(0) - return out_file.read() + new_data = out_file.read() + + # split out sha and verify decryption + split_data = new_data.split("\n") + this_sha = split_data[0] + this_data = '\n'.join(split_data[1:]) + test_sha = sha256(this_data).hexdigest() + + if this_sha != test_sha: + raise errors.AnsibleError("Decryption failed") + + #return out_file.read() + return this_data + + +class VaultAES256(object): + + """ + Vault implementation using AES-CTR with an HMAC-SHA256 authentication code. + Keys are derived using PBKDF2 + """ + + # http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html + + def gen_key_initctr(self, password, salt): + # 16 for AES 128, 32 for AES256 + keylength = 32 + + # match the size used for counter.new to avoid extra work + ivlength = 16 + + hash_function = SHA256 + + # make two keys and one iv + pbkdf2_prf = lambda p, s: HMAC.new(p, s, hash_function).digest() + + if not HAS_PBKDF2: + raise errors.AnsibleError(CRYPTO_UPGRADE) + + derivedkey = PBKDF2(password, salt, dkLen=(2 * keylength) + ivlength, + count=10000, prf=pbkdf2_prf) + + #import epdb; epdb.st() + key1 = derivedkey[:keylength] + key2 = derivedkey[keylength:(keylength * 2)] + iv = derivedkey[(keylength * 2):(keylength * 2) + ivlength] + + return key1, key2, hexlify(iv) + + + def encrypt(self, data, password): + + salt = os.urandom(32) + key1, key2, iv = self.gen_key_initctr(password, salt) + + # PKCS#7 PAD DATA http://tools.ietf.org/html/rfc5652#section-6.3 + bs = AES.block_size + padding_length = (bs - len(data) % bs) or bs + data += padding_length * chr(padding_length) + + # COUNTER.new PARAMETERS + # 1) nbits (integer) - Length of the counter, in bits. + # 2) initial_value (integer) - initial value of the counter. 
"iv" from gen_key_initctr + + if not HAS_COUNTER: + raise errors.AnsibleError(CRYPTO_UPGRADE) + ctr = Counter.new(128, initial_value=long(iv, 16)) + + # AES.new PARAMETERS + # 1) AES key, must be either 16, 24, or 32 bytes long -- "key" from gen_key_initctr + # 2) MODE_CTR, is the recommended mode + # 3) counter= + + cipher = AES.new(key1, AES.MODE_CTR, counter=ctr) + + # ENCRYPT PADDED DATA + cryptedData = cipher.encrypt(data) + + # COMBINE SALT, DIGEST AND DATA + hmac = HMAC.new(key2, cryptedData, SHA256) + message = "%s\n%s\n%s" % ( hexlify(salt), hmac.hexdigest(), hexlify(cryptedData) ) + message = hexlify(message) + return message + + def decrypt(self, data, password): + + # SPLIT SALT, DIGEST, AND DATA + data = ''.join(data.split("\n")) + data = unhexlify(data) + salt, cryptedHmac, cryptedData = data.split("\n", 2) + salt = unhexlify(salt) + cryptedData = unhexlify(cryptedData) + + key1, key2, iv = self.gen_key_initctr(password, salt) + + # EXIT EARLY IF DIGEST DOESN'T MATCH + hmacDecrypt = HMAC.new(key2, cryptedData, SHA256) + if not self.is_equal(cryptedHmac, hmacDecrypt.hexdigest()): + return None + + # SET THE COUNTER AND THE CIPHER + if not HAS_COUNTER: + raise errors.AnsibleError(CRYPTO_UPGRADE) + ctr = Counter.new(128, initial_value=long(iv, 16)) + cipher = AES.new(key1, AES.MODE_CTR, counter=ctr) + + # DECRYPT PADDED DATA + decryptedData = cipher.decrypt(cryptedData) + + # UNPAD DATA + padding_length = ord(decryptedData[-1]) + decryptedData = decryptedData[:-padding_length] + + return decryptedData + + def is_equal(self, a, b): + # http://codahale.com/a-lesson-in-timing-attacks/ + if len(a) != len(b): + return False + + result = 0 + for x, y in zip(a, b): + result |= ord(x) ^ ord(y) + return result == 0 + - diff --git a/test/units/TestVault.py b/test/units/TestVault.py index f42188057f..415d5c14aa 100644 --- a/test/units/TestVault.py +++ b/test/units/TestVault.py @@ -12,6 +12,21 @@ from nose.plugins.skip import SkipTest from ansible import errors from ansible.utils.vault import VaultLib + +# Counter import fails for 2.0.1, requires >= 2.6.1 from pip +try: + from Crypto.Util import Counter + HAS_COUNTER = True +except ImportError: + HAS_COUNTER = False + +# KDF import fails for 2.0.1, requires >= 2.6.1 from pip +try: + from Crypto.Protocol.KDF import PBKDF2 + HAS_PBKDF2 = True +except ImportError: + HAS_PBKDF2 = False + # AES IMPORTS try: from Crypto.Cipher import AES as AES @@ -26,8 +41,8 @@ class TestVaultLib(TestCase): slots = ['is_encrypted', 'encrypt', 'decrypt', - '_add_headers_and_hexify_encrypted_data', - '_split_headers_and_get_unhexified_data',] + '_add_header', + '_split_header',] for slot in slots: assert hasattr(v, slot), "VaultLib is missing the %s method" % slot @@ -41,9 +56,7 @@ class TestVaultLib(TestCase): v = VaultLib('ansible') v.cipher_name = "TEST" sensitive_data = "ansible" - sensitive_hex = hexlify(sensitive_data) - data = v._add_headers_and_hexify_encrypted_data(sensitive_data) - open("/tmp/awx.log", "a").write("data: %s\n" % data) + data = v._add_header(sensitive_data) lines = data.split('\n') assert len(lines) > 1, "failed to properly add header" header = lines[0] @@ -53,19 +66,18 @@ class TestVaultLib(TestCase): assert header_parts[0] == '$ANSIBLE_VAULT', "header does not start with $ANSIBLE_VAULT" assert header_parts[1] == v.version, "header version is incorrect" assert header_parts[2] == 'TEST', "header does end with cipher name" - assert lines[1] == sensitive_hex - def test_remove_header(self): + def test_split_header(self): v = 
VaultLib('ansible') - data = "$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify("ansible") - rdata = v._split_headers_and_get_unhexified_data(data) + data = "$ANSIBLE_VAULT;9.9;TEST\nansible" + rdata = v._split_header(data) lines = rdata.split('\n') assert lines[0] == "ansible" assert v.cipher_name == 'TEST', "cipher name was not set" assert v.version == "9.9" - def test_encyrpt_decrypt(self): - if not HAS_AES: + def test_encrypt_decrypt_aes(self): + if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: raise SkipTest v = VaultLib('ansible') v.cipher_name = 'AES' @@ -74,8 +86,18 @@ class TestVaultLib(TestCase): assert enc_data != "foobar", "encryption failed" assert dec_data == "foobar", "decryption failed" + def test_encrypt_decrypt_aes256(self): + if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: + raise SkipTest + v = VaultLib('ansible') + v.cipher_name = 'AES256' + enc_data = v.encrypt("foobar") + dec_data = v.decrypt(enc_data) + assert enc_data != "foobar", "encryption failed" + assert dec_data == "foobar", "decryption failed" + def test_encrypt_encrypted(self): - if not HAS_AES: + if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: raise SkipTest v = VaultLib('ansible') v.cipher_name = 'AES' @@ -88,7 +110,7 @@ class TestVaultLib(TestCase): assert error_hit, "No error was thrown when trying to encrypt data with a header" def test_decrypt_decrypted(self): - if not HAS_AES: + if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: raise SkipTest v = VaultLib('ansible') data = "ansible" @@ -100,7 +122,8 @@ class TestVaultLib(TestCase): assert error_hit, "No error was thrown when trying to decrypt data without a header" def test_cipher_not_set(self): - if not HAS_AES: + # not setting the cipher should default to AES256 + if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: raise SkipTest v = VaultLib('ansible') data = "ansible" @@ -109,6 +132,5 @@ class TestVaultLib(TestCase): enc_data = v.encrypt(data) except errors.AnsibleError, e: error_hit = True - assert error_hit, "No error was thrown when trying to encrypt data without the cipher set" - - + assert not error_hit, "An error was thrown when trying to encrypt data without the cipher set" + assert v.cipher_name == "AES256", "cipher name is not set to AES256: %s" % v.cipher_name diff --git a/test/units/TestVaultEditor.py b/test/units/TestVaultEditor.py new file mode 100644 index 0000000000..4d3f99e89a --- /dev/null +++ b/test/units/TestVaultEditor.py @@ -0,0 +1,141 @@ +#!/usr/bin/env python + +from unittest import TestCase +import getpass +import os +import shutil +import time +import tempfile +from binascii import unhexlify +from binascii import hexlify +from nose.plugins.skip import SkipTest + +from ansible import errors +from ansible.utils.vault import VaultLib +from ansible.utils.vault import VaultEditor + +# Counter import fails for 2.0.1, requires >= 2.6.1 from pip +try: + from Crypto.Util import Counter + HAS_COUNTER = True +except ImportError: + HAS_COUNTER = False + +# KDF import fails for 2.0.1, requires >= 2.6.1 from pip +try: + from Crypto.Protocol.KDF import PBKDF2 + HAS_PBKDF2 = True +except ImportError: + HAS_PBKDF2 = False + +# AES IMPORTS +try: + from Crypto.Cipher import AES as AES + HAS_AES = True +except ImportError: + HAS_AES = False + +class TestVaultEditor(TestCase): + + def test_methods_exist(self): + v = VaultEditor(None, None, None) + slots = ['create_file', + 'decrypt_file', + 'edit_file', + 'encrypt_file', + 'rekey_file', + 'read_data', + 'write_data', + 'shuffle_files'] + for slot in slots: + assert hasattr(v, slot), 
"VaultLib is missing the %s method" % slot + + def test_decrypt_1_0(self): + if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: + raise SkipTest + dirpath = tempfile.mkdtemp() + filename = os.path.join(dirpath, "foo-ansible-1.0.yml") + shutil.rmtree(dirpath) + shutil.copytree("vault_test_data", dirpath) + ve = VaultEditor(None, "ansible", filename) + + # make sure the password functions for the cipher + error_hit = False + try: + ve.decrypt_file() + except errors.AnsibleError, e: + error_hit = True + + # verify decrypted content + f = open(filename, "rb") + fdata = f.read() + f.close() + + shutil.rmtree(dirpath) + assert error_hit == False, "error decrypting 1.0 file" + assert fdata.strip() == "foo", "incorrect decryption of 1.0 file: %s" % fdata.strip() + + def test_decrypt_1_1(self): + if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: + raise SkipTest + dirpath = tempfile.mkdtemp() + filename = os.path.join(dirpath, "foo-ansible-1.1.yml") + shutil.rmtree(dirpath) + shutil.copytree("vault_test_data", dirpath) + ve = VaultEditor(None, "ansible", filename) + + # make sure the password functions for the cipher + error_hit = False + try: + ve.decrypt_file() + except errors.AnsibleError, e: + error_hit = True + + # verify decrypted content + f = open(filename, "rb") + fdata = f.read() + f.close() + + shutil.rmtree(dirpath) + assert error_hit == False, "error decrypting 1.0 file" + assert fdata.strip() == "foo", "incorrect decryption of 1.0 file: %s" % fdata.strip() + + + def test_rekey_migration(self): + if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2: + raise SkipTest + dirpath = tempfile.mkdtemp() + filename = os.path.join(dirpath, "foo-ansible-1.0.yml") + shutil.rmtree(dirpath) + shutil.copytree("vault_test_data", dirpath) + ve = VaultEditor(None, "ansible", filename) + + # make sure the password functions for the cipher + error_hit = False + try: + ve.rekey_file('ansible2') + except errors.AnsibleError, e: + error_hit = True + + # verify decrypted content + f = open(filename, "rb") + fdata = f.read() + f.close() + + shutil.rmtree(dirpath) + assert error_hit == False, "error rekeying 1.0 file to 1.1" + + # ensure filedata can be decrypted, is 1.1 and is AES256 + vl = VaultLib("ansible2") + dec_data = None + error_hit = False + try: + dec_data = vl.decrypt(fdata) + except errors.AnsibleError, e: + error_hit = True + + assert vl.cipher_name == "AES256", "wrong cipher name set after rekey: %s" % vl.cipher_name + assert error_hit == False, "error decrypting migrated 1.0 file" + assert dec_data.strip() == "foo", "incorrect decryption of rekeyed/migrated file: %s" % dec_data + + diff --git a/test/units/vault_test_data/foo-ansible-1.0.yml b/test/units/vault_test_data/foo-ansible-1.0.yml new file mode 100644 index 0000000000..f71ddf10ce --- /dev/null +++ b/test/units/vault_test_data/foo-ansible-1.0.yml @@ -0,0 +1,4 @@ +$ANSIBLE_VAULT;1.0;AES +53616c7465645f5fd0026926a2d415a28a2622116273fbc90e377225c12a347e1daf4456d36a77f9 +9ad98d59f61d06a4b66718d855f16fb7bdfe54d1ec8aeaa4d06c2dc1fa630ae1846a029877f0eeb1 +83c62ffb04c2512995e815de4b4d29ed diff --git a/test/units/vault_test_data/foo-ansible-1.1.yml b/test/units/vault_test_data/foo-ansible-1.1.yml new file mode 100644 index 0000000000..d9a4a448a6 --- /dev/null +++ b/test/units/vault_test_data/foo-ansible-1.1.yml @@ -0,0 +1,6 @@ +$ANSIBLE_VAULT;1.1;AES256 +62303130653266653331306264616235333735323636616539316433666463323964623162386137 +3961616263373033353631316333623566303532663065310a393036623466376263393961326530 
+64336561613965383835646464623865663966323464653236343638373165343863623638316664 +3631633031323837340a396530313963373030343933616133393566366137363761373930663833 +3739 From 43203bac5669d4b8cd07bd5f0cf80672fd60907a Mon Sep 17 00:00:00 2001 From: Michael DeHaan Date: Mon, 10 Mar 2014 17:23:37 -0400 Subject: [PATCH 28/29] Update the message about pycrypto to include that python-devel must be installed. --- lib/ansible/utils/vault.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ansible/utils/vault.py b/lib/ansible/utils/vault.py index 169dc8333b..118f579005 100644 --- a/lib/ansible/utils/vault.py +++ b/lib/ansible/utils/vault.py @@ -53,7 +53,7 @@ try: except ImportError: HAS_AES = False -CRYPTO_UPGRADE = "ansible-vault requires a newer version of pycrypto than the one installed on your platform. You may fix this with OS-specific commands such as: rpm -e --nodeps python-crypto; pip install pycrypto" +CRYPTO_UPGRADE = "ansible-vault requires a newer version of pycrypto than the one installed on your platform. You may fix this with OS-specific commands such as: yum install python-devel; rpm -e --nodeps python-crypto; pip install pycrypto" HEADER='$ANSIBLE_VAULT' CIPHER_WHITELIST=['AES', 'AES256'] From 16c05cbc8892041cacba3ff87c86e68b86b4511b Mon Sep 17 00:00:00 2001 From: Richard C Isaacson Date: Mon, 10 Mar 2014 17:40:36 -0500 Subject: [PATCH 29/29] Update files for 1.5.1 release. --- CHANGELOG.md | 7 +++++++ RELEASES.txt | 1 + VERSION | 2 +- docsite/rst/index.rst | 2 +- lib/ansible/__init__.py | 2 +- packaging/debian/changelog | 8 +++++++- packaging/rpm/ansible.spec | 7 +++++-- 7 files changed, 23 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1ff78020e6..0ab6ae6955 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,13 @@ Ansible Changes By Release ========================== +## 1.5.1 "Love Walks In" - March 10, 2014 + +- Force command action to not be executed by the shell unless specifically enabled. +- Validate SSL certs accessed through urllib*. +- Implement new default cipher class AES256 in ansible-vault. +- Misc bug fixes. + ## 1.5 "Love Walks In" - February 28, 2014 Major features/changes: diff --git a/RELEASES.txt b/RELEASES.txt index 6335829881..680d313329 100644 --- a/RELEASES.txt +++ b/RELEASES.txt @@ -2,6 +2,7 @@ Ansible Releases at a Glance ============================ 1.6 "The Cradle Will Rock" - NEXT +1.5.1 "Love Walks In" -------- 03-10-2014 1.5 "Love Walks In" -------- 02-28-2014 1.4.5 "Could This Be Magic?" - 02-12-2014 1.4.4 "Could This Be Magic?" - 01-06-2014 diff --git a/VERSION b/VERSION index c239c60cba..26ca594609 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.5 +1.5.1 diff --git a/docsite/rst/index.rst b/docsite/rst/index.rst index d507fda164..5f6ca7d63b 100644 --- a/docsite/rst/index.rst +++ b/docsite/rst/index.rst @@ -16,7 +16,7 @@ We believe simplicity is relevant to all sizes of environments and design for bu Ansible manages machines in an agentless manner. There is never a question of how to upgrade remote daemons or the problem of not being able to manage systems because daemons are uninstalled. As OpenSSH is one of the most peer reviewed open source components, the security exposure of using the tool is greatly reduced. Ansible is decentralized -- it relies on your existing OS credentials to control access to remote machines; if needed it can easily connect with Kerberos, LDAP, and other centralized authentication management systems. 
-This documentation covers the current released version of Ansible (1.4.5) and also some development version features (1.5). For recent features, in each section, the version of Ansible where the feature is added is indicated. Ansible, Inc releases a new major release of Ansible approximately every 2 months. The core application evolves somewhat conservatively, valuing simplicity in language design and setup, while the community around new modules and plugins being developed and contributed moves very very quickly, typically adding 20 or so new modules in each release. +This documentation covers the current released version of Ansible (1.5.1) and also some development version features (1.6.0). For recent features, in each section, the version of Ansible where the feature is added is indicated. Ansible, Inc releases a new major release of Ansible approximately every 2 months. The core application evolves somewhat conservatively, valuing simplicity in language design and setup, while the community around new modules and plugins being developed and contributed moves very very quickly, typically adding 20 or so new modules in each release. .. _an_introduction: diff --git a/lib/ansible/__init__.py b/lib/ansible/__init__.py index bad06025a1..663f95768e 100644 --- a/lib/ansible/__init__.py +++ b/lib/ansible/__init__.py @@ -14,5 +14,5 @@ # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . -__version__ = '1.5' +__version__ = '1.5.1' __author__ = 'Michael DeHaan' diff --git a/packaging/debian/changelog b/packaging/debian/changelog index c009bebb37..a65eaff5eb 100644 --- a/packaging/debian/changelog +++ b/packaging/debian/changelog @@ -1,8 +1,14 @@ +ansible (1.5.1) unstable; urgency=low + + * 1.5.1 release + + -- Michael DeHaan Mon, 10 March 2014 17:33:44 -0500 + ansible (1.5) unstable; urgency=low * 1.5 release - -- Michael DeHaan Fri, 28 February 2014 -0500 + -- Michael DeHaan Fri, 28 February 2014 00:00:00 -0500 ansible (1.4.5) unstable; urgency=low diff --git a/packaging/rpm/ansible.spec b/packaging/rpm/ansible.spec index c067bbe42e..3028da5c05 100644 --- a/packaging/rpm/ansible.spec +++ b/packaging/rpm/ansible.spec @@ -5,7 +5,7 @@ %endif Name: %{name} -Version: 1.5 +Version: 1.5.1 Release: 1%{?dist} Url: http://www.ansible.com Summary: SSH-based application deployment, configuration management, and IT orchestration platform @@ -102,7 +102,10 @@ rm -rf %{buildroot} %changelog -* Fri Feb 28 2014 Michael DeHaan - 1.5-0 +* Fri Mar 10 2014 Michael DeHaan - 1.5.1 +- Release 1.5.1 + +* Fri Feb 28 2014 Michael DeHaan - 1.5.0 - Release 1.5.0 * Wed Feb 12 2014 Michael DeHaan - 1.4.5
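
The "Implement new default cipher class AES256" entry in the 1.5.1 changelog above refers to patch 27: VaultAES256 writes a $ANSIBLE_VAULT;1.1;AES256 header followed by an 80-column hex dump of hexlify(salt) + "\n" + HMAC-SHA256 hexdigest + "\n" + hexlify(ciphertext), with the AES-CTR key, the HMAC key and the counter seed all derived by PBKDF2 (10000 rounds, HMAC-SHA256 as the PRF). The sketch below, assuming pycrypto >= 2.6 is installed, walks the same payload layout and integrity check as VaultAES256.decrypt(); it is an illustration only and, unlike the real code, compares digests with a plain == rather than the timing-safe is_equal().

    # Illustration of the v1.1 vault payload layout; not a replacement for
    # VaultAES256. Assumes pycrypto >= 2.6 (Counter, PBKDF2 and HMAC present).
    from binascii import unhexlify
    from Crypto.Hash import SHA256, HMAC
    from Crypto.Protocol.KDF import PBKDF2

    def vault_11_hmac_ok(vaulttext, password):
        lines = vaulttext.splitlines()
        # lines[0] is the "$ANSIBLE_VAULT;1.1;AES256" header from _add_header()
        payload = unhexlify(''.join(lines[1:]))
        salt_hex, stored_hmac, ciphertext_hex = payload.split("\n", 2)
        salt = unhexlify(salt_hex)
        ciphertext = unhexlify(ciphertext_hex)
        # same derivation as gen_key_initctr(): 32-byte AES key, 32-byte HMAC
        # key and 16-byte counter seed from PBKDF2 with an HMAC-SHA256 PRF
        prf = lambda p, s: HMAC.new(p, s, SHA256).digest()
        derived = PBKDF2(password, salt, dkLen=2 * 32 + 16, count=10000, prf=prf)
        hmac_key = derived[32:64]
        # the real code uses the constant-time is_equal(); '==' is only
        # acceptable in a sketch
        return HMAC.new(hmac_key, ciphertext, SHA256).hexdigest() == stored_hmac
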