Merge branch 'integration'

Conflicts:
	lib/ansible/playbook.py
	lib/ansible/runner.py
	library/apt
Michael DeHaan 2012-04-23 21:05:06 -04:00
commit c00699d0ef
46 changed files with 1673 additions and 578 deletions

@ -151,7 +151,7 @@ class PlaybookRunnerCallbacks(DefaultRunnerCallbacks):
print "failed: [%s] => %s => %s\n" % (host, invocation, utils.smjson(results))
def on_ok(self, host, host_result):
invocation = host_result.get('invocation',None)
invocation = host_result.get('invocation','')
if invocation.startswith('async_status'):
pass
elif not invocation or invocation.startswith('setup '):

@ -45,12 +45,12 @@ class Connection(object):
self.runner = runner
self.transport = transport
def connect(self, host):
def connect(self, host, port=None):
conn = None
if self.transport == 'local' and self._LOCALHOSTRE.search(host):
conn = LocalConnection(self.runner, host)
conn = LocalConnection(self.runner, host, None)
elif self.transport == 'paramiko':
conn = ParamikoConnection(self.runner, host)
conn = ParamikoConnection(self.runner, host, port)
if conn is None:
raise Exception("unsupported connection type")
return conn.connect()
@ -64,10 +64,13 @@ class Connection(object):
class ParamikoConnection(object):
''' SSH based connections with Paramiko '''
def __init__(self, runner, host):
def __init__(self, runner, host, port=None):
self.ssh = None
self.runner = runner
self.host = host
self.port = port
if port is None:
self.port = self.runner.remote_port
def _get_conn(self):
ssh = paramiko.SSHClient()
@ -75,9 +78,13 @@ class ParamikoConnection(object):
try:
ssh.connect(
self.host, username=self.runner.remote_user,
allow_agent=True, look_for_keys=True, password=self.runner.remote_pass,
timeout=self.runner.timeout, port=self.runner.remote_port
self.host,
username=self.runner.remote_user,
allow_agent=True,
look_for_keys=True,
password=self.runner.remote_pass,
timeout=self.runner.timeout,
port=self.port
)
except Exception, e:
if str(e).find("PID check failed") != -1:
@ -183,7 +190,7 @@ class LocalConnection(object):
self.runner = runner
self.host = host
def connect(self):
def connect(self, port=None):
''' connect to the local host; nothing to do here '''
return self
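Taken together with the inventory and runner hunks later in this commit, the intent is that a port set per host wins over the runner-wide default, and ParamikoConnection itself falls back to runner.remote_port when handed port=None. A minimal sketch of the resolution order (names assumed from the surrounding hunks, not the committed code):

def resolve_port(inventory, runner, host):
    # per-host ansible_ssh_port from the inventory, else the runner default
    host_vars = inventory.get_variables(host)
    return host_vars.get('ansible_ssh_port', runner.remote_port)

The resulting value is what Runner hands to Connection.connect(host, port) in the runner.py changes below.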

lib/ansible/inventory.py (new file, 292 lines)

@ -0,0 +1,292 @@
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
import fnmatch
import os
import subprocess
import constants as C
from ansible import errors
from ansible import utils
class Inventory(object):
""" Host inventory for ansible.
The inventory is either a simple text file with systems and [groups] of
systems, or a script that will be called with --list or --host.
"""
def __init__(self, host_list=C.DEFAULT_HOST_LIST):
self._restriction = None
self._variables = {}
if type(host_list) == list:
self.host_list = host_list
self.groups = dict(ungrouped=host_list)
self._is_script = False
return
inventory_file = os.path.expanduser(host_list)
if not os.path.exists(inventory_file):
raise errors.AnsibleFileNotFound("inventory file not found: %s" % host_list)
self.inventory_file = os.path.abspath(inventory_file)
if os.access(self.inventory_file, os.X_OK):
self.host_list, self.groups = self._parse_from_script()
self._is_script = True
else:
self.host_list, self.groups = self._parse_from_file()
self._is_script = False
# *****************************************************
# Public API
def list_hosts(self, pattern="all"):
""" Return a list of hosts [matching the pattern] """
if self._restriction is None:
host_list = self.host_list
else:
host_list = [ h for h in self.host_list if h in self._restriction ]
return [ h for h in host_list if self._matches(h, pattern) ]
def restrict_to(self, restriction):
""" Restrict list operations to the hosts given in restriction """
if type(restriction)!=list:
restriction = [ restriction ]
self._restriction = restriction
def lift_restriction(self):
""" Do not restrict list operations """
self._restriction = None
def get_variables(self, host):
""" Return the variables associated with this host. """
if host in self._variables:
return self._variables[host].copy()
if not self._is_script:
return {}
return self._get_variables_from_script(host)
# *****************************************************
def _parse_from_file(self):
''' parse a textual host file '''
results = []
groups = dict(ungrouped=[])
lines = file(self.inventory_file).read().split("\n")
if "---" in lines:
return self._parse_yaml()
group_name = 'ungrouped'
for item in lines:
item = item.lstrip().rstrip()
if item.startswith("#"):
# ignore commented out lines
pass
elif item.startswith("["):
# looks like a group
group_name = item.replace("[","").replace("]","").lstrip().rstrip()
groups[group_name] = []
elif item != "":
# looks like a regular host
if ":" in item:
# a port was specified
item, port = item.split(":")
try:
port = int(port)
except ValueError:
raise errors.AnsibleError("SSH port for %s in inventory (%s) should be numerical."%(item, port))
self._set_variable(item, "ansible_ssh_port", port)
groups[group_name].append(item)
if not item in results:
results.append(item)
return (results, groups)
# *****************************************************
def _parse_from_script(self):
''' evaluate a script that returns list of hosts by groups '''
results = []
groups = dict(ungrouped=[])
cmd = [self.inventory_file, '--list']
cmd = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False)
out, err = cmd.communicate()
rc = cmd.returncode
if rc:
raise errors.AnsibleError("%s: %s" % self.inventory_file, err)
try:
groups = utils.json_loads(out)
except:
raise errors.AnsibleError("invalid JSON response from script: %s" % self.inventory_file)
for (groupname, hostlist) in groups.iteritems():
for host in hostlist:
if host not in results:
results.append(host)
return (results, groups)
# *****************************************************
def _parse_yaml(self):
""" Load the inventory from a yaml file.
returns hosts and groups"""
data = utils.parse_yaml_from_file(self.inventory_file)
if type(data) != list:
raise errors.AnsibleError("YAML inventory should be a list.")
hosts = []
groups = {}
ungrouped = []
for item in data:
if type(item) == dict:
if "group" in item:
group_name = item["group"]
group_vars = []
if "vars" in item:
group_vars = item["vars"]
group_hosts = []
if "hosts" in item:
for host in item["hosts"]:
host_name = self._parse_yaml_host(host, group_vars)
group_hosts.append(host_name)
groups[group_name] = group_hosts
hosts.extend(group_hosts)
elif "host" in item:
host_name = self._parse_yaml_host(item)
hosts.append(host_name)
ungrouped.append(host_name)
else:
host_name = self._parse_yaml_host(item)
hosts.append(host_name)
ungrouped.append(host_name)
# filter duplicate hosts
output_hosts = []
for host in hosts:
if host not in output_hosts:
output_hosts.append(host)
if len(ungrouped) > 0 :
# hosts can be defined top-level, but also in a group
really_ungrouped = []
for host in ungrouped:
already_grouped = False
for name, group_hosts in groups.items():
if host in group_hosts:
already_grouped = True
if not already_grouped:
really_ungrouped.append(host)
groups["ungrouped"] = really_ungrouped
return output_hosts, groups
def _parse_yaml_host(self, item, variables=[]):
def set_variables(host, variables):
if type(variables) == list:
for variable in variables:
if len(variable) != 1:
raise errors.AnsibleError("Only one item expected in %s"%(variable))
k, v = variable.items()[0]
self._set_variable(host, k, v)
elif type(variables) == dict:
for k, v in variables.iteritems():
self._set_variable(host, k, v)
if type(item) in [str, unicode]:
set_variables(item, variables)
return item
elif type(item) == dict:
if "host" in item:
host_name = item["host"]
set_variables(host_name, variables)
if "vars" in item:
set_variables(host_name, item["vars"])
return host_name
else:
raise errors.AnsibleError("Unknown item in inventory: %s"%(item))
def _get_variables_from_script(self, host):
''' support per-system variables from external variable scripts, see web docs '''
cmd = [self.inventory_file, '--host', host]
cmd = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=False
)
out, err = cmd.communicate()
variables = {}
try:
variables = utils.json_loads(out)
except:
raise errors.AnsibleError("%s returned invalid result when called with hostname %s" % (
self.inventory_file,
host
))
return variables
def _set_variable(self, host, key, value):
if not host in self._variables:
self._variables[host] = {}
self._variables[host][key] = value
def _matches(self, host_name, pattern):
''' returns if a hostname is matched by the pattern '''
# a pattern is in fnmatch format but more than one pattern
# can be strung together with semicolons. ex:
# atlanta-web*.example.com;dc-web*.example.com
if host_name == '':
return False
pattern = pattern.replace(";",":")
subpatterns = pattern.split(":")
for subpattern in subpatterns:
if subpattern == 'all':
return True
if fnmatch.fnmatch(host_name, subpattern):
return True
elif subpattern in self.groups:
if host_name in self.groups[subpattern]:
return True
return False
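For orientation, the two inventory formats accepted by this new module look roughly like the samples below (illustrative only, not files from the repository). A plain text inventory groups hosts under [section] headers, and a host:port suffix is parsed into the ansible_ssh_port variable:

[webservers]
web1.example.com
web2.example.com:2222

An executable inventory is any file with the execute bit set; Inventory runs it with --list (and later --host <name> for per-host variables) and expects JSON on stdout in both cases. A minimal sketch of such a script, with hypothetical host names and variables:

#!/usr/bin/env python
# Illustrative external inventory script: --list must print a JSON mapping of
# group name -> list of hosts, --host <name> a JSON dict of variables.
import json
import sys

GROUPS = {"webservers": ["web1.example.com", "web2.example.com"],
          "ungrouped": ["db1.example.com"]}
VARS = {"web1.example.com": {"http_port": 80}}

if sys.argv[1] == "--list":
    print json.dumps(GROUPS)
elif sys.argv[1] == "--host":
    print json.dumps(VARS.get(sys.argv[2], {}))

Patterns handed to list_hosts() are fnmatch globs or group names; several can be chained with ':' or ';', for example inventory.list_hosts('webservers:atlanta-*').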

@ -17,11 +17,11 @@
#############################################
import ansible.inventory
import ansible.runner
import ansible.constants as C
from ansible import utils
from ansible import errors
import shlex
import os
import time
@ -58,17 +58,33 @@ class PlayBook(object):
remote_port = C.DEFAULT_REMOTE_PORT,
transport = C.DEFAULT_TRANSPORT,
override_hosts = None,
extra_vars = None,
debug = False,
verbose = False,
callbacks = None,
runner_callbacks = None,
stats = None):
stats = None,
sudo = False):
"""
playbook: path to a playbook file
host_list: path to a file like /etc/ansible/hosts
module_path: path to ansible modules, like /usr/share/ansible/
forks: desired level of parallelism
timeout: connection timeout
remote_user: run as this user if not specified in a particular play
remote_pass: use this remote password (for all plays) vs using SSH keys
sudo_pass: if sudo==True, and a password is required, this is the sudo password
remote_port: default remote port to use if not specified with the host or play
transport: how to connect to hosts that don't specify a transport (local, paramiko, etc)
override_hosts: skip the inventory file, just talk to these hosts
callbacks: output callbacks for the playbook
runner_callbacks: more callbacks, this time for the runner API
stats: holds aggregate data about events occurring to each host
sudo: if not specified per play, requests all plays use sudo mode
"""
if playbook is None or callbacks is None or runner_callbacks is None or stats is None:
raise Exception('missing required arguments')
self.host_list = host_list
self.module_path = module_path
self.forks = forks
self.timeout = timeout
@ -77,20 +93,23 @@ class PlayBook(object):
self.remote_port = remote_port
self.transport = transport
self.debug = debug
self.verbose = verbose
self.callbacks = callbacks
self.runner_callbacks = runner_callbacks
self.override_hosts = override_hosts
self.extra_vars = extra_vars
self.stats = stats
self.sudo = sudo
self.sudo_pass = sudo_pass
self.basedir = os.path.dirname(playbook)
self.playbook = self._parse_playbook(playbook)
self.host_list, self.groups = ansible.runner.Runner.parse_hosts(
host_list, override_hosts=self.override_hosts, extra_vars=self.extra_vars)
if override_hosts is not None:
if type(override_hosts) != list:
raise errors.AnsibleError("override hosts must be a list")
self.inventory = ansible.inventory.Inventory(override_hosts)
else:
self.inventory = ansible.inventory.Inventory(host_list)
# *****************************************************
def _get_vars(self, play, dirname):
@ -98,8 +117,18 @@ class PlayBook(object):
if play.get('vars') is None:
play['vars'] = {}
vars = play['vars']
if type(vars) != dict:
if type(vars) not in [dict, list]:
raise errors.AnsibleError("'vars' section must contain only key/value pairs")
# translate a list of vars into a dict
if type(vars) == list:
varlist = vars
vars = {}
for item in varlist:
k, v = item.items()[0]
vars[k] = v
play['vars'] = vars
vars_prompt = play.get('vars_prompt', {})
if type(vars_prompt) != dict:
raise errors.AnsibleError("'vars_prompt' section must contain only key/value pairs")
@ -178,10 +207,10 @@ class PlayBook(object):
if action is None:
raise errors.AnsibleError('action is required')
produced_task = task.copy()
produced_task['action'] = utils.template(action, dict(item=item))
produced_task['name'] = utils.template(name, dict(item=item))
produced_task['action'] = utils.template(action, dict(item=item), SETUP_CACHE)
produced_task['name'] = utils.template(name, dict(item=item), SETUP_CACHE)
if only_if:
produced_task['only_if'] = utils.template(only_if, dict(item=item))
produced_task['only_if'] = utils.template(only_if, dict(item=item), SETUP_CACHE)
new_tasks2.append(produced_task)
else:
new_tasks2.append(task)
@ -233,7 +262,6 @@ class PlayBook(object):
def _async_poll(self, runner, hosts, async_seconds, async_poll_interval, only_if):
''' launch an async job, if poll_interval is set, wait for completion '''
runner.host_list = hosts
runner.background = async_seconds
results = runner.run()
self.stats.compute(results, poll=True)
@ -257,7 +285,7 @@ class PlayBook(object):
return results
clock = async_seconds
runner.host_list = self.hosts_to_poll(results)
host_list = self.hosts_to_poll(results)
poll_results = results
while (clock >= 0):
@ -267,11 +295,13 @@ class PlayBook(object):
runner.module_name = 'async_status'
runner.background = 0
runner.pattern = '*'
self.inventory.restrict_to(host_list)
poll_results = runner.run()
self.stats.compute(poll_results, poll=True)
runner.host_list = self.hosts_to_poll(poll_results)
host_list = self.hosts_to_poll(poll_results)
self.inventory.lift_restriction()
if len(runner.host_list) == 0:
if len(host_list) == 0:
break
if poll_results is None:
break
@ -298,33 +328,40 @@ class PlayBook(object):
# *****************************************************
def _run_module(self, pattern, host_list, module, args, vars, remote_user,
async_seconds, async_poll_interval, only_if, sudo, transport):
def _run_module(self, pattern, module, args, vars, remote_user,
async_seconds, async_poll_interval, only_if, sudo, transport, port):
''' run a particular module step in a playbook '''
hosts = [ h for h in host_list if (h not in self.stats.failures) and (h not in self.stats.dark)]
hosts = [ h for h in self.inventory.list_hosts() if (h not in self.stats.failures) and (h not in self.stats.dark)]
self.inventory.restrict_to(hosts)
if port is None:
port=self.remote_port
runner = ansible.runner.Runner(
pattern=pattern, groups=self.groups, module_name=module,
module_args=args, host_list=hosts, forks=self.forks,
pattern=pattern, inventory=self.inventory, module_name=module,
module_args=args, forks=self.forks,
remote_pass=self.remote_pass, module_path=self.module_path,
timeout=self.timeout, remote_user=remote_user,
remote_port=self.remote_port, module_vars=vars,
remote_port=port, module_vars=vars,
setup_cache=SETUP_CACHE, basedir=self.basedir,
conditional=only_if, callbacks=self.runner_callbacks,
extra_vars=self.extra_vars, debug=self.debug, sudo=sudo,
debug=self.debug, sudo=sudo,
transport=transport, sudo_pass=self.sudo_pass, is_playbook=True
)
if async_seconds == 0:
return runner.run()
results = runner.run()
else:
return self._async_poll(runner, hosts, async_seconds, async_poll_interval, only_if)
results = self._async_poll(runner, hosts, async_seconds, async_poll_interval, only_if)
self.inventory.lift_restriction()
return results
# *****************************************************
def _run_task(self, pattern=None, host_list=None, task=None,
remote_user=None, handlers=None, conditional=False, sudo=False, transport=None):
def _run_task(self, pattern=None, task=None,
remote_user=None, handlers=None, conditional=False, sudo=False, transport=None, port=None):
''' run a single task in the playbook and recursively run any subtasks. '''
# load the module name and parameters from the task entry
@ -340,7 +377,9 @@ class PlayBook(object):
tokens = action.split(None, 1)
module_name = tokens[0]
module_args = tokens[1]
module_args = ''
if len(tokens) > 1:
module_args = tokens[1]
# include task specific vars
module_vars = task.get('vars')
@ -354,9 +393,9 @@ class PlayBook(object):
# load up an appropriate ansible runner to
# run the task in parallel
results = self._run_module(pattern, host_list, module_name,
results = self._run_module(pattern, module_name,
module_args, module_vars, remote_user, async_seconds,
async_poll_interval, only_if, sudo, transport)
async_poll_interval, only_if, sudo, transport, port)
self.stats.compute(results)
@ -406,7 +445,7 @@ class PlayBook(object):
# *****************************************************
def _do_conditional_imports(self, vars_files, host_list):
def _do_conditional_imports(self, vars_files):
''' handle the vars_files section, which can contain variables '''
# FIXME: save parsed variable results in memory to avoid excessive re-reading/parsing
@ -417,7 +456,7 @@ class PlayBook(object):
if type(vars_files) != list:
raise errors.AnsibleError("vars_files must be a list")
for host in host_list:
for host in self.inventory.list_hosts():
cache_vars = SETUP_CACHE.get(host,{})
SETUP_CACHE[host] = cache_vars
for filename in vars_files:
@ -426,7 +465,7 @@ class PlayBook(object):
found = False
sequence = []
for real_filename in filename:
filename2 = utils.path_dwim(self.basedir, utils.template(real_filename, cache_vars))
filename2 = utils.path_dwim(self.basedir, utils.template(real_filename, cache_vars, SETUP_CACHE))
sequence.append(filename2)
if os.path.exists(filename2):
found = True
@ -442,7 +481,7 @@ class PlayBook(object):
)
else:
filename2 = utils.path_dwim(self.basedir, utils.template(filename, cache_vars))
filename2 = utils.path_dwim(self.basedir, utils.template(filename, cache_vars, SETUP_CACHE))
if not os.path.exists(filename2):
raise errors.AnsibleError("no file matched for vars_file import: %s" % filename2)
data = utils.parse_yaml_from_file(filename2)
@ -460,25 +499,29 @@ class PlayBook(object):
if vars_files is not None:
self.callbacks.on_setup_secondary()
self._do_conditional_imports(vars_files, self.host_list)
self._do_conditional_imports(vars_files)
else:
self.callbacks.on_setup_primary()
host_list = [ h for h in self.host_list if not (h in self.stats.failures or h in self.stats.dark) ]
host_list = [ h for h in self.inventory.list_hosts(pattern)
if not (h in self.stats.failures or h in self.stats.dark) ]
self.inventory.restrict_to(host_list)
# push any variables down to the system
setup_results = ansible.runner.Runner(
pattern=pattern, groups=self.groups, module_name='setup',
module_args=vars, host_list=host_list,
pattern=pattern, module_name='setup',
module_args=vars, inventory=self.inventory,
forks=self.forks, module_path=self.module_path,
timeout=self.timeout, remote_user=user,
remote_pass=self.remote_pass, remote_port=self.remote_port,
remote_pass=self.remote_pass, remote_port=port,
setup_cache=SETUP_CACHE,
callbacks=self.runner_callbacks, sudo=sudo, debug=self.debug,
transport=transport, sudo_pass=self.sudo_pass, is_playbook=True
).run()
self.stats.compute(setup_results, setup=True)
self.inventory.lift_restriction()
# now for each result, load into the setup cache so we can
# let runner template out future commands
setup_ok = setup_results.get('contacted', {})
@ -487,15 +530,6 @@ class PlayBook(object):
for (host, result) in setup_ok.iteritems():
SETUP_CACHE[host] = result
if self.extra_vars:
extra_vars = utils.parse_kv(self.extra_vars)
for h in self.host_list:
try:
SETUP_CACHE[h].update(extra_vars)
except:
SETUP_CACHE[h] = extra_vars
return host_list
# *****************************************************
def _run_play(self, pg):
@ -514,7 +548,7 @@ class PlayBook(object):
handlers = pg.get('handlers', [])
user = pg.get('user', self.remote_user)
port = pg.get('port', self.remote_port)
sudo = pg.get('sudo', False)
sudo = pg.get('sudo', self.sudo)
transport = pg.get('connection', self.transport)
self.callbacks.on_play_start(pattern)
@ -530,12 +564,12 @@ class PlayBook(object):
for task in tasks:
self._run_task(
pattern=pattern,
host_list=self.host_list,
task=task,
handlers=handlers,
remote_user=user,
sudo=sudo,
transport=transport
transport=transport,
port=port
)
# handlers only run on certain nodes, they are flagged by _flag_handlers
@ -547,16 +581,18 @@ class PlayBook(object):
for task in handlers:
triggered_by = task.get('run', None)
if type(triggered_by) == list:
self.inventory.restrict_to(triggered_by)
self._run_task(
pattern=pattern,
task=task,
handlers=[],
host_list=triggered_by,
conditional=True,
remote_user=user,
sudo=sudo,
transport=transport
transport=transport,
port=port
)
self.inventory.lift_restriction()
# end of execution for this particular pattern. Multiple patterns
# can be in a single playbook file
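Most of the playbook changes above follow the same pattern: instead of threading host_list through every call, the playbook narrows the shared Inventory before each runner invocation and widens it again afterwards. A condensed sketch of that pattern, with names taken from the hunks above (not the committed code verbatim):

hosts = [h for h in self.inventory.list_hosts(pattern)
         if h not in self.stats.failures and h not in self.stats.dark]
self.inventory.restrict_to(hosts)    # the runner only sees surviving hosts
results = runner.run()
self.inventory.lift_restriction()    # back to the unrestricted inventory

The same restrict/lift bracket wraps async polling and handlers that are limited to the hosts that notified them.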

@ -18,7 +18,6 @@
################################################
import fnmatch
import multiprocessing
import signal
import os
@ -27,11 +26,11 @@ import Queue
import random
import traceback
import tempfile
import subprocess
import getpass
import base64
import ansible.constants as C
import ansible.connection
import ansible.inventory
from ansible import utils
from ansible import errors
from ansible import callbacks as ans_callbacks
@ -68,17 +67,41 @@ def _executor_hook(job_queue, result_queue):
class Runner(object):
_external_variable_script = None
def __init__(self, host_list=C.DEFAULT_HOST_LIST, module_path=C.DEFAULT_MODULE_PATH,
def __init__(self,
host_list=C.DEFAULT_HOST_LIST, module_path=C.DEFAULT_MODULE_PATH,
module_name=C.DEFAULT_MODULE_NAME, module_args=C.DEFAULT_MODULE_ARGS,
forks=C.DEFAULT_FORKS, timeout=C.DEFAULT_TIMEOUT, pattern=C.DEFAULT_PATTERN,
remote_user=C.DEFAULT_REMOTE_USER, remote_pass=C.DEFAULT_REMOTE_PASS,
sudo_pass=C.DEFAULT_SUDO_PASS, remote_port=C.DEFAULT_REMOTE_PORT, background=0,
basedir=None, setup_cache=None, transport=C.DEFAULT_TRANSPORT,
conditional='True', groups={}, callbacks=None, verbose=False,
debug=False, sudo=False, extra_vars=None, module_vars=None, is_playbook=False):
forks=C.DEFAULT_FORKS, timeout=C.DEFAULT_TIMEOUT,
pattern=C.DEFAULT_PATTERN, remote_user=C.DEFAULT_REMOTE_USER,
remote_pass=C.DEFAULT_REMOTE_PASS, remote_port=C.DEFAULT_REMOTE_PORT,
sudo_pass=C.DEFAULT_SUDO_PASS, background=0, basedir=None,
setup_cache=None, transport=C.DEFAULT_TRANSPORT, conditional='True',
callbacks=None, debug=False, sudo=False, module_vars=None,
is_playbook=False, inventory=None):
"""
host_list : path to a host list file, like /etc/ansible/hosts
module_path : path to modules, like /usr/share/ansible
module_name : which module to run (string)
module_args : args to pass to the module (string)
forks : desired level of parallelism (hosts to run on at a time)
timeout : connection timeout, such as an SSH timeout, in seconds
pattern : pattern or groups to select from in inventory
remote_user : connect as this remote username
remote_pass : supply this password (if not using keys)
remote_port : use this default remote port (if not set by the inventory system)
sudo_pass : sudo password if using sudo and sudo requires a password
background : run asynchronously with a time limit of this many seconds (if not 0)
basedir : relative paths used by modules are resolved against this directory
setup_cache : this is an internalism that is going away
transport : transport mode (paramiko, local)
conditional : only execute if this string, evaluated, is True
callbacks : output callback class
sudo : log in as remote user and immediately sudo to root
module_vars : provides additional variables to a template. FIXME: just use module_args, remove
is_playbook : indicates Runner is being used by a playbook. affects behavior in various ways.
inventory : inventory object, if host_list is not provided
"""
if setup_cache is None:
setup_cache = {}
if basedir is None:
@ -93,11 +116,10 @@ class Runner(object):
self.transport = transport
self.connector = ansible.connection.Connection(self, self.transport)
if type(host_list) == str:
self.host_list, self.groups = self.parse_hosts(host_list)
if inventory is None:
self.inventory = ansible.inventory.Inventory(host_list)
else:
self.host_list = host_list
self.groups = groups
self.inventory = inventory
self.setup_cache = setup_cache
self.conditional = conditional
@ -107,10 +129,8 @@ class Runner(object):
self.pattern = pattern
self.module_args = module_args
self.module_vars = module_vars
self.extra_vars = extra_vars
self.timeout = timeout
self.debug = debug
self.verbose = verbose
self.remote_user = remote_user
self.remote_pass = remote_pass
self.remote_port = remote_port
@ -129,116 +149,18 @@ class Runner(object):
self._tmp_paths = {}
random.seed()
# *****************************************************
@classmethod
def parse_hosts_from_regular_file(cls, host_list):
''' parse a textual host file '''
results = []
groups = dict(ungrouped=[])
lines = file(host_list).read().split("\n")
group_name = 'ungrouped'
for item in lines:
item = item.lstrip().rstrip()
if item.startswith("#"):
# ignore commented out lines
pass
elif item.startswith("["):
# looks like a group
group_name = item.replace("[","").replace("]","").lstrip().rstrip()
groups[group_name] = []
elif item != "":
# looks like a regular host
groups[group_name].append(item)
if not item in results:
results.append(item)
return (results, groups)
# *****************************************************
@classmethod
def parse_hosts_from_script(cls, host_list, extra_vars):
''' evaluate a script that returns list of hosts by groups '''
results = []
groups = dict(ungrouped=[])
host_list = os.path.abspath(host_list)
cls._external_variable_script = host_list
cmd = [host_list, '--list']
if extra_vars:
cmd.extend(['--extra-vars', extra_vars])
cmd = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False)
out, err = cmd.communicate()
rc = cmd.returncode
if rc:
raise errors.AnsibleError("%s: %s" % (host_list, err))
try:
groups = utils.json_loads(out)
except:
raise errors.AnsibleError("invalid JSON response from script: %s" % host_list)
for (groupname, hostlist) in groups.iteritems():
for host in hostlist:
if host not in results:
results.append(host)
return (results, groups)
# *****************************************************
@classmethod
def parse_hosts(cls, host_list, override_hosts=None, extra_vars=None):
def parse_hosts(cls, host_list, override_hosts=None):
''' parse the host inventory file, returns (hosts, groups) '''
if override_hosts is not None:
if type(override_hosts) != list:
raise errors.AnsibleError("override hosts must be a list")
return (override_hosts, dict(ungrouped=override_hosts))
if type(host_list) == list:
raise Exception("function can only be called on inventory files")
host_list = os.path.expanduser(host_list)
if not os.path.exists(host_list):
raise errors.AnsibleFileNotFound("inventory file not found: %s" % host_list)
if not os.access(host_list, os.X_OK):
return Runner.parse_hosts_from_regular_file(host_list)
if override_hosts is None:
inventory = ansible.inventory.Inventory(host_list)
else:
return Runner.parse_hosts_from_script(host_list, extra_vars)
inventory = ansible.inventory.Inventory(override_hosts)
# *****************************************************
def _matches(self, host_name, pattern):
''' returns if a hostname is matched by the pattern '''
# a pattern is in fnmatch format but more than one pattern
# can be strung together with semicolons. ex:
# atlanta-web*.example.com;dc-web*.example.com
if host_name == '':
return False
pattern = pattern.replace(";",":")
subpatterns = pattern.split(":")
for subpattern in subpatterns:
if subpattern == 'all':
return True
if fnmatch.fnmatch(host_name, subpattern):
return True
elif subpattern in self.groups:
if host_name in self.groups[subpattern]:
return True
return False
# *****************************************************
def _connect(self, host):
''' connects to a host, returns (is_successful, connection_object OR traceback_string) '''
try:
return [ True, self.connector.connect(host) ]
except errors.AnsibleConnectionFailed, e:
return [ False, "FAILED: %s" % str(e) ]
return inventory.host_list, inventory.groups
# *****************************************************
@ -263,7 +185,7 @@ class Runner(object):
if type(files) == str:
files = [ files ]
for filename in files:
if not filename.startswith('/tmp/'):
if filename.find('/tmp/') == -1:
raise Exception("not going to happen")
self._exec_command(conn, "rm -rf %s" % filename, None)
@ -278,51 +200,22 @@ class Runner(object):
# *****************************************************
def _transfer_str(self, conn, tmp, name, args_str):
''' transfer arguments as a single file to be fed to the module. '''
def _transfer_str(self, conn, tmp, name, data):
''' transfer string to remote file '''
if type(args_str) == dict:
args_str = utils.smjson(args_str)
if type(data) == dict:
data = utils.smjson(data)
args_fd, args_file = tempfile.mkstemp()
args_fo = os.fdopen(args_fd, 'w')
args_fo.write(args_str)
args_fo.flush()
args_fo.close()
afd, afile = tempfile.mkstemp()
afo = os.fdopen(afd, 'w')
afo.write(data)
afo.flush()
afo.close()
args_remote = os.path.join(tmp, name)
conn.put_file(args_file, args_remote)
os.unlink(args_file)
return args_remote
# *****************************************************
def _add_variables_from_script(self, conn, inject):
''' support per-system variables from external variable scripts, see web docs '''
host = conn.host
cmd = [Runner._external_variable_script, '--host', host]
if self.extra_vars:
cmd.extend(['--extra-vars', self.extra_vars])
cmd = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=False
)
out, err = cmd.communicate()
inject2 = {}
try:
inject2 = utils.json_loads(out)
except:
raise errors.AnsibleError("%s returned invalid result when called with hostname %s" % (
Runner._external_variable_script,
host
))
# store injected variables in the templates
inject.update(inject2)
remote = os.path.join(tmp, name)
conn.put_file(afile, remote)
os.unlink(afile)
return remote
# *****************************************************
@ -335,7 +228,7 @@ class Runner(object):
# TODO: keep this as a dict through the whole path to simplify this code
for (k,v) in inject.iteritems():
if not k.startswith('facter_') and not k.startswith('ohai_'):
if not k.startswith('facter_') and not k.startswith('ohai_') and not k.startswith('ansible_'):
if not is_dict:
if str(v).find(" ") != -1:
v = "\"%s\"" % v
@ -375,19 +268,20 @@ class Runner(object):
''' runs a module that has already been transferred '''
inject = self.setup_cache.get(conn.host,{})
conditional = utils.double_template(self.conditional, inject)
conditional = utils.double_template(self.conditional, inject, self.setup_cache)
if not eval(conditional):
return [ utils.smjson(dict(skipped=True)), None, 'skipped' ]
if Runner._external_variable_script is not None:
self._add_variables_from_script(conn, inject)
host_variables = self.inventory.get_variables(conn.host)
inject.update(host_variables)
if self.module_name == 'setup':
args = self._add_setup_vars(inject, args)
args = self._add_setup_metadata(args)
if type(args) == dict:
args = utils.bigjson(args)
args = utils.template(args, inject)
args = utils.bigjson(args)
args = utils.template(args, inject, self.setup_cache)
module_name_tail = remote_module_path.split("/")[-1]
@ -492,7 +386,11 @@ class Runner(object):
dest = options.get('dest', None)
if source is None or dest is None:
return (host, True, dict(failed=True, msg="src and dest are required"), '')
# apply templating to source argument
inject = self.setup_cache.get(conn.host,{})
source = utils.template(source, inject, self.setup_cache)
# transfer the file to a remote tmp location
tmp_src = tmp + source.split('/')[-1]
conn.put_file(utils.path_dwim(self.basedir, source), tmp_src)
@ -524,8 +422,8 @@ class Runner(object):
return (host, True, dict(failed=True, msg="src and dest are required"), '')
# files are saved in dest dir, with a subdir for each host, then the filename
filename = os.path.basename(source)
dest = "%s/%s/%s" % (utils.path_dwim(self.basedir, dest), host, filename)
dest = "%s/%s/%s" % (utils.path_dwim(self.basedir, dest), host, source)
dest = dest.replace("//","/")
# compare old and new md5 for support of change hooks
local_md5 = None
@ -539,7 +437,6 @@ class Runner(object):
# fetch the file and check for changes
conn.fetch_file(source, dest)
new_md5 = os.popen("md5sum %s" % dest).read().split()[0]
changed = (new_md5 != local_md5)
if new_md5 != remote_md5:
return (host, True, dict(failed=True, msg="md5 mismatch", md5sum=new_md5), '')
return (host, True, dict(changed=True, md5sum=new_md5), '')
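With source now used in place of the bare filename, a fetched file keeps its full remote path under the per-host directory. For illustrative values dest=backup, host=web1.example.com and source=/etc/hosts, the local destination works out to backup/web1.example.com/etc/hosts once the doubled slash is collapsed.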
@ -577,32 +474,54 @@ class Runner(object):
if source is None or dest is None:
return (host, True, dict(failed=True, msg="src and dest are required"), '')
if metadata is None:
if self.remote_user == 'root':
metadata = '/etc/ansible/setup'
else:
metadata = '~/.ansible/setup'
# apply templating to source argument so vars can be used in the path
inject = self.setup_cache.get(conn.host,{})
source = utils.template(source, inject, self.setup_cache)
# first copy the source template over
temppath = tmp + os.path.split(source)[-1]
conn.put_file(utils.path_dwim(self.basedir, source), temppath)
(host, ok, data, err) = (None, None, None, None)
if not self.is_playbook:
# not running from a playbook so we have to fetch the remote
# setup file contents before proceeding...
if metadata is None:
if self.remote_user == 'root':
metadata = '/etc/ansible/setup'
else:
# path is expanded on remote side
metadata = "~/.ansible/setup"
# install the template module
slurp_module = self._transfer_module(conn, tmp, 'slurp')
# run the slurp module to get the metadata file
args = "src=%s" % metadata
(result1, err, executed) = self._execute_module(conn, tmp, slurp_module, args)
result1 = utils.json_loads(result1)
if not 'content' in result1 or result1.get('encoding','base64') != 'base64':
result1['failed'] = True
return self._return_from_module(conn, host, result1, err, executed)
content = base64.b64decode(result1['content'])
inject = utils.json_loads(content)
# install the template module
template_module = self._transfer_module(conn, tmp, 'template')
copy_module = self._transfer_module(conn, tmp, 'copy')
# transfer module vars
if self.module_vars:
vars = utils.bigjson(self.module_vars)
vars_path = self._transfer_str(conn, tmp, 'module_vars', vars)
vars_arg=" vars=%s"%(vars_path)
else:
vars_arg=""
# run the template module
args = "src=%s dest=%s metadata=%s%s" % (temppath, dest, metadata, vars_arg)
(result1, err, executed) = self._execute_module(conn, tmp, template_module, args)
# template the source data locally
source_data = file(utils.path_dwim(self.basedir, source)).read()
resultant = ''
try:
resultant = utils.template(source_data, inject, self.setup_cache)
except Exception, e:
return (host, False, dict(failed=True, msg=str(e)), '')
xfered = self._transfer_str(conn, tmp, 'source', resultant)
# run the COPY module
args = "src=%s dest=%s" % (xfered, dest)
(result1, err, executed) = self._execute_module(conn, tmp, copy_module, args)
(host, ok, data, err) = self._return_from_module(conn, host, result1, err, executed)
# modify file attribs if needed
if ok:
return self._chain_file_module(conn, tmp, data, err, options, executed)
else:
@ -628,12 +547,17 @@ class Runner(object):
def _executor_internal(self, host):
''' callback executed in parallel for each host. returns (hostname, connected_ok, extra) '''
ok, conn = self._connect(host)
if not ok:
return [ host, False, conn , None]
host_variables = self.inventory.get_variables(host)
port = host_variables.get('ansible_ssh_port', self.remote_port)
conn = None
try:
conn = self.connector.connect(host, port)
except errors.AnsibleConnectionFailed, e:
return [ host, False, "FAILED: %s" % str(e), None ]
cache = self.setup_cache.get(host, {})
module_name = utils.template(self.module_name, cache)
module_name = utils.template(self.module_name, cache, self.setup_cache)
tmp = self._get_tmp_path(conn)
result = None
@ -692,7 +616,14 @@ class Runner(object):
def _get_tmp_path(self, conn):
''' gets a temporary path on a remote box '''
result, err = self._exec_command(conn, "mktemp -d /tmp/ansible.XXXXXX", None, sudoable=False)
basetmp = "/var/tmp"
if self.remote_user != 'root':
basetmp = "/home/%s/.ansible/tmp" % self.remote_user
cmd = "mktemp -d %s/ansible.XXXXXX" % basetmp
if self.remote_user != 'root':
cmd = "mkdir -p %s && %s" % (basetmp, cmd)
result, err = self._exec_command(conn, cmd, None, sudoable=False)
cleaned = result.split("\n")[0].strip() + '/'
return cleaned
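With this hunk the remote work directory moves under the connecting user's home unless running as root. For a hypothetical non-root user named deploy, the executed command becomes roughly mkdir -p /home/deploy/.ansible/tmp && mktemp -d /home/deploy/.ansible/tmp/ansible.XXXXXX, while root keeps a plain mktemp -d /var/tmp/ansible.XXXXXX.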
@ -714,13 +645,6 @@ class Runner(object):
# *****************************************************
def _match_hosts(self, pattern):
''' return all matched hosts fitting a pattern '''
return [ h for h in self.host_list if self._matches(h, pattern) ]
# *****************************************************
def _parallel_exec(self, hosts):
''' handles multiprocessing when more than 1 fork is required '''
@ -767,7 +691,7 @@ class Runner(object):
results2["dark"][host] = result
# hosts which were contacted but never got a chance to return
for host in self._match_hosts(self.pattern):
for host in self.inventory.list_hosts(self.pattern):
if not (host in results2['dark'] or host in results2['contacted']):
results2["dark"][host] = {}
@ -779,7 +703,7 @@ class Runner(object):
''' xfer & run module on all matched hosts '''
# find hosts that match the pattern
hosts = self._match_hosts(self.pattern)
hosts = self.inventory.list_hosts(self.pattern)
if len(hosts) == 0:
self.callbacks.on_no_hosts()
return dict(contacted={}, dark={})
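With these changes a Runner can be handed a prebuilt Inventory object instead of a host file path. A minimal usage sketch of the reworked API (argument names come from the constructor above; the module name and host pattern are illustrative, and the callback class is assumed to exist in ansible.callbacks as referenced earlier in this commit):

import ansible.inventory
import ansible.runner
from ansible import callbacks as ans_callbacks

inventory = ansible.inventory.Inventory('/etc/ansible/hosts')
runner = ansible.runner.Runner(
    inventory=inventory,
    pattern='webservers',
    module_name='ping',
    module_args='',
    forks=5,
    callbacks=ans_callbacks.DefaultRunnerCallbacks(),
)
results = runner.run()   # dict with 'contacted' and 'dark' host maps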

@ -33,7 +33,6 @@ except ImportError:
from ansible import errors
import ansible.constants as C
###############################################################
# UTILITY FUNCTIONS FOR COMMAND LINE TOOLS
###############################################################
@ -239,14 +238,16 @@ def varReplace(raw, vars):
return ''.join(done)
def template(text, vars):
def template(text, vars, setup_cache):
''' run a text buffer through the templating engine '''
vars = vars.copy()
text = varReplace(str(text), vars)
vars['hostvars'] = setup_cache
template = jinja2.Template(text)
return template.render(vars)
def double_template(text, vars):
return template(template(text, vars), vars)
def double_template(text, vars, setup_cache):
return template(template(text, vars, setup_cache), vars, setup_cache)
def template_from_file(path, vars):
''' run a file through the templating engine '''
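Passing setup_cache into template() exposes every host's setup facts to Jinja2 as the hostvars mapping, so one host's template can reference another host's data. A small illustrative sketch (the host name and fact key are hypothetical):

from ansible import utils

setup_cache = {'db1.example.com': {'private_ip': '10.0.0.5'}}
vars = {'dbhost': 'db1.example.com'}
text = "db lives at {{ hostvars['db1.example.com']['private_ip'] }}"
print utils.template(text, vars, setup_cache)   # -> db lives at 10.0.0.5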
@ -279,7 +280,7 @@ class SortedOptParser(optparse.OptionParser):
self.option_list.sort(key=methodcaller('get_opt_string'))
return optparse.OptionParser.format_help(self, formatter=None)
def base_parser(constants=C, usage="", output_opts=False, port_opts=False, runas_opts=False, async_opts=False, connect_opts=False):
def base_parser(constants=C, usage="", output_opts=False, runas_opts=False, async_opts=False, connect_opts=False):
''' create an options parser for any ansible script '''
parser = SortedOptParser(usage)
@ -301,11 +302,6 @@ def base_parser(constants=C, usage="", output_opts=False, port_opts=False, runas
dest='timeout',
help="override the SSH timeout in seconds (default=%s)" % constants.DEFAULT_TIMEOUT)
if port_opts:
parser.add_option('-p', '--port', default=constants.DEFAULT_REMOTE_PORT, type='int',
dest='remote_port',
help="override the remote ssh port (default=%s)" % constants.DEFAULT_REMOTE_PORT)
if output_opts:
parser.add_option('-o', '--one-line', dest='one_line', action='store_true',
help='condense output')