forwarded docker_extra_args to latest upstream/origin/devel

Thomas Steinbach 2016-03-24 21:25:38 +01:00
commit cd2c140f69
421 changed files with 22824 additions and 4800 deletions

View file

@ -25,6 +25,7 @@ from ansible.errors import AnsibleParserError
from ansible.playbook.play import Play
from ansible.playbook.playbook_include import PlaybookInclude
from ansible.plugins import get_all_plugin_loaders
from ansible import constants as C
try:
from __main__ import display
@ -87,7 +88,7 @@ class Playbook:
if pb is not None:
self._entries.extend(pb._entries)
else:
display.display("skipping playbook include '%s' due to conditional test failure" % entry.get('include', entry), color='cyan')
display.display("skipping playbook include '%s' due to conditional test failure" % entry.get('include', entry), color=C.COLOR_SKIP)
else:
entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader)
self._entries.append(entry_obj)

View file

@ -19,6 +19,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from copy import deepcopy
class Attribute:
@ -32,6 +33,11 @@ class Attribute:
self.priority = priority
self.always_post_validate = always_post_validate
if default is not None and self.isa in ('list', 'dict', 'set'):
self.default = deepcopy(default)
else:
self.default = default
def __eq__(self, other):
return other.priority == self.priority
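
The deepcopy added above guards against the classic shared-mutable-default pitfall: every attribute declared with the same list/dict/set default would otherwise end up mutating one shared object. A minimal standalone sketch (not the real Attribute class) of the behaviour:

from copy import deepcopy

class Attr:
    # simplified stand-in for ansible.playbook.attribute.Attribute
    def __init__(self, isa=None, default=None):
        self.isa = isa
        if default is not None and isa in ('list', 'dict', 'set'):
            self.default = deepcopy(default)   # per-instance copy of mutable defaults
        else:
            self.default = default

shared_default = []
a = Attr(isa='list', default=shared_default)
b = Attr(isa='list', default=shared_default)
a.default.append('x')
assert b.default == []   # only true because of the deepcopy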

View file

@ -153,7 +153,7 @@ class Base:
setattr(Base, name, property(getter, setter, deleter))
# Place the value into the instance so that the property can
# process and hold that value/
# process and hold that value.
setattr(self, name, value.default)
def preprocess_data(self, ds):
@ -267,6 +267,8 @@ class Base:
new_me._loader = self._loader
new_me._variable_manager = self._variable_manager
new_me._uuid = self._uuid
# if the ds value was set on the object, copy it to the new copy too
if hasattr(self, '_ds'):
new_me._ds = self._ds
@ -332,7 +334,10 @@ class Base:
if value is None:
value = []
elif not isinstance(value, list):
value = [ value ]
if isinstance(value, string_types):
value = value.split(',')
else:
value = [ value ]
if attribute.listof is not None:
for item in value:
if not isinstance(item, attribute.listof):
@ -344,11 +349,15 @@ class Base:
elif attribute.isa == 'set':
if value is None:
value = set()
else:
if not isinstance(value, (list, set)):
elif not isinstance(value, (list, set)):
if isinstance(value, string_types):
value = value.split(',')
else:
# Making a list like this handles strings of
# text and bytes properly
value = [ value ]
if not isinstance(value, set):
value = set(value)
if not isinstance(value, set):
value = set(value)
elif attribute.isa == 'dict':
if value is None:
value = dict()
@ -414,7 +423,7 @@ class Base:
def _validate_variable_keys(ds):
for key in ds:
if not isidentifier(key):
raise TypeError("%s is not a valid variable name" % key)
raise TypeError("'%s' is not a valid variable name" % key)
try:
if isinstance(ds, dict):

View file

@ -31,7 +31,7 @@ except ImportError:
class Become:
# Privlege escalation
# Privilege escalation
_become = FieldAttribute(isa='bool')
_become_method = FieldAttribute(isa='string')
_become_user = FieldAttribute(isa='string')
@ -60,7 +60,7 @@ class Become:
This is called from the Base object's preprocess_data() method which
in turn is called pretty much anytime any sort of playbook object
(plays, tasks, blocks, etc) are created.
(plays, tasks, blocks, etc) is created.
"""
self._detect_privilege_escalation_conflict(ds)
@ -90,16 +90,17 @@ class Become:
display.deprecated("Instead of su/su_user, use become/become_user and set become_method to 'su' (default is sudo)")
# if we are becoming someone else, but some fields are unset,
# make sure they're initialized to the default config values
if ds.get('become', False):
if ds.get('become_method', None) is None:
ds['become_method'] = C.DEFAULT_BECOME_METHOD
if ds.get('become_user', None) is None:
ds['become_user'] = C.DEFAULT_BECOME_USER
return ds
def set_become_defaults(self, become, become_method, become_user):
''' if we are becoming someone else, but some fields are unset,
make sure they're initialized to the default config values '''
if become:
if become_method is None:
become_method = C.DEFAULT_BECOME_METHOD
if become_user is None:
become_user = C.DEFAULT_BECOME_USER
def _get_attr_become(self):
'''
Override for the 'become' getattr fetcher, used from Base.
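
A hedged sketch of the default-filling logic that set_become_defaults introduces above; the constants stand in for C.DEFAULT_BECOME_METHOD and C.DEFAULT_BECOME_USER, and this version returns the resolved values rather than relying on attribute state:

DEFAULT_BECOME_METHOD = 'sudo'   # stand-in for C.DEFAULT_BECOME_METHOD
DEFAULT_BECOME_USER = 'root'     # stand-in for C.DEFAULT_BECOME_USER

def set_become_defaults(become, become_method, become_user):
    # only fill in defaults when privilege escalation is actually requested
    if become:
        if become_method is None:
            become_method = DEFAULT_BECOME_METHOD
        if become_user is None:
            become_user = DEFAULT_BECOME_USER
    return become_method, become_user

assert set_become_defaults(True, None, None) == ('sudo', 'root')
assert set_become_defaults(False, None, None) == (None, None)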

View file

@ -34,6 +34,8 @@ class Block(Base, Become, Conditional, Taggable):
_rescue = FieldAttribute(isa='list', default=[])
_always = FieldAttribute(isa='list', default=[])
_delegate_to = FieldAttribute(isa='list')
_delegate_facts = FieldAttribute(isa='bool', default=False)
_any_errors_fatal = FieldAttribute(isa='bool')
# for future consideration? this would be functionally
# similar to the 'else' clause for exceptions
@ -42,11 +44,16 @@ class Block(Base, Become, Conditional, Taggable):
def __init__(self, play=None, parent_block=None, role=None, task_include=None, use_handlers=False, implicit=False):
self._play = play
self._role = role
self._task_include = task_include
self._parent_block = parent_block
self._task_include = None
self._parent_block = None
self._dep_chain = None
self._use_handlers = use_handlers
self._implicit = implicit
self._dep_chain = []
if task_include:
self._task_include = task_include
elif parent_block:
self._parent_block = parent_block
super(Block, self).__init__()
@ -142,6 +149,17 @@ class Block(Base, Become, Conditional, Taggable):
except AssertionError:
raise AnsibleParserError("A malformed block was encountered.", obj=self._ds)
def get_dep_chain(self):
if self._dep_chain is None:
if self._parent_block:
return self._parent_block.get_dep_chain()
elif self._task_include:
return self._task_include._block.get_dep_chain()
else:
return None
else:
return self._dep_chain[:]
def copy(self, exclude_parent=False, exclude_tasks=False):
def _dupe_task_list(task_list, new_block):
new_task_list = []
@ -158,7 +176,9 @@ class Block(Base, Become, Conditional, Taggable):
new_me = super(Block, self).copy()
new_me._play = self._play
new_me._use_handlers = self._use_handlers
new_me._dep_chain = self._dep_chain[:]
if self._dep_chain:
new_me._dep_chain = self._dep_chain[:]
if not exclude_tasks:
new_me.block = _dupe_task_list(self.block or [], new_me)
@ -175,7 +195,8 @@ class Block(Base, Become, Conditional, Taggable):
new_me._task_include = None
if self._task_include:
new_me._task_include = self._task_include.copy()
new_me._task_include = self._task_include.copy(exclude_block=True)
new_me._task_include._block = self._task_include._block.copy(exclude_tasks=True)
return new_me
@ -190,7 +211,7 @@ class Block(Base, Become, Conditional, Taggable):
if attr not in ('block', 'rescue', 'always'):
data[attr] = getattr(self, attr)
data['dep_chain'] = self._dep_chain
data['dep_chain'] = self.get_dep_chain()
if self._role is not None:
data['role'] = self._role.serialize()
@ -215,7 +236,7 @@ class Block(Base, Become, Conditional, Taggable):
if attr in data and attr not in ('block', 'rescue', 'always'):
setattr(self, attr, data.get(attr))
self._dep_chain = data.get('dep_chain', [])
self._dep_chain = data.get('dep_chain', None)
# if there was a serialized role, unpack it too
role_data = data.get('role')
@ -236,10 +257,12 @@ class Block(Base, Become, Conditional, Taggable):
pb = Block()
pb.deserialize(pb_data)
self._parent_block = pb
self._dep_chain = self._parent_block.get_dep_chain()
def evaluate_conditional(self, templar, all_vars):
if len(self._dep_chain):
for dep in self._dep_chain:
dep_chain = self.get_dep_chain()
if dep_chain:
for dep in dep_chain:
if not dep.evaluate_conditional(templar, all_vars):
return False
if self._task_include is not None:
@ -263,8 +286,10 @@ class Block(Base, Become, Conditional, Taggable):
if self._task_include:
self._task_include.set_loader(loader)
for dep in self._dep_chain:
dep.set_loader(loader)
dep_chain = self.get_dep_chain()
if dep_chain:
for dep in dep_chain:
dep.set_loader(loader)
def _get_parent_attribute(self, attr, extend=False):
'''
@ -287,18 +312,18 @@ class Block(Base, Become, Conditional, Taggable):
value = self._extend_value(value, parent_value)
else:
value = parent_value
if self._role and (value is None or extend):
parent_value = getattr(self._role, attr)
if self._role and (value is None or extend) and hasattr(self._role, attr):
parent_value = getattr(self._role, attr, None)
if extend:
value = self._extend_value(value, parent_value)
else:
value = parent_value
if len(self._dep_chain) and (not value or extend):
reverse_dep_chain = self._dep_chain[:]
reverse_dep_chain.reverse()
for dep in reverse_dep_chain:
dep_value = getattr(dep, attr)
dep_chain = self.get_dep_chain()
if dep_chain and (value is None or extend):
dep_chain.reverse()
for dep in dep_chain:
dep_value = getattr(dep, attr, None)
if extend:
value = self._extend_value(value, dep_value)
else:
@ -306,14 +331,13 @@ class Block(Base, Become, Conditional, Taggable):
if value is not None and not extend:
break
if self._play and (value is None or extend):
parent_value = getattr(self._play, attr)
if self._play and (value is None or extend) and hasattr(self._play, attr):
parent_value = getattr(self._play, attr, None)
if extend:
value = self._extend_value(value, parent_value)
else:
value = parent_value
except KeyError:
except KeyError as e:
pass
return value
@ -329,6 +353,12 @@ class Block(Base, Become, Conditional, Taggable):
return environment
def _get_attr_any_errors_fatal(self):
'''
Override for the 'any_errors_fatal' getattr fetcher, used from Base.
'''
return self._get_parent_attribute('any_errors_fatal')
def filter_tagged_tasks(self, play_context, all_vars):
'''
Creates a new block, with task lists filtered based on the tags contained
@ -347,7 +377,7 @@ class Block(Base, Become, Conditional, Taggable):
return tmp_list
def evaluate_block(block):
new_block = self.copy()
new_block = self.copy(exclude_tasks=True)
new_block.block = evaluate_and_append_task(block.block)
new_block.rescue = evaluate_and_append_task(block.rescue)
new_block.always = evaluate_and_append_task(block.always)
@ -357,3 +387,4 @@ class Block(Base, Become, Conditional, Taggable):
def has_tasks(self):
return len(self.block) > 0 or len(self.rescue) > 0 or len(self.always) > 0

View file

@ -22,7 +22,7 @@ __metaclass__ = type
from jinja2.exceptions import UndefinedError
from ansible.compat.six import text_type
from ansible.errors import AnsibleError
from ansible.errors import AnsibleError, AnsibleUndefinedVariable
from ansible.playbook.attribute import FieldAttribute
from ansible.template import Templar
@ -56,7 +56,7 @@ class Conditional:
False if any of them evaluate as such.
'''
# since this is a mixin, it may not have an underlying datastructure
# since this is a mix-in, it may not have an underlying datastructure
# associated with it, so we pull it out now in case we need it for
# error reporting below
ds = None
@ -86,19 +86,25 @@ class Conditional:
if conditional in all_vars and '-' not in text_type(all_vars[conditional]):
conditional = all_vars[conditional]
# make sure the templar is using the variables specifed to this method
# make sure the templar is using the variables specified with this method
templar.set_available_variables(variables=all_vars)
conditional = templar.template(conditional)
if not isinstance(conditional, basestring) or conditional == "":
return conditional
try:
conditional = templar.template(conditional)
if not isinstance(conditional, text_type) or conditional == "":
return conditional
# a Jinja2 evaluation that results in something Python can eval!
presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional
conditional = templar.template(presented, fail_on_undefined=False)
val = conditional.strip()
if val == presented:
# a Jinja2 evaluation that results in something Python can eval!
presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional
conditional = templar.template(presented)
val = conditional.strip()
if val == "True":
return True
elif val == "False":
return False
else:
raise AnsibleError("unable to evaluate conditional: %s" % original)
except (AnsibleUndefinedVariable, UndefinedError) as e:
# the templating failed, meaning most likely a
# variable was undefined. If we happened to be
# looking for an undefined variable, return True,
@ -108,11 +114,5 @@ class Conditional:
elif "is defined" in original:
return False
else:
raise AnsibleError("error while evaluating conditional: %s (%s)" % (original, presented))
elif val == "True":
return True
elif val == "False":
return False
else:
raise AnsibleError("unable to evaluate conditional: %s" % original)
raise AnsibleError("error while evaluating conditional (%s): %s" % (original, e))

View file

@ -20,9 +20,17 @@ __metaclass__ = type
import os
from ansible.errors import AnsibleParserError
from ansible import constants as C
from ansible.compat.six import string_types
from ansible.errors import AnsibleParserError, AnsibleUndefinedVariable
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleSequence
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
def load_list_of_blocks(ds, play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
'''
@ -72,16 +80,18 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
from ansible.playbook.block import Block
from ansible.playbook.handler import Handler
from ansible.playbook.task import Task
from ansible.playbook.task_include import TaskInclude
from ansible.template import Templar
assert isinstance(ds, list)
task_list = []
for task in ds:
assert isinstance(task, dict)
for task_ds in ds:
assert isinstance(task_ds, dict)
if 'block' in task:
if 'block' in task_ds:
t = Block.load(
task,
task_ds,
play=play,
parent_block=block,
role=role,
@ -90,13 +100,133 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
variable_manager=variable_manager,
loader=loader,
)
task_list.append(t)
else:
if use_handlers:
t = Handler.load(task, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
else:
t = Task.load(task, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
if 'include' in task_ds:
t = TaskInclude.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
task_list.append(t)
all_vars = variable_manager.get_vars(loader=loader, play=play, task=t)
templar = Templar(loader=loader, variables=all_vars)
# check to see if this include is static, which can be true if:
# 1. the user set the 'static' option to true
# 2. one of the appropriate config options was set
# 3. the included file name contains no variables, and has no loop
is_static = t.static or \
C.DEFAULT_TASK_INCLUDES_STATIC or \
(use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
not templar._contains_vars(t.args.get('_raw_params')) and t.loop is None
if is_static:
if t.loop is not None:
raise AnsibleParserError("You cannot use 'static' on an include with a loop", obj=task_ds)
# FIXME: all of this code is very similar (if not identical) to that in
# plugins/strategy/__init__.py, and should be unified to avoid
# patches only being applied to one or the other location
if task_include:
# handle relative includes by walking up the list of parent include
# tasks and checking the relative result to see if it exists
parent_include = task_include
cumulative_path = None
while parent_include is not None:
parent_include_dir = templar.template(os.path.dirname(parent_include.args.get('_raw_params')))
if cumulative_path is None:
cumulative_path = parent_include_dir
elif not os.path.isabs(cumulative_path):
cumulative_path = os.path.join(parent_include_dir, cumulative_path)
include_target = templar.template(t.args['_raw_params'])
if t._role:
new_basedir = os.path.join(t._role._role_path, 'tasks', cumulative_path)
include_file = loader.path_dwim_relative(new_basedir, 'tasks', include_target)
else:
include_file = loader.path_dwim_relative(loader.get_basedir(), cumulative_path, include_target)
if os.path.exists(include_file):
break
else:
parent_include = parent_include._task_include
else:
try:
include_target = templar.template(t.args['_raw_params'])
except AnsibleUndefinedVariable as e:
raise AnsibleParserError(
"Error when evaluating variable in include name: %s.\n\n" \
"When using static includes, ensure that any variables used in their names are defined in vars/vars_files\n" \
"or extra-vars passed in from the command line. Static includes cannot use variables from inventory\n" \
"sources like group or host vars." % t.args['_raw_params'],
obj=task_ds,
suppress_extended_error=True,
)
if t._role:
if use_handlers:
include_file = loader.path_dwim_relative(t._role._role_path, 'handlers', include_target)
else:
include_file = loader.path_dwim_relative(t._role._role_path, 'tasks', include_target)
else:
include_file = loader.path_dwim(include_target)
data = loader.load_from_file(include_file)
if data is None:
return []
elif not isinstance(data, list):
raise AnsibleError("included task files must contain a list of tasks", obj=data)
included_blocks = load_list_of_blocks(
data,
play=play,
parent_block=block,
task_include=t,
role=role,
use_handlers=use_handlers,
loader=loader,
variable_manager=variable_manager,
)
# Remove the raw params field from the module args, so it won't show up
# later when getting the vars for this task/children
t.args.pop('_raw_params', None)
# pop tags out of the include args, if they were specified there, and assign
# them to the include. If the include already had tags specified, we raise an
# error so that users know not to specify them both ways
tags = t.vars.pop('tags', [])
if isinstance(tags, string_types):
tags = tags.split(',')
if len(tags) > 0:
if len(t.tags) > 0:
raise AnsibleParserError(
"Include tasks should not specify tags in more than one way (both via args and directly on the task)." \
" Mixing tag specify styles is prohibited for whole import hierarchy, not only for single import statement",
obj=task_ds,
suppress_extended_error=True,
)
display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option")
else:
tags = t.tags[:]
# now we extend the tags on each of the included blocks
for b in included_blocks:
b.tags = list(set(b.tags).union(tags))
# END FIXME
# FIXME: send callback here somehow...
# FIXME: handlers shouldn't need this special handling, but do
# right now because they don't iterate blocks correctly
if use_handlers:
for b in included_blocks:
task_list.extend(b.block)
else:
task_list.extend(included_blocks)
else:
task_list.append(t)
elif use_handlers:
t = Handler.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
task_list.append(t)
else:
t = Task.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
task_list.append(t)
return task_list
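
For reference, the static-include decision embedded in the loop above boils down to a single boolean expression; a hedged sketch with stand-in flags for the config options (note that the trailing 'and' binds tighter than the surrounding 'or's, exactly as in the original expression):

def include_is_static(task_static, cfg_task_static, use_handlers,
                      cfg_handler_static, raw_params_has_vars, has_loop):
    # mirrors: t.static or C.DEFAULT_TASK_INCLUDES_STATIC or
    #          (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or
    #          not templar._contains_vars(...) and t.loop is None
    return (task_static or
            cfg_task_static or
            (use_handlers and cfg_handler_static) or
            (not raw_params_has_vars and not has_loop))

# a literal file name with no loop is inlined at parse time
assert include_is_static(False, False, False, False, raw_params_has_vars=False, has_loop=False)
# a templated file name (or a loop) keeps the include dynamic
assert not include_is_static(False, False, False, False, raw_params_has_vars=True, has_loop=False)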

View file

@ -49,9 +49,15 @@ class IncludedFile:
return "%s (%s): %s" % (self._filename, self._args, self._hosts)
@staticmethod
def process_include_results(results, tqm, iterator, loader, variable_manager):
def process_include_results(results, tqm, iterator, inventory, loader, variable_manager):
included_files = []
def get_original_host(host):
if host.name in inventory._hosts_cache:
return inventory._hosts_cache[host.name]
else:
return inventory.get_host(host.name)
for res in results:
if res._task.action == 'include':
@ -67,9 +73,10 @@ class IncludedFile:
if 'skipped' in include_result and include_result['skipped'] or 'failed' in include_result:
continue
original_task = iterator.get_original_task(res._host, res._task)
original_host = get_original_host(res._host)
original_task = iterator.get_original_task(original_host, res._task)
task_vars = variable_manager.get_vars(loader=loader, play=iterator._play, host=res._host, task=original_task)
task_vars = variable_manager.get_vars(loader=loader, play=iterator._play, host=original_host, task=original_task)
templar = Templar(loader=loader, variables=task_vars)
include_variables = include_result.get('include_variables', dict())
@ -77,18 +84,26 @@ class IncludedFile:
task_vars['item'] = include_variables['item'] = include_result['item']
if original_task:
if original_task.static:
continue
if original_task._task_include:
# handle relative includes by walking up the list of parent include
# tasks and checking the relative result to see if it exists
parent_include = original_task._task_include
cumulative_path = None
while parent_include is not None:
parent_include_dir = templar.template(os.path.dirname(parent_include.args.get('_raw_params')))
if cumulative_path is None:
cumulative_path = parent_include_dir
elif not os.path.isabs(cumulative_path):
cumulative_path = os.path.join(parent_include_dir, cumulative_path)
include_target = templar.template(include_result['include'])
if original_task._role:
new_basedir = os.path.join(original_task._role._role_path, 'tasks', parent_include_dir)
new_basedir = os.path.join(original_task._role._role_path, 'tasks', cumulative_path)
include_file = loader.path_dwim_relative(new_basedir, 'tasks', include_target)
else:
include_file = loader.path_dwim_relative(loader.get_basedir(), parent_include_dir, include_target)
include_file = loader.path_dwim_relative(loader.get_basedir(), cumulative_path, include_target)
if os.path.exists(include_file):
break
@ -111,6 +126,6 @@ class IncludedFile:
except ValueError:
included_files.append(inc_file)
inc_file.add_host(res._host)
inc_file.add_host(original_host)
return included_files

View file

@ -64,7 +64,8 @@ class Play(Base, Taggable, Become):
# Connection
_gather_facts = FieldAttribute(isa='bool', default=None, always_post_validate=True)
_hosts = FieldAttribute(isa='list', default=[], required=True, listof=string_types, always_post_validate=True)
_gather_subset = FieldAttribute(isa='list', default=None, always_post_validate=True)
_hosts = FieldAttribute(isa='list', required=True, listof=string_types, always_post_validate=True)
_name = FieldAttribute(isa='string', default='', always_post_validate=True)
# Variable Attributes
@ -105,6 +106,11 @@ class Play(Base, Taggable, Become):
@staticmethod
def load(data, variable_manager=None, loader=None):
if ('name' not in data or data['name'] is None) and 'hosts' in data:
if isinstance(data['hosts'], list):
data['name'] = ','.join(data['hosts'])
else:
data['name'] = data['hosts']
p = Play()
return p.load_data(data, variable_manager=variable_manager, loader=loader)
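
A hedged sketch of the naming fallback added to Play.load above: a play without an explicit name borrows its hosts pattern (the data dict here is illustrative):

data = {'hosts': ['web01', 'web02'], 'tasks': []}

if ('name' not in data or data['name'] is None) and 'hosts' in data:
    if isinstance(data['hosts'], list):
        data['name'] = ','.join(data['hosts'])
    else:
        data['name'] = data['hosts']

assert data['name'] == 'web01,web02'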

View file

@ -50,7 +50,7 @@ except ImportError:
MAGIC_VARIABLE_MAPPING = dict(
connection = ('ansible_connection',),
connection_args = ('ansible_connection_args',),
docker_extra_args = ('ansible_docker_extra_args',),
remote_addr = ('ansible_ssh_host', 'ansible_host'),
remote_user = ('ansible_ssh_user', 'ansible_user'),
port = ('ansible_ssh_port', 'ansible_port'),
@ -79,6 +79,7 @@ MAGIC_VARIABLE_MAPPING = dict(
su_pass = ('ansible_su_password', 'ansible_su_pass'),
su_exe = ('ansible_su_exe',),
su_flags = ('ansible_su_flags',),
executable = ('ansible_shell_executable',),
)
SU_PROMPT_LOCALIZATIONS = [
@ -121,11 +122,25 @@ TASK_ATTRIBUTE_OVERRIDES = (
'become_pass',
'become_method',
'connection',
'docker_extra_args',
'delegate_to',
'no_log',
'remote_user',
)
RESET_VARS = (
'ansible_connection',
'ansible_docker_extra_args',
'ansible_ssh_host',
'ansible_ssh_pass',
'ansible_ssh_port',
'ansible_ssh_user',
'ansible_ssh_private_key_file',
'ansible_ssh_pipelining',
'ansible_user',
'ansible_host',
'ansible_port',
)
class PlayContext(Base):
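
The docker_extra_args entry added to MAGIC_VARIABLE_MAPPING above is what lets an inventory or host variable reach the connection plugin. A hedged sketch of how the mapping is consumed, simplified from the loop in set_task_and_variable_override further down in this diff; the host vars are made up:

MAGIC_VARIABLE_MAPPING_SAMPLE = {
    'docker_extra_args': ('ansible_docker_extra_args',),
    'remote_user': ('ansible_ssh_user', 'ansible_user'),
}

class FakePlayContext:
    pass

host_vars = {'ansible_docker_extra_args': '--tlsverify', 'ansible_user': 'deploy'}

ctx = FakePlayContext()
for attr, var_names in MAGIC_VARIABLE_MAPPING_SAMPLE.items():
    for var_name in var_names:
        if var_name in host_vars:
            setattr(ctx, attr, host_vars[var_name])   # first matching variable wins
            break

assert ctx.docker_extra_args == '--tlsverify'
assert ctx.remote_user == 'deploy'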
@ -137,7 +152,7 @@ class PlayContext(Base):
# connection fields, some are inherited from Base:
# (connection, port, remote_user, environment, no_log)
_connection_args = FieldAttribute(isa='string')
_docker_extra_args = FieldAttribute(isa='string')
_remote_addr = FieldAttribute(isa='string')
_password = FieldAttribute(isa='string')
_private_key_file = FieldAttribute(isa='string', default=C.DEFAULT_PRIVATE_KEY_FILE)
@ -153,6 +168,7 @@ class PlayContext(Base):
_accelerate = FieldAttribute(isa='bool', default=False)
_accelerate_ipv6 = FieldAttribute(isa='bool', default=False, always_post_validate=True)
_accelerate_port = FieldAttribute(isa='int', default=C.ACCELERATE_PORT, always_post_validate=True)
_executable = FieldAttribute(isa='string', default=C.DEFAULT_EXECUTABLE)
# privilege escalation fields
_become = FieldAttribute(isa='bool')
@ -245,34 +261,23 @@ class PlayContext(Base):
if options.connection:
self.connection = options.connection
if hasattr(options, 'connection_args') and options.connection_args:
self.connection_args = options.connection_args
self.remote_user = options.remote_user
self.private_key_file = options.private_key_file
self.ssh_common_args = options.ssh_common_args
self.sftp_extra_args = options.sftp_extra_args
self.scp_extra_args = options.scp_extra_args
self.ssh_extra_args = options.ssh_extra_args
# privilege escalation
self.become = options.become
self.become_method = options.become_method
self.become_user = options.become_user
self.check_mode = boolean(options.check)
# get ssh options FIXME: make these common to all connections
for flag in ['ssh_common_args', 'docker_extra_args', 'sftp_extra_args', 'scp_extra_args', 'ssh_extra_args']:
setattr(self, flag, getattr(options,flag, ''))
# general flags (should we move out?)
if options.verbosity:
self.verbosity = options.verbosity
if options.check:
self.check_mode = boolean(options.check)
if hasattr(options, 'force_handlers') and options.force_handlers:
self.force_handlers = boolean(options.force_handlers)
if hasattr(options, 'step') and options.step:
self.step = boolean(options.step)
if hasattr(options, 'start_at_task') and options.start_at_task:
self.start_at_task = to_unicode(options.start_at_task)
if hasattr(options, 'diff') and options.diff:
self.diff = boolean(options.diff)
for flag in ['connection','remote_user', 'private_key_file', 'verbosity', 'force_handlers', 'step', 'start_at_task', 'diff']:
attribute = getattr(options, flag, False)
if attribute:
setattr(self, flag, attribute)
if hasattr(options, 'timeout') and options.timeout:
self.timeout = int(options.timeout)
@ -359,24 +364,54 @@ class PlayContext(Base):
else:
delegated_vars = dict()
# setup shell
for exe_var in MAGIC_VARIABLE_MAPPING.get('executable'):
if exe_var in variables:
setattr(new_info, 'executable', variables.get(exe_var))
attrs_considered = []
for (attr, variable_names) in iteritems(MAGIC_VARIABLE_MAPPING):
for variable_name in variable_names:
if isinstance(delegated_vars, dict) and variable_name in delegated_vars:
setattr(new_info, attr, delegated_vars[variable_name])
if attr in attrs_considered:
continue
# if delegation task ONLY use delegated host vars, avoid delegated FOR host vars
if task.delegate_to is not None:
if isinstance(delegated_vars, dict) and variable_name in delegated_vars:
setattr(new_info, attr, delegated_vars[variable_name])
attrs_considered.append(attr)
elif variable_name in variables:
setattr(new_info, attr, variables[variable_name])
attrs_considered.append(attr)
# no else, as no other vars should be considered
# make sure we get port defaults if needed
if new_info.port is None and C.DEFAULT_REMOTE_PORT is not None:
new_info.port = int(C.DEFAULT_REMOTE_PORT)
# become legacy updates
# become legacy updates -- from commandline
if not new_info.become_pass:
if new_info.become_method == 'sudo' and new_info.sudo_pass:
setattr(new_info, 'become_pass', new_info.sudo_pass)
elif new_info.become_method == 'su' and new_info.su_pass:
setattr(new_info, 'become_pass', new_info.su_pass)
# become legacy updates -- from inventory file (inventory overrides
# commandline)
for become_pass_name in MAGIC_VARIABLE_MAPPING.get('become_pass'):
if become_pass_name in variables:
break
else: # This is a for-else
if new_info.become_method == 'sudo':
for sudo_pass_name in MAGIC_VARIABLE_MAPPING.get('sudo_pass'):
if sudo_pass_name in variables:
setattr(new_info, 'become_pass', variables[sudo_pass_name])
break
elif new_info.become_method == 'su':
for su_pass_name in MAGIC_VARIABLE_MAPPING.get('su_pass'):
if su_pass_name in variables:
setattr(new_info, 'become_pass', variables[su_pass_name])
break
# make sure we get port defaults if needed
if new_info.port is None and C.DEFAULT_REMOTE_PORT is not None:
new_info.port = int(C.DEFAULT_REMOTE_PORT)
# special overrides for the connection setting
if len(delegated_vars) > 0:
# in the event that we were using local before make sure to reset the
@ -397,6 +432,13 @@ class PlayContext(Base):
if new_info.no_log is None:
new_info.no_log = C.DEFAULT_NO_LOG
# set become defaults if not previously set
task.set_become_defaults(new_info.become, new_info.become_method, new_info.become_user)
# have always_run override check mode
if task.always_run:
new_info.check_mode = False
return new_info
def make_become_cmd(self, cmd, executable=None):
@ -407,7 +449,7 @@ class PlayContext(Base):
self.prompt = None
if executable is None:
executable = C.DEFAULT_EXECUTABLE
executable = self.executable
if self.become:
@ -432,8 +474,10 @@ class PlayContext(Base):
if self.become_method == 'sudo':
# If we have a password, we run sudo with a randomly-generated
# prompt set using -p. Otherwise we run it with -n, which makes
# prompt set using -p. Otherwise we run it with default -n, which makes
# it fail if it would have prompted for a password.
# Cannot rely on -n as it can be removed from defaults, which should be
# done for older versions of sudo that do not support the option.
#
# Passing a quoted compound command to sudo (or sudo -s)
# directly doesn't work, so we shellquote it with pipes.quote()
@ -449,12 +493,13 @@ class PlayContext(Base):
elif self.become_method == 'su':
# passing code ref to examine prompt as simple string comparison isn't good enough with su
def detect_su_prompt(data):
SU_PROMPT_LOCALIZATIONS_RE = re.compile("|".join(['(\w+\'s )?' + x + ' ?: ?' for x in SU_PROMPT_LOCALIZATIONS]), flags=re.IGNORECASE)
return bool(SU_PROMPT_LOCALIZATIONS_RE.match(data))
prompt = detect_su_prompt
becomecmd = '%s %s %s -c "%s -c %s"' % (exe, flags, self.become_user, executable, success_cmd)
becomecmd = '%s %s %s -c %s' % (exe, flags, self.become_user, pipes.quote('%s -c %s' % (executable, success_cmd)))
elif self.become_method == 'pbrun':
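
The su branch a few lines above detects localized password prompts with a regex built from SU_PROMPT_LOCALIZATIONS. A hedged sketch of that detector; the two localizations listed here are assumptions, the real list is much longer:

import re

SU_PROMPT_LOCALIZATIONS = ['Password', 'Mot de passe']   # assumed subset
SU_PROMPT_LOCALIZATIONS_RE = re.compile(
    "|".join(['(\\w+\'s )?' + x + ' ?: ?' for x in SU_PROMPT_LOCALIZATIONS]),
    flags=re.IGNORECASE)

def detect_su_prompt(data):
    # a plain string comparison is not enough because su localizes its prompt
    return bool(SU_PROMPT_LOCALIZATIONS_RE.match(data))

assert detect_su_prompt("Password: ")
assert detect_su_prompt("root's Mot de passe : ")
assert not detect_su_prompt("some unrelated output")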
@ -468,7 +513,7 @@ class PlayContext(Base):
elif self.become_method == 'runas':
raise AnsibleError("'runas' is not yet implemented")
#TODO: figure out prompt
#FIXME: figure out prompt
# this is not for use with winrm plugin but if they ever get ssh native on windoez
becomecmd = '%s %s /user:%s "%s"' % (exe, flags, self.become_user, success_cmd)
@ -483,6 +528,7 @@ class PlayContext(Base):
if self.become_user:
flags += ' -u %s ' % self.become_user
#FIXME: make shell independent
becomecmd = '%s %s echo %s && %s %s env ANSIBLE=true %s' % (exe, flags, success_key, exe, flags, cmd)
else:
@ -491,7 +537,7 @@ class PlayContext(Base):
if self.become_pass:
self.prompt = prompt
self.success_key = success_key
return ('%s -c %s' % (executable, pipes.quote(becomecmd)))
return becomecmd
return cmd
@ -501,10 +547,14 @@ class PlayContext(Base):
In case users need to access from the play, this is a legacy from runner.
'''
# TODO: should we be setting the more generic values here rather than
# the more specific _ssh_ ones?
for special_var in ['ansible_connection', 'ansible_ssh_host', 'ansible_ssh_pass', 'ansible_ssh_port', 'ansible_ssh_user', 'ansible_ssh_private_key_file', 'ansible_ssh_pipelining']:
if special_var not in variables:
for prop, varnames in MAGIC_VARIABLE_MAPPING.items():
if special_var in varnames:
variables[special_var] = getattr(self, prop)
for prop, var_list in MAGIC_VARIABLE_MAPPING.items():
try:
if 'become' in prop:
continue
var_val = getattr(self, prop)
for var_opt in var_list:
if var_opt not in variables and var_val is not None:
variables[var_opt] = var_val
except AttributeError:
continue

View file

@ -22,7 +22,7 @@ __metaclass__ = type
import os
from ansible.compat.six import iteritems
from ansible.errors import AnsibleParserError
from ansible.errors import AnsibleParserError, AnsibleError
from ansible.parsing.splitter import split_args, parse_kv
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
from ansible.playbook.attribute import FieldAttribute
@ -60,8 +60,15 @@ class PlaybookInclude(Base, Conditional, Taggable):
all_vars.update(variable_manager.get_vars(loader=loader))
templar = Templar(loader=loader, variables=all_vars)
if not new_obj.evaluate_conditional(templar=templar, all_vars=all_vars):
return None
try:
forward_conditional = False
if not new_obj.evaluate_conditional(templar=templar, all_vars=all_vars):
return None
except AnsibleError:
# conditional evaluation raised an error, so we set a flag to indicate
# we need to forward the conditionals on to the included play(s)
forward_conditional = True
# then we use the object to load a Playbook
pb = Playbook(loader=loader)
@ -85,6 +92,13 @@ class PlaybookInclude(Base, Conditional, Taggable):
if entry._included_path is None:
entry._included_path = os.path.dirname(file_name)
# Check to see if we need to forward the conditionals on to the included
# plays. If so, we can take a shortcut here and simply prepend them to
# those attached to each block (if any)
if forward_conditional:
for task_block in entry.tasks:
task_block.when = self.when[:] + task_block.when
return pb
def preprocess_data(self, ds):

View file

@ -43,7 +43,10 @@ __all__ = ['Role', 'hash_params']
# strategies (ansible/plugins/strategy/__init__.py)
def hash_params(params):
if not isinstance(params, dict):
return params
if isinstance(params, list):
return frozenset(params)
else:
return params
else:
s = set()
for k,v in iteritems(params):
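
A hedged sketch of the idea behind the hash_params change above: role parameters must be hashable so (role, params) combinations can be deduplicated, and plain lists are now folded into a frozenset just like dict items (the recursion here is simplified relative to the real helper):

def hash_params(params):
    if not isinstance(params, dict):
        if isinstance(params, list):
            return frozenset(params)
        return params
    s = set()
    for k, v in params.items():
        if isinstance(v, dict):
            s.add((k, hash_params(v)))
        elif isinstance(v, list):
            s.add((k, frozenset(v)))
        else:
            s.add((k, v))
    return frozenset(s)

assert hash_params({'a': 1, 'b': [1, 2]}) == hash_params({'b': [2, 1], 'a': 1})
assert hash_params(['x', 'y']) == frozenset(['x', 'y'])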
@ -61,6 +64,7 @@ def hash_params(params):
class Role(Base, Become, Conditional, Taggable):
_delegate_to = FieldAttribute(isa='string')
_delegate_facts = FieldAttribute(isa='bool', default=False)
def __init__(self, play=None):
self._role_name = None
@ -149,7 +153,7 @@ class Role(Base, Become, Conditional, Taggable):
current_when = getattr(self, 'when')[:]
current_when.extend(role_include.when)
setattr(self, 'when', current_when)
current_tags = getattr(self, 'tags')[:]
current_tags.extend(role_include.tags)
setattr(self, 'tags', current_tags)
@ -172,16 +176,16 @@ class Role(Base, Become, Conditional, Taggable):
task_data = self._load_role_yaml('tasks')
if task_data:
try:
self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader)
except:
self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader, variable_manager=self._variable_manager)
except AssertionError:
raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks" % self._role_name , obj=task_data)
handler_data = self._load_role_yaml('handlers')
if handler_data:
try:
self._handler_blocks = load_list_of_blocks(handler_data, play=self._play, role=self, use_handlers=True, loader=self._loader)
except:
raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks" % self._role_name , obj=task_data)
self._handler_blocks = load_list_of_blocks(handler_data, play=self._play, role=self, use_handlers=True, loader=self._loader, variable_manager=self._variable_manager)
except AssertionError:
raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks" % self._role_name , obj=handler_data)
# vars and default vars are regular dictionaries
self._role_vars = self._load_role_yaml('vars')
@ -248,31 +252,41 @@ class Role(Base, Become, Conditional, Taggable):
def get_parents(self):
return self._parents
def get_default_vars(self):
def get_default_vars(self, dep_chain=[]):
default_vars = dict()
for dep in self.get_all_dependencies():
default_vars = combine_vars(default_vars, dep.get_default_vars())
if dep_chain:
for parent in dep_chain:
default_vars = combine_vars(default_vars, parent._default_vars)
default_vars = combine_vars(default_vars, self._default_vars)
return default_vars
def get_inherited_vars(self, dep_chain=[], include_params=True):
def get_inherited_vars(self, dep_chain=[]):
inherited_vars = dict()
for parent in dep_chain:
inherited_vars = combine_vars(inherited_vars, parent._role_vars)
if include_params:
inherited_vars = combine_vars(inherited_vars, parent._role_params)
if dep_chain:
for parent in dep_chain:
inherited_vars = combine_vars(inherited_vars, parent._role_vars)
return inherited_vars
def get_role_params(self, dep_chain=[]):
params = {}
if dep_chain:
for parent in dep_chain:
params = combine_vars(params, parent._role_params)
params = combine_vars(params, self._role_params)
return params
def get_vars(self, dep_chain=[], include_params=True):
all_vars = self.get_inherited_vars(dep_chain, include_params=include_params)
all_vars = self.get_inherited_vars(dep_chain)
for dep in self.get_all_dependencies():
all_vars = combine_vars(all_vars, dep.get_vars(include_params=include_params))
all_vars = combine_vars(all_vars, self._role_vars)
if include_params:
all_vars = combine_vars(all_vars, self._role_params)
all_vars = combine_vars(all_vars, self.get_role_params(dep_chain=dep_chain))
return all_vars
@ -313,7 +327,7 @@ class Role(Base, Become, Conditional, Taggable):
return host.name in self._completed and not self._metadata.allow_duplicates
def compile(self, play, dep_chain=[]):
def compile(self, play, dep_chain=None):
'''
Returns the task list for this role, which is created by first
recursively compiling the tasks for all direct dependencies, and
@ -327,18 +341,20 @@ class Role(Base, Become, Conditional, Taggable):
block_list = []
# update the dependency chain here
if dep_chain is None:
dep_chain = []
new_dep_chain = dep_chain + [self]
deps = self.get_direct_dependencies()
for dep in deps:
dep_blocks = dep.compile(play=play, dep_chain=new_dep_chain)
for dep_block in dep_blocks:
new_dep_block = dep_block.copy()
new_dep_block._dep_chain = new_dep_chain
new_dep_block._play = play
block_list.append(new_dep_block)
block_list.extend(dep_blocks)
block_list.extend(self._task_blocks)
for task_block in self._task_blocks:
new_task_block = task_block.copy()
new_task_block._dep_chain = new_dep_chain
new_task_block._play = play
block_list.append(new_task_block)
return block_list
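
The get_default_vars/get_role_params rework above applies the dependency chain before the role itself, so the innermost role wins on conflicts. A hedged sketch of that layering (combine_vars is a stand-in here; the real one lives in ansible.utils.vars):

def combine_vars(a, b):
    # stand-in for ansible.utils.vars.combine_vars in 'replace' behaviour
    merged = dict(a)
    merged.update(b)
    return merged

dep_chain_defaults = [{'port': 80, 'workers': 2}]   # parents in the chain come first
own_defaults = {'workers': 4}                       # the role itself is applied last

default_vars = {}
for parent in dep_chain_defaults:
    default_vars = combine_vars(default_vars, parent)
default_vars = combine_vars(default_vars, own_defaults)

assert default_vars == {'port': 80, 'workers': 4}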

View file

@ -135,46 +135,44 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
append it to the default role path
'''
role_path = unfrackpath(role_name)
# we always start the search for roles in the base directory of the playbook
role_search_paths = [
os.path.join(self._loader.get_basedir(), u'roles'),
self._loader.get_basedir(),
]
# also search in the configured roles path
if C.DEFAULT_ROLES_PATH:
configured_paths = C.DEFAULT_ROLES_PATH.split(os.pathsep)
role_search_paths.extend(configured_paths)
# finally, append the roles basedir, if it was set, so we can
# search relative to that directory for dependent roles
if self._role_basedir:
role_search_paths.append(self._role_basedir)
# create a templar class to template the dependency names, in
# case they contain variables
if self._variable_manager is not None:
all_vars = self._variable_manager.get_vars(loader=self._loader, play=self._play)
else:
all_vars = dict()
templar = Templar(loader=self._loader, variables=all_vars)
role_name = templar.template(role_name)
# now iterate through the possible paths and return the first one we find
for path in role_search_paths:
path = templar.template(path)
role_path = unfrackpath(os.path.join(path, role_name))
if self._loader.path_exists(role_path):
return (role_name, role_path)
# if not found elsewhere try to extract path from name
role_path = unfrackpath(role_name)
if self._loader.path_exists(role_path):
role_name = os.path.basename(role_name)
return (role_name, role_path)
else:
# we always start the search for roles in the base directory of the playbook
role_search_paths = [
os.path.join(self._loader.get_basedir(), u'roles'),
u'./roles',
self._loader.get_basedir(),
u'./'
]
# also search in the configured roles path
if C.DEFAULT_ROLES_PATH:
configured_paths = C.DEFAULT_ROLES_PATH.split(os.pathsep)
role_search_paths.extend(configured_paths)
# finally, append the roles basedir, if it was set, so we can
# search relative to that directory for dependent roles
if self._role_basedir:
role_search_paths.append(self._role_basedir)
# create a templar class to template the dependency names, in
# case they contain variables
if self._variable_manager is not None:
all_vars = self._variable_manager.get_vars(loader=self._loader, play=self._play)
else:
all_vars = dict()
templar = Templar(loader=self._loader, variables=all_vars)
role_name = templar.template(role_name)
# now iterate through the possible paths and return the first one we find
for path in role_search_paths:
path = templar.template(path)
role_path = unfrackpath(os.path.join(path, role_name))
if self._loader.path_exists(role_path):
return (role_name, role_path)
raise AnsibleError("the role '%s' was not found in %s" % (role_name, ":".join(role_search_paths)), obj=self._ds)
@ -190,7 +188,12 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
for (key, value) in iteritems(ds):
# use the list of FieldAttribute values to determine what is and is not
# an extra parameter for this role (or sub-class of this role)
if key not in base_attribute_names:
# FIXME: hard-coded list of exception key names here corresponds to the
# connection fields in the Base class. There may need to be some
# other mechanism where we exclude certain kinds of field attributes,
# or make this list more automatic in some way so we don't have to
# remember to update it manually.
if key not in base_attribute_names or key in ('connection', 'port', 'remote_user'):
# this key does not match a field attribute, so it must be a role param
role_params[key] = value
else:

View file

@ -40,7 +40,8 @@ class RoleInclude(RoleDefinition):
is included for execution in a play.
"""
_delegate_to = FieldAttribute(isa='string')
_delegate_to = FieldAttribute(isa='string')
_delegate_facts = FieldAttribute(isa='bool', default=False)
def __init__(self, play=None, role_basedir=None, variable_manager=None, loader=None):
super(RoleInclude, self).__init__(play=play, role_basedir=role_basedir, variable_manager=variable_manager, loader=loader)

View file

@ -190,6 +190,17 @@ class RoleRequirement(RoleDefinition):
if rc != 0:
raise AnsibleError ("- command %s failed in directory %s (rc=%s)" % (' '.join(clone_cmd), tempdir, rc))
if scm == 'git' and version:
checkout_cmd = [scm, 'checkout', version]
with open('/dev/null', 'w') as devnull:
try:
popen = subprocess.Popen(checkout_cmd, cwd=os.path.join(tempdir, name), stdout=devnull, stderr=devnull)
except (IOError, OSError):
raise AnsibleError("error executing: %s" % " ".join(checkout_cmd))
rc = popen.wait()
if rc != 0:
raise AnsibleError("- command %s failed in directory %s (rc=%s)" % (' '.join(checkout_cmd), tempdir, rc))
temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.tar')
if scm == 'hg':
archive_cmd = ['hg', 'archive', '--prefix', "%s/" % name]

View file

@ -38,7 +38,11 @@ class Taggable:
if isinstance(ds, list):
return ds
elif isinstance(ds, basestring):
return [ ds ]
value = ds.split(',')
if isinstance(value, list):
return [ x.strip() for x in value ]
else:
return [ ds ]
else:
raise AnsibleError('tags must be specified as a list', obj=ds)
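
A hedged sketch of the tag normalization added above: a comma-separated tag string is split and stripped into a list (basestring in the diff is the Python 2 spelling; str is used here):

ds = "setup, deploy ,cleanup"

if isinstance(ds, list):
    tags = ds
elif isinstance(ds, str):
    tags = [x.strip() for x in ds.split(',')]
else:
    raise TypeError('tags must be specified as a list or string')

assert tags == ['setup', 'deploy', 'cleanup']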

View file

@ -21,13 +21,12 @@ __metaclass__ = type
from ansible.compat.six import iteritems, string_types
from ansible.errors import AnsibleError
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping, AnsibleUnicode
from ansible.plugins import lookup_loader
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
@ -36,6 +35,8 @@ from ansible.playbook.conditional import Conditional
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
from ansible.utils.unicode import to_str
try:
from __main__ import display
except ImportError:
@ -69,10 +70,11 @@ class Task(Base, Conditional, Taggable, Become):
_any_errors_fatal = FieldAttribute(isa='bool')
_async = FieldAttribute(isa='int', default=0)
_changed_when = FieldAttribute(isa='string')
_changed_when = FieldAttribute(isa='list', default=[])
_delay = FieldAttribute(isa='int', default=5)
_delegate_to = FieldAttribute(isa='string')
_failed_when = FieldAttribute(isa='string')
_delegate_facts = FieldAttribute(isa='bool', default=False)
_failed_when = FieldAttribute(isa='list', default=[])
_first_available_file = FieldAttribute(isa='list')
_loop = FieldAttribute(isa='string', private=True)
_loop_args = FieldAttribute(isa='list', private=True)
@ -81,7 +83,7 @@ class Task(Base, Conditional, Taggable, Become):
_poll = FieldAttribute(isa='int')
_register = FieldAttribute(isa='string')
_retries = FieldAttribute(isa='int', default=3)
_until = FieldAttribute(isa='list')
_until = FieldAttribute(isa='list', default=[])
def __init__(self, block=None, role=None, task_include=None):
''' constructs a task; without the Task.load classmethod, it will be pretty blank '''
@ -106,11 +108,10 @@ class Task(Base, Conditional, Taggable, Become):
elif self.name:
return self.name
else:
flattened_args = self._merge_kv(self.args)
if self._role:
return "%s : %s %s" % (self._role.get_name(), self.action, flattened_args)
return "%s : %s" % (self._role.get_name(), self.action)
else:
return "%s %s" % (self.action, flattened_args)
return "%s" % (self.action,)
def _merge_kv(self, ds):
if ds is None:
@ -133,7 +134,10 @@ class Task(Base, Conditional, Taggable, Become):
def __repr__(self):
''' returns a human readable representation of the task '''
return "TASK: %s" % self.get_name()
if self.get_name() == 'meta':
return "TASK: meta (%s)" % self.args['_raw_params']
else:
return "TASK: %s" % self.get_name()
def _preprocess_loop(self, ds, new_ds, k, v):
''' take a lookup plugin name and store it correctly '''
@ -165,7 +169,10 @@ class Task(Base, Conditional, Taggable, Become):
# and the delegate_to value from the various possible forms
# supported as legacy
args_parser = ModuleArgsParser(task_ds=ds)
(action, args, delegate_to) = args_parser.parse()
try:
(action, args, delegate_to) = args_parser.parse()
except AnsibleParserError as e:
raise AnsibleParserError(to_str(e), obj=ds)
# the command/shell/script modules used to support the `cmd` arg,
# which corresponds to what we now call _raw_params, so move that
@ -213,14 +220,6 @@ class Task(Base, Conditional, Taggable, Become):
return super(Task, self).preprocess_data(new_ds)
def _load_any_errors_fatal(self, attr, value):
'''
Exists only to show a deprecation warning, as this attribute is not valid
at the task level.
'''
display.deprecated("Setting any_errors_fatal on a task is no longer supported. This should be set at the play level only")
return None
def post_validate(self, templar):
'''
Override of base class post_validate, to also do final validation on
@ -234,6 +233,13 @@ class Task(Base, Conditional, Taggable, Become):
super(Task, self).post_validate(templar)
def _post_validate_register(self, attr, value, templar):
'''
Override post validation for the register args field, which is not
supposed to be templated
'''
return value
def _post_validate_loop_args(self, attr, value, templar):
'''
Override post validation for the loop args field, which is templated
@ -249,13 +255,44 @@ class Task(Base, Conditional, Taggable, Become):
if value is None:
return dict()
for env_item in value:
if isinstance(env_item, (string_types, AnsibleUnicode)) and env_item in templar._available_variables.keys():
display.deprecated("Using bare variables for environment is deprecated."
" Update your playbooks so that the environment value uses the full variable syntax ('{{foo}}')")
break
elif isinstance(value, list):
if len(value) == 1:
return templar.template(value[0], convert_bare=True)
else:
env = []
for env_item in value:
if isinstance(env_item, (string_types, AnsibleUnicode)) and env_item in templar._available_variables.keys():
env[env_item] = templar.template(env_item, convert_bare=True)
elif isinstance(value, dict):
env = dict()
for env_item in value:
if isinstance(env_item, (string_types, AnsibleUnicode)) and env_item in templar._available_variables.keys():
env[env_item] = templar.template(value[env_item], convert_bare=True)
# at this point it should be a simple string
return templar.template(value, convert_bare=True)
def _post_validate_changed_when(self, attr, value, templar):
'''
changed_when is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def _post_validate_failed_when(self, attr, value, templar):
'''
failed_when is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def _post_validate_until(self, attr, value, templar):
'''
until is evaluated after the execution of the task is complete,
and should not be templated during the regular post_validate step.
'''
return value
def get_vars(self):
all_vars = dict()
if self._block:
@ -398,3 +435,10 @@ class Task(Base, Conditional, Taggable, Become):
if parent_environment is not None:
environment = self._extend_value(environment, parent_environment)
return environment
def _get_attr_any_errors_fatal(self):
'''
Override for the 'any_errors_fatal' getattr fetcher, used from Base.
'''
return self._get_parent_attribute('any_errors_fatal')

View file

@ -0,0 +1,72 @@
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.task import Task
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['TaskInclude']
class TaskInclude(Task):
"""
A task include is derived from a regular task to handle the special
circumstances related to the `- include: ...` task.
"""
# =================================================================================
# ATTRIBUTES
_static = FieldAttribute(isa='bool', default=False)
@staticmethod
def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
t = TaskInclude(block=block, role=role, task_include=task_include)
return t.load_data(data, variable_manager=variable_manager, loader=loader)
def get_vars(self):
'''
We override the parent Task() class's get_vars here because
we need to include the args of the include into the vars as
they are params to the included tasks.
'''
all_vars = dict()
if self._block:
all_vars.update(self._block.get_vars())
if self._task_include:
all_vars.update(self._task_include.get_vars())
all_vars.update(self.vars)
all_vars.update(self.args)
if 'tags' in all_vars:
del all_vars['tags']
if 'when' in all_vars:
del all_vars['when']
return all_vars