WIP on the re-implementation of fact caching and various backends.
commit aa419044c4
parent fb5a1403dd
11 changed files with 466 additions and 48 deletions
@@ -22,6 +22,7 @@ from ansible.utils.template import template
from ansible import utils
from ansible import errors
import ansible.callbacks
import ansible.cache
import os
import shlex
import collections
@@ -32,9 +33,10 @@ import pipes
# the setup cache stores all variables about a host
# gathered during the setup step, while the vars cache
# holds all other variables about a host
SETUP_CACHE = collections.defaultdict(dict)
SETUP_CACHE = ansible.cache.FactCache()
VARS_CACHE = collections.defaultdict(dict)


class PlayBook(object):
    '''
    runs an ansible playbook, given as a datastructure or YAML filename.
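Swapping collections.defaultdict(dict) for ansible.cache.FactCache() only works if the new object still behaves like a dict keyed by host name, since the rest of the playbook code indexes it and calls .get() on it directly. As a rough sketch of that idea (the class, backend, and method names below are assumptions made for the illustration, not necessarily what this WIP commit implements), a fact cache can be a thin dict-like facade over a pluggable store:

class MemoryBackend(object):
    # illustrative in-process store; a real backend might be a JSON file, memcached, or redis
    def __init__(self):
        self._data = {}

    def get(self, key):
        # hand back a copy so callers cannot silently mutate the stored value
        return dict(self._data.get(key, {}))

    def set(self, key, value):
        self._data[key] = dict(value)

    def contains(self, key):
        return key in self._data


class FactCacheSketch(object):
    # dict-like facade over a backend, so code like SETUP_CACHE[host] keeps working
    def __init__(self, backend=None):
        self._backend = backend if backend is not None else MemoryBackend()

    def __getitem__(self, host):
        return self._backend.get(host)

    def __setitem__(self, host, facts):
        self._backend.set(host, facts)

    def __contains__(self, host):
        return self._backend.contains(host)

    def get(self, host, default=None):
        return self[host] if host in self else default


SETUP_CACHE = FactCacheSketch()
SETUP_CACHE['web01'] = {'ansible_os_family': 'Debian'}
print(SETUP_CACHE.get('web01', {}))   # {'ansible_os_family': 'Debian'}

The caveat with such a wrapper is that SETUP_CACHE[host] hands back a value fetched from the backend, so mutating that dict in place may never be written back; that is exactly why the later hunks in this diff stop calling .update() on SETUP_CACHE[host] directly.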
@@ -98,7 +100,7 @@ class PlayBook(object):
        inventory: can be specified instead of host_list to use a pre-existing inventory object
        check: don't change anything, just try to detect some potential changes
        any_errors_fatal: terminate the entire execution immediately when one of the hosts has failed
        force_handlers: continue to notify and run handlers even if a task fails
        """

        self.SETUP_CACHE = SETUP_CACHE
@@ -187,7 +189,7 @@ class PlayBook(object):

    def _get_playbook_vars(self, play_ds, existing_vars):
        '''
        Gets the vars specified with the play and blends them
        with any existing vars that have already been read in
        '''
        new_vars = existing_vars.copy()
@@ -470,6 +472,13 @@ class PlayBook(object):
        contacted = results.get('contacted', {})
        self.stats.compute(results, ignore_errors=task.ignore_errors)

        def _register_play_vars(host, result):
            # when 'register' is used, persist the result in the vars cache
            # rather than the setup cache - vars should be transient between playbook executions
            if 'stdout' in result and 'stdout_lines' not in result:
                result['stdout_lines'] = result['stdout'].splitlines()
            utils.update_hash(self.VARS_CACHE, host, {task.register: result})

        # add facts to the global setup cache
        for host, result in contacted.iteritems():
            if 'results' in result:
@@ -478,22 +487,19 @@
                for res in result['results']:
                    if type(res) == dict:
                        facts = res.get('ansible_facts', {})
                        self.SETUP_CACHE[host].update(facts)
                        utils.update_hash(self.SETUP_CACHE, host, facts)
            else:
                # when facts are returned, persist them in the setup cache
                facts = result.get('ansible_facts', {})
                self.SETUP_CACHE[host].update(facts)
                utils.update_hash(self.SETUP_CACHE, host, facts)
            if task.register:
                if 'stdout' in result and 'stdout_lines' not in result:
                    result['stdout_lines'] = result['stdout'].splitlines()
                self.SETUP_CACHE[host][task.register] = result
                _register_play_vars(host, result)

        # also have to register some failed, but ignored, tasks
        if task.ignore_errors and task.register:
            failed = results.get('failed', {})
            for host, result in failed.iteritems():
                if 'stdout' in result and 'stdout_lines' not in result:
                    result['stdout_lines'] = result['stdout'].splitlines()
                self.SETUP_CACHE[host][task.register] = result
                _register_play_vars(host, result)

        # flag which notify handlers need to be run
        if len(task.notify) > 0:
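The recurring change from self.SETUP_CACHE[host].update(facts) to utils.update_hash(self.SETUP_CACHE, host, facts) is the other half of the same move: once the setup cache may be backed by an external store, mutating the dict returned by SETUP_CACHE[host] in place can bypass the backend entirely. A helper along these lines (a sketch of the idea, not necessarily the exact body of ansible.utils.update_hash) merges the new values and then assigns the result back, so that __setitem__ runs and a dict-like cache actually sees the write:

def update_hash(hash_obj, key, new_value):
    # sketch of the helper's idea: read (or create) the nested dict for this
    # key, merge in the new values, then assign the merged dict back so that
    # dict-like cache wrappers see the write through __setitem__
    value = hash_obj.get(key, {})
    value.update(new_value)
    hash_obj[key] = value


# behaves the same for a plain dict and for a dict-like cache wrapper
cache = {}
update_hash(cache, 'web01', {'ansible_os_family': 'Debian'})
update_hash(cache, 'web01', {'ansible_distribution': 'Ubuntu'})
print(cache['web01'])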
@@ -585,8 +591,8 @@ class PlayBook(object):
        # let runner template out future commands
        setup_ok = setup_results.get('contacted', {})
        for (host, result) in setup_ok.iteritems():
            self.SETUP_CACHE[host].update({'module_setup': True})
            self.SETUP_CACHE[host].update(result.get('ansible_facts', {}))
            utils.update_hash(self.SETUP_CACHE, host, {'module_setup': True})
            utils.update_hash(self.SETUP_CACHE, host, result.get('ansible_facts', {}))
        return setup_results

    # *****************************************************
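Recording the module_setup marker through the same helper means the flag is persisted wherever the facts themselves live. Presumably that is what would let a later run skip fact gathering for hosts whose facts are already cached; the check below is purely illustrative and not part of the diff shown here, with names invented for the example:

# pretend web01 was populated by an earlier run and db01 was not
SETUP_CACHE = {'web01': {'module_setup': True, 'ansible_os_family': 'Debian'}}
play_hosts = ['web01', 'db01']

hosts_needing_setup = [h for h in play_hosts
                       if not SETUP_CACHE.get(h, {}).get('module_setup', False)]
print(hosts_needing_setup)   # ['db01']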
@@ -620,7 +626,7 @@ class PlayBook(object):

    def _run_play(self, play):
        ''' run a list of tasks for a given pattern, in order '''

        self.callbacks.on_play_start(play.name)
        # Get the hosts for this play
        play._play_hosts = self.inventory.list_hosts(play.hosts)
@@ -29,6 +29,7 @@ import os
import sys
import uuid


class Play(object):

    __slots__ = [
@@ -85,7 +86,7 @@ class Play(object):
        # now we load the roles into the datastructure
        self.included_roles = []
        ds = self._load_roles(self.roles, ds)

        # and finally re-process the vars files as they may have
        # been updated by the included roles
        self.vars_files = ds.get('vars_files', [])
@@ -153,6 +154,7 @@ class Play(object):
        self._tasks = self._load_tasks(self._ds.get('tasks', []), load_vars)
        self._handlers = self._load_tasks(self._ds.get('handlers', []), load_vars)

        # apply any missing tags to role tasks
        self._late_merge_role_tags()
@@ -167,7 +169,7 @@ class Play(object):
    def _get_role_path(self, role):
        """
        Returns the path on disk to the directory containing
        the role directories like tasks, templates, etc. Also
        returns any variables that were included with the role
        """
        orig_path = template(self.basedir,role,self.vars)
@@ -242,7 +244,7 @@ class Play(object):
                allow_dupes = utils.boolean(meta_data.get('allow_duplicates',''))

            # if any tags were specified as role/dep variables, merge
            # them into the current dep_vars so they're passed on to any
            # further dependencies too, and so we only have one place
            # (dep_vars) to look for tags going forward
            def __merge_tags(var_obj):
@@ -318,7 +320,7 @@ class Play(object):
                        dep_stack.append([dep,dep_path,dep_vars,dep_defaults_data])

            # only add the current role when we're at the top level,
            # otherwise we'll end up in a recursive loop
            if level == 0:
                self.included_roles.append(role)
                dep_stack.append([role,role_path,role_vars,defaults_data])
@@ -505,7 +507,7 @@ class Play(object):
            if not isinstance(x, dict):
                raise errors.AnsibleError("expecting dict; got: %s, error in %s" % (x, original_file))

            # evaluate sudo vars for current and child tasks
            included_sudo_vars = {}
            for k in ["sudo", "sudo_user"]:
                if k in x:
@@ -554,7 +556,7 @@ class Play(object):
                else:
                    default_vars = utils.combine_vars(self.default_vars, default_vars)

                # append the vars defined with the include (from above)
                # as well as the old-style 'vars' element. The old-style
                # vars are given higher precedence here (just in case)
                task_vars = utils.combine_vars(task_vars, include_vars)
@@ -610,8 +612,8 @@ class Play(object):

    def _is_valid_tag(self, tag_list):
        """
        Check to see if the list of tags passed in is in the list of tags
        we only want (playbook.only_tags), or if it is not in the list of
        tags we don't want (playbook.skip_tags).
        """
        matched_skip_tags = set(tag_list) & set(self.playbook.skip_tags)
@@ -774,7 +776,7 @@ class Play(object):
                inject.update(self.vars)
                filename4 = template(self.basedir, filename3, inject)
                filename4 = utils.path_dwim(self.basedir, filename4)
            else:
                filename4 = utils.path_dwim(self.basedir, filename3)
            return filename2, filename3, filename4
@@ -823,7 +825,7 @@ class Play(object):
            inject.update(self.playbook.SETUP_CACHE.get(host, {}))
            inject.update(self.playbook.VARS_CACHE.get(host, {}))
        else:
            inject = None

        for filename in self.vars_files:
            if type(filename) == list:
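The order of the two inject.update() calls in this hunk is what gives the vars cache precedence over gathered facts: later updates win, and both caches are read with .get(host, {}), which is another method a dict-like fact cache has to provide. A self-contained illustration of the layering, with the host name and values invented for the example:

setup_cache = {'web01': {'ansible_os_family': 'Debian', 'listen_port': 80}}   # gathered facts
vars_cache = {'web01': {'listen_port': 8080}}                                 # e.g. a value stored via register

inject = {}
inject.update(setup_cache.get('web01', {}))   # facts first
inject.update(vars_cache.get('web01', {}))    # per-play vars layered on top, so they win
print(inject['listen_port'])                  # 8080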
@@ -854,4 +856,4 @@ class Play(object):

        # finally, update the VARS_CACHE for the host, if it is set
        if host is not None:
            self.playbook.VARS_CACHE[host].update(self.playbook.extra_vars)
            self.playbook.VARS_CACHE.setdefault(host, {}).update(self.playbook.extra_vars)
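The final hunk trades VARS_CACHE[host].update(...) for VARS_CACHE.setdefault(host, {}).update(...). With a collections.defaultdict the two are equivalent, but the setdefault form keeps working if the vars cache ever becomes a plain dict or another dict-like object where indexing a missing host raises KeyError. A quick comparison:

import collections

extra_vars = {'deploy_env': 'staging'}

as_defaultdict = collections.defaultdict(dict)
as_defaultdict['web01'].update(extra_vars)                 # fine: a missing key auto-creates {}

as_plain_dict = {}
# as_plain_dict['web01'].update(extra_vars)                # would raise KeyError
as_plain_dict.setdefault('web01', {}).update(extra_vars)   # works for any dict-like mapping
print(as_plain_dict['web01'])                              # {'deploy_env': 'staging'}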