Tweaked merge_hash to also affect Runner behavior
parent: 94d189bc7f
commit: 6826aa7360

9 changed files with 110 additions and 23 deletions
@@ -49,11 +49,7 @@ class VarsModule(object):
                 data = utils.parse_yaml_from_file(path)
                 if type(data) != dict:
                     raise errors.AnsibleError("%s must be stored as a dictionary/hash" % path)
-                if C.DEFAULT_HASH_BEHAVIOUR == "merge":
-                    # let data content override results if needed
-                    results = utils.merge_hash(results, data)
-                else:
-                    results.update(data)
+                results = utils.combine_vars(results, data);
 
         # load vars in inventory_dir/hosts_vars/name_of_host
         path = os.path.join(basedir, "host_vars/%s" % host.name)
@@ -61,10 +57,6 @@ class VarsModule(object):
             data = utils.parse_yaml_from_file(path)
             if type(data) != dict:
                 raise errors.AnsibleError("%s must be stored as a dictionary/hash" % path)
-            if C.DEFAULT_HASH_BEHAVIOUR == "merge":
-                # let data content override results if needed
-                results = utils.merge_hash(results, data)
-            else:
-                results.update(data)
+            results = utils.combine_vars(results, data);
 
         return results
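Both group_vars and host_vars loading now funnel through utils.combine_vars, which chooses between a recursive merge and a plain replace based on the hash behaviour setting. A minimal standalone sketch of that dispatch (a Python 3 re-creation for illustration only; the hash_behaviour parameter stands in for C.DEFAULT_HASH_BEHAVIOUR, and the variable contents are invented):

    # Standalone sketch, not the diff itself: what combine_vars dispatches to.
    def merge_hash(a, b):
        # recursively merge hash b into a, in the spirit of utils.merge_hash
        for k, v in b.items():
            if k in a and isinstance(a[k], dict) and isinstance(v, dict):
                a[k] = merge_hash(a[k], v)
            else:
                a[k] = v
        return a

    def combine_vars(a, b, hash_behaviour="replace"):
        # hash_behaviour stands in for C.DEFAULT_HASH_BEHAVIOUR
        if hash_behaviour == "merge":
            return merge_hash(a, b)
        else:
            return dict(list(a.items()) + list(b.items()))

    group_vars = {"ntp": {"server": "a.example.com", "iburst": True}}
    host_vars = {"ntp": {"server": "b.example.com"}}

    print(combine_vars(dict(group_vars), host_vars))
    # {'ntp': {'server': 'b.example.com'}}  -- replace: nested hash overwritten wholesale
    print(combine_vars({"ntp": dict(group_vars["ntp"])}, host_vars, "merge"))
    # {'ntp': {'server': 'b.example.com', 'iburst': True}}  -- merge: keys combined recursively

With the default replace behaviour the nested hash is overwritten wholesale; with "merge" its keys are combined recursively.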
@@ -330,8 +330,9 @@ class Play(object):
             if host is not None and self._has_vars_in(filename2) and not self._has_vars_in(filename3):
                 # running a host specific pass and has host specific variables
                 # load into setup cache
-                self.playbook.SETUP_CACHE[host].update(new_vars)
+                self.playbook.SETUP_CACHE[host] = utils.combine_vars(
+                    self.playbook.SETUP_CACHE[host], new_vars)
                 self.playbook.callbacks.on_import_for_host(host, filename4)
             elif host is None:
                 # running a non-host specific pass and we can update the global vars instead
-                self.vars.update(new_vars)
+                self.vars = utils.combine_vars(self.vars, new_vars)
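The reassignment is why these lines change shape: dict.update() mutates the setup cache in place, while combine_vars() returns its result, so the caller has to store it back. A tiny illustration (a replace-style merge stands in for utils.combine_vars; the cache contents are invented):

    # Illustration only: dict.update() mutates in place, combine_vars() returns a value.
    def combine_vars(a, b):
        # replace-style stand-in for utils.combine_vars
        return dict(list(a.items()) + list(b.items()))

    SETUP_CACHE = {"host1": {"a": 1}}
    new_vars = {"b": 2}

    SETUP_CACHE["host1"].update(new_vars)                                # old style: in-place
    SETUP_CACHE["host1"] = combine_vars(SETUP_CACHE["host1"], new_vars)  # new style: reassign
    print(SETUP_CACHE)  # {'host1': {'a': 1, 'b': 2}}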
@@ -175,7 +175,7 @@ class Runner(object):
 
         # ensure we are using unique tmp paths
         random.seed()
 
     # *****************************************************
 
     def _complex_args_hack(self, complex_args, module_args):
@@ -333,9 +333,9 @@ class Runner(object):
         port = self.remote_port
 
         inject = {}
-        inject.update(host_variables)
-        inject.update(self.module_vars)
-        inject.update(self.setup_cache[host])
+        inject = utils.combine_vars(inject, host_variables)
+        inject = utils.combine_vars(inject, self.module_vars)
+        inject = utils.combine_vars(inject, self.setup_cache[host])
         inject['hostvars'] = HostVars(self.setup_cache, self.inventory)
         inject['group_names'] = host_variables.get('group_names', [])
         inject['groups'] = self.inventory.groups_list()
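The inject construction keeps its original precedence order (host variables, then module vars, then the setup cache, with later sources winning), but each step now goes through combine_vars so the "merge" hash behaviour can deep-merge instead of overwrite. A sketch of that chain under replace-style behaviour (variable contents invented for illustration):

    # Sketch of the precedence chain; combine_vars is the same replace-style stand-in.
    def combine_vars(a, b):
        return dict(list(a.items()) + list(b.items()))

    host_variables = {"http_port": 80, "app": {"debug": False}}
    module_vars = {"app": {"debug": True}}
    setup_cache_for_host = {"http_port": 8080}

    inject = {}
    inject = combine_vars(inject, host_variables)
    inject = combine_vars(inject, module_vars)            # module vars override host vars
    inject = combine_vars(inject, setup_cache_for_host)   # facts/registered vars win last
    print(inject)  # {'http_port': 8080, 'app': {'debug': True}}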
@@ -492,7 +492,7 @@ class Runner(object):
         # all modules get a tempdir, action plugins get one unless they have NEEDS_TMPPATH set to False
         if getattr(handler, 'NEEDS_TMPPATH', True):
             tmp = self._make_tmp_path(conn)
 
         result = handler.run(conn, tmp, module_name, module_args, inject, complex_args)
 
         conn.close()
@@ -625,8 +625,8 @@ class Runner(object):
         module_data = f.read()
         if module_common.REPLACER in module_data:
             is_new_style=True
 
         complex_args_json = utils.jsonify(complex_args)
         encoded_args = "\"\"\"%s\"\"\"" % module_args.replace("\"","\\\"")
         encoded_lang = "\"\"\"%s\"\"\"" % C.DEFAULT_MODULE_LANG
         encoded_complex = "\"\"\"%s\"\"\"" % complex_args_json.replace("\\", "\\\\")
@@ -635,7 +635,7 @@ class Runner(object):
         module_data = module_data.replace(module_common.REPLACER_ARGS, encoded_args)
         module_data = module_data.replace(module_common.REPLACER_LANG, encoded_lang)
         module_data = module_data.replace(module_common.REPLACER_COMPLEX, encoded_complex)
 
         if is_new_style:
             facility = C.DEFAULT_SYSLOG_FACILITY
             if 'ansible_syslog_facility' in inject:
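For context on the two hunks above: the serialized module arguments are spliced into the module source by replacing placeholder markers with triple-quoted string literals before the module is shipped to the remote host. A rough standalone sketch of that substitution pattern (the marker values and the module text here are made up for illustration; only the encode/replace shape mirrors the diff):

    # Rough sketch of the placeholder substitution; marker names are hypothetical.
    import json

    REPLACER_ARGS = "<<MODULE_ARGS_PLACEHOLDER>>"
    REPLACER_COMPLEX = "<<MODULE_COMPLEX_ARGS_PLACEHOLDER>>"

    module_data = "MODULE_ARGS = %s\nMODULE_COMPLEX_ARGS = %s\n" % (REPLACER_ARGS, REPLACER_COMPLEX)

    module_args = 'name=httpd state=present'
    complex_args_json = json.dumps({"name": "httpd", "state": "present"})

    encoded_args = "\"\"\"%s\"\"\"" % module_args.replace("\"", "\\\"")
    encoded_complex = "\"\"\"%s\"\"\"" % complex_args_json.replace("\\", "\\\\")

    module_data = module_data.replace(REPLACER_ARGS, encoded_args)
    module_data = module_data.replace(REPLACER_COMPLEX, encoded_complex)
    print(module_data)
    # MODULE_ARGS = """name=httpd state=present"""
    # MODULE_COMPLEX_ARGS = """{"name": "httpd", "state": "present"}"""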
@@ -737,7 +737,7 @@ class Runner(object):
         # run once per hostgroup, rather than pausing once per each
         # host.
         p = utils.plugins.action_loader.get(self.module_name, self)
 
         if p and getattr(p, 'BYPASS_HOST_LOOP', None):
 
             # Expose the current hostgroup to the bypassing plugins
@@ -306,7 +306,7 @@ def merge_hash(a, b):
     for k, v in b.iteritems():
         if k in a and isinstance(a[k], dict):
             # if this key is a hash and exists in a
             # we recursively call ourselves with
             # the key value of b
             a[k] = merge_hash(a[k], v)
         else:
@@ -663,8 +663,13 @@ def get_diff(diff):
             return ">> the files are different, but the diff library cannot compare unicode strings"
 
 def is_list_of_strings(items):
     for x in items:
         if not isinstance(x, basestring):
             return False
     return True
 
+def combine_vars(a, b):
+    if C.DEFAULT_HASH_BEHAVIOUR == "merge":
+        return merge_hash(a, b)
+    else:
+        return dict(a.items() + b.items())
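The non-"merge" branch of the new combine_vars relies on the Python 2 idiom dict(a.items() + b.items()), in which keys from b win on conflict and nested hashes are replaced wholesale rather than merged. For comparison, a Python 3 equivalent (an assumption, not part of the diff):

    # Python 3 comparison of the replace-style branch; the diff itself is Python 2.
    a = {"x": 1, "nested": {"keep": True}}
    b = {"x": 2, "nested": {"new": True}}

    replaced = {**a, **b}   # same effect as dict(a.items() + b.items()): b's keys win outright
    print(replaced)         # {'x': 2, 'nested': {'new': True}}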