mirror of
https://github.com/ansible-collections/community.general.git
synced 2025-04-24 19:31:26 -07:00
Merge pull request #1159 from dhozac/include-with_items
Allow task includes to work with with_items
This commit is contained in commit e756ee3741.
3 changed files with 42 additions and 23 deletions
|
@ -278,17 +278,6 @@ def _varFind(text):
|
|||
path.append(text[part_start[0]:var_end])
|
||||
return {'path': path, 'start': start, 'end': end}
|
||||
|
||||
def varLookup(varname, vars):
    ''' helper function used by with_items '''

    # Resolve a single $variable reference to its value, or None when the
    # string is not a variable reference or the lookup path does not exist.
    found = _varFind(varname)
    if not found:
        # varname contains no variable expression at all
        return None
    try:
        return _varLookup(found['path'], vars)
    except VarNotFoundException:
        # variable path was syntactically valid but absent from vars
        return None
|
||||
|
||||
def varReplace(raw, vars, depth=0):
|
||||
''' Perform variable replacement of $variables in string raw using vars dictionary '''
|
||||
# this code originally from yum
|
||||
|
@ -360,6 +349,30 @@ def varReplaceFilesAndPipes(basedir, raw):
|
|||
|
||||
return ''.join(done)
|
||||
|
||||
def varReplaceWithItems(basedir, varname, vars):
    ''' helper function used by with_items '''

    # Recursively substitute variables in varname, which may be a string,
    # a list/tuple, or a dict; containers are rebuilt element by element.
    if isinstance(varname, (list, tuple)):
        expanded = []
        for element in varname:
            expanded.append(varReplaceWithItems(basedir, element, vars))
        return expanded

    if isinstance(varname, dict):
        return dict((key, varReplaceWithItems(basedir, value, vars))
                    for (key, value) in varname.iteritems())

    if not isinstance(varname, basestring):
        raise Exception("invalid with_items type")

    found = _varFind(varname)
    if not found:
        # plain string with no variable expression: return untouched
        return varname
    if found['start'] != 0 or found['end'] != len(varname):
        # variable is embedded inside a larger string: run full templating
        return template(basedir, varname, vars)
    try:
        # the whole string is one variable: look it up and recurse, so a
        # variable whose value is itself a list/dict/reference gets expanded
        return varReplaceWithItems(basedir, _varLookup(found['path'], vars), vars)
    except VarNotFoundException:
        # unknown variable: leave the original reference in place
        return varname
|
||||
|
||||
|
||||
def template(basedir, text, vars):
|
||||
''' run a text buffer through the templating engine until it no longer changes '''
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue