mirror of https://github.com/ansible-collections/community.general.git (synced 2025-07-22 21:00:22 -07:00)
parent 2f33c1a1a1
commit 5553b20828
206 changed files with 1853 additions and 1870 deletions
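The bulk of this commit is mechanical PEP 8 cleanup across the CLI, inventory, and module_utils trees. As a hedged illustration (a composite drawn from the hunks below, not literal lines from any one file), the recurring fixes look like this:

    # before: padding inside braces/brackets (E201/E202), missing whitespace
    # around '=' (E225) and after ',' (E231), over-long lines (E501)
    passwords = { 'conn_pass': sshpass, 'become_pass': becomepass }
    run_tree=False
    parser.add_option('-f','--forks', dest='forks')

    # after: the style the commit normalizes to
    passwords = {'conn_pass': sshpass, 'become_pass': becomepass}
    run_tree = False
    parser.add_option('-f', '--forks', dest='forks')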
@@ -20,25 +20,26 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type
 
+import getpass
 import operator
 import optparse
 import os
 import subprocess
 import re
 import sys
 import time
 import yaml
-import re
-import getpass
-import subprocess
 
 from abc import ABCMeta, abstractmethod
 
 import ansible
-from ansible.release import __version__
 from ansible import constants as C
 from ansible.errors import AnsibleError, AnsibleOptionsError
 from ansible.inventory.manager import InventoryManager
 from ansible.module_utils.six import with_metaclass, string_types
 from ansible.module_utils._text import to_bytes, to_text
 from ansible.parsing.dataloader import DataLoader
+from ansible.release import __version__
 from ansible.utils.path import unfrackpath
 from ansible.utils.vars import load_extra_vars, load_options_vars
 from ansible.vars.manager import VariableManager

@@ -79,7 +80,7 @@ class InvalidOptsParser(SortedOptParser):
                               add_help_option=False,
                               prog=parser.prog,
                               epilog=parser.epilog)
-        self.version=parser.version
+        self.version = parser.version
 
     def _process_long_opt(self, rargs, values):
         try:

@@ -93,6 +94,7 @@ class InvalidOptsParser(SortedOptParser):
         except optparse.BadOptionError:
             pass
 
+
 class CLI(with_metaclass(ABCMeta, object)):
     ''' code behind bin/ansible* programs '''
 

@@ -293,12 +295,13 @@ class CLI(with_metaclass(ABCMeta, object)):
 
         # base opts
         parser = SortedOptParser(usage, version=CLI.version("%prog"), description=desc, epilog=epilog)
-        parser.add_option('-v','--verbose', dest='verbosity', default=C.DEFAULT_VERBOSITY, action="count",
+        parser.add_option('-v', '--verbose', dest='verbosity', default=C.DEFAULT_VERBOSITY, action="count",
                           help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")
 
         if inventory_opts:
             parser.add_option('-i', '--inventory', '--inventory-file', dest='inventory', action="append",
-                              help="specify inventory host path (default=[%s]) or comma separated host list. --inventory-file is deprecated" % C.DEFAULT_HOST_LIST)
+                              help="specify inventory host path (default=[%s]) or comma separated host list. "
+                                   "--inventory-file is deprecated" % C.DEFAULT_HOST_LIST)
             parser.add_option('--list-hosts', dest='listhosts', action='store_true',
                               help='outputs a list of matching hosts; does not execute anything else')
             parser.add_option('-l', '--limit', default=C.DEFAULT_SUBSET, dest='subset',

@@ -313,7 +316,7 @@ class CLI(with_metaclass(ABCMeta, object)):
                               help="set additional variables as key=value or YAML/JSON", default=[])
 
         if fork_opts:
-            parser.add_option('-f','--forks', dest='forks', default=C.DEFAULT_FORKS, type='int',
+            parser.add_option('-f', '--forks', dest='forks', default=C.DEFAULT_FORKS, type='int',
                               help="specify number of parallel processes to use (default=%s)" % C.DEFAULT_FORKS)
 
         if vault_opts:

@@ -343,7 +346,7 @@ class CLI(with_metaclass(ABCMeta, object)):
             connect_group = optparse.OptionGroup(parser, "Connection Options", "control as whom and how to connect to hosts")
             connect_group.add_option('-k', '--ask-pass', default=C.DEFAULT_ASK_PASS, dest='ask_pass', action='store_true',
                                      help='ask for connection password')
-            connect_group.add_option('--private-key','--key-file', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
+            connect_group.add_option('--private-key', '--key-file', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
                                      help='use this file to authenticate the connection', action="callback", callback=CLI.unfrack_path, type='string')
             connect_group.add_option('-u', '--user', default=C.DEFAULT_REMOTE_USER, dest='remote_user',
                                      help='connect as this user (default=%s)' % C.DEFAULT_REMOTE_USER)

@@ -380,7 +383,8 @@ class CLI(with_metaclass(ABCMeta, object)):
             runas_group.add_option("-b", "--become", default=C.DEFAULT_BECOME, action="store_true", dest='become',
                                    help="run operations with become (does not imply password prompting)")
             runas_group.add_option('--become-method', dest='become_method', default=C.DEFAULT_BECOME_METHOD, type='choice', choices=C.BECOME_METHODS,
-                                   help="privilege escalation method to use (default=%s), valid choices: [ %s ]" % (C.DEFAULT_BECOME_METHOD, ' | '.join(C.BECOME_METHODS)))
+                                   help="privilege escalation method to use (default=%s), valid choices: [ %s ]" %
+                                        (C.DEFAULT_BECOME_METHOD, ' | '.join(C.BECOME_METHODS)))
             runas_group.add_option('--become-user', default=None, dest='become_user', type='string',
                                    help='run operations as this user (default=%s)' % C.DEFAULT_BECOME_USER)
 

@@ -491,7 +495,7 @@ class CLI(with_metaclass(ABCMeta, object)):
             else:
                 # set default if it exists
                 if os.path.exists(C.DEFAULT_HOST_LIST):
-                    self.options.inventory = [ C.DEFAULT_HOST_LIST ]
+                    self.options.inventory = [C.DEFAULT_HOST_LIST]
 
     @staticmethod
     def version(prog):

@@ -576,8 +580,7 @@ class CLI(with_metaclass(ABCMeta, object)):
                 offset = time.timezone
             else:
                 offset = time.altzone
-            result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit,
-                                                                        time.strftime("%Y/%m/%d %H:%M:%S", date), int(offset / -36))
+            result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit, time.strftime("%Y/%m/%d %H:%M:%S", date), int(offset / -36))
         else:
             result = ''
         return result

@@ -669,7 +672,7 @@ class CLI(with_metaclass(ABCMeta, object)):
             else:
                 try:
                     f = open(this_path, "rb")
-                    vault_pass=f.read().strip()
+                    vault_pass = f.read().strip()
                     f.close()
                 except (OSError, IOError) as e:
                     raise AnsibleError("Could not read vault password file %s: %s" % (this_path, e))

@@ -705,4 +708,3 @@ class CLI(with_metaclass(ABCMeta, object)):
         variable_manager.options_vars = load_options_vars(options, CLI.version_info(gitinfo=False))
 
         return loader, inventory, variable_manager
-
@@ -84,10 +84,10 @@ class AdHocCLI(CLI):
     def _play_ds(self, pattern, async, poll):
         check_raw = self.options.module_name in ('command', 'win_command', 'shell', 'win_shell', 'script', 'raw')
         return dict(
-            name = "Ansible Ad-Hoc",
-            hosts = pattern,
-            gather_facts = 'no',
-            tasks = [ dict(action=dict(module=self.options.module_name, args=parse_kv(self.options.module_args, check_raw=check_raw)), async=async, poll=poll) ]
+            name="Ansible Ad-Hoc",
+            hosts=pattern,
+            gather_facts='no',
+            tasks=[dict(action=dict(module=self.options.module_name, args=parse_kv(self.options.module_args, check_raw=check_raw)), async=async, poll=poll)]
         )
 
     def run(self):
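For context on the `_play_ds` hunk above: the method builds the single implicit play that ad-hoc mode executes. A hedged sketch of the datastructure it would return for `ansible all -m ping` (the async/poll values come from CLI options; the values here are illustrative defaults):

    play = {
        'name': "Ansible Ad-Hoc",
        'hosts': "all",                        # the host pattern from the command line
        'gather_facts': 'no',
        'tasks': [{
            'action': {'module': 'ping', 'args': {}},
            'async': 0,                        # illustrative: no background run requested
            'poll': 15,                        # illustrative poll interval
        }],
    }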
@@ -103,7 +103,7 @@ class AdHocCLI(CLI):
 
         self.normalize_become_options()
         (sshpass, becomepass) = self.ask_passwords()
-        passwords = { 'conn_pass': sshpass, 'become_pass': becomepass }
+        passwords = {'conn_pass': sshpass, 'become_pass': becomepass}
 
         loader, inventory, variable_manager = self._play_prereqs(self.options)
 

@@ -155,11 +155,11 @@ class AdHocCLI(CLI):
         else:
             cb = 'minimal'
 
-        run_tree=False
+        run_tree = False
         if self.options.tree:
             C.DEFAULT_CALLBACK_WHITELIST.append('tree')
             C.TREE_DIR = self.options.tree
-            run_tree=True
+            run_tree = True
 
         # now create a task queue manager to execute the play
         self._tqm = None
@@ -58,8 +58,8 @@ class ConsoleCLI(CLI, cmd.Cmd):
    ''' a REPL that allows for running ad-hoc tasks against a chosen inventory (based on dominis' ansible-shell).'''

    modules = []
-    ARGUMENTS = { 'host-pattern': 'A name of a group in the inventory, a shell-like glob '
-                  'selecting hosts in inventory or any combination of the two separated by commas.', }
+    ARGUMENTS = {'host-pattern': 'A name of a group in the inventory, a shell-like glob '
+                 'selecting hosts in inventory or any combination of the two separated by commas.'}

    def __init__(self, args):


@@ -150,7 +150,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
                elif module in C.IGNORE_FILES:
                    continue
                elif module.startswith('_'):
-                   fullpath = '/'.join([path,module])
+                   fullpath = '/'.join([path, module])
                    if os.path.islink(fullpath): # avoids aliases
                        continue
                    module = module.replace('_', '', 1)

@@ -184,10 +184,10 @@ class ConsoleCLI(CLI, cmd.Cmd):
        try:
            check_raw = self.options.module_name in ('command', 'shell', 'script', 'raw')
            play_ds = dict(
-               name = "Ansible Shell",
-               hosts = self.options.cwd,
-               gather_facts = 'no',
-               tasks = [ dict(action=dict(module=module, args=parse_kv(module_args, check_raw=check_raw)))]
+               name="Ansible Shell",
+               hosts=self.options.cwd,
+               gather_facts='no',
+               tasks=[dict(action=dict(module=module, args=parse_kv(module_args, check_raw=check_raw)))]
            )
            play = Play().load(play_ds, variable_manager=self.variable_manager, loader=self.loader)
        except Exception as e:

@@ -368,7 +368,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
        mline = line.partition(' ')[2]
        offs = len(mline) - len(text)

-       if self.options.cwd in ('all','*','\\'):
+       if self.options.cwd in ('all', '*', '\\'):
            completions = self.hosts + self.groups
        else:
            completions = [x.name for x in self.inventory.list_hosts(self.options.cwd)]

@@ -410,7 +410,7 @@ class ConsoleCLI(CLI, cmd.Cmd):

        self.normalize_become_options()
        (sshpass, becomepass) = self.ask_passwords()
-       self.passwords = { 'conn_pass': sshpass, 'become_pass': becomepass }
+       self.passwords = {'conn_pass': sshpass, 'become_pass': becomepass}

        self.loader, self.inventory, self.variable_manager = self._play_prereqs(self.options)

@@ -21,8 +21,8 @@ __metaclass__ = type
 
 import datetime
 import os
-import traceback
 import textwrap
+import traceback
 import yaml
 
 from ansible import constants as C

@@ -66,7 +66,7 @@ class DocCLI(CLI):
        self.parser.add_option("-a", "--all", action="store_true", default=False, dest='all_plugins',
                               help='Show documentation for all plugins')
        self.parser.add_option("-t", "--type", action="store", default='module', dest='type', type='choice',
-                              help='Choose which plugin type', choices=['module','cache', 'connection', 'callback', 'lookup', 'strategy', 'inventory'])
+                              help='Choose which plugin type', choices=['cache', 'callback', 'connection', 'inventory', 'lookup', 'module', 'strategy'])

        super(DocCLI, self).parse()


@@ -90,7 +90,7 @@ class DocCLI(CLI):
        elif plugin_type == 'strategy':
            loader = strategy_loader
        elif plugin_type == 'inventory':
-           loader = PluginLoader( 'InventoryModule', 'ansible.plugins.inventory', 'inventory_plugins', 'inventory_plugins')
+           loader = PluginLoader('InventoryModule', 'ansible.plugins.inventory', 'inventory_plugins', 'inventory_plugins')
        else:
            loader = module_loader


@@ -309,7 +309,7 @@ class DocCLI(CLI):
            text.append(textwrap.fill(CLI.tty_ify(opt['description']), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
            del opt['description']

-           aliases= ''
+           aliases = ''
            if 'aliases' in opt:
                choices = "(Aliases: " + ", ".join(str(i) for i in opt['aliases']) + ")"
                del opt['aliases']

@@ -361,7 +361,7 @@ class DocCLI(CLI):
                display.vv("Skipping %s key cuase we don't know how to handle eet" % k)

    def get_man_text(self, doc):
-       opt_indent="        "
+       opt_indent = "        "
        text = []
        text.append("> %s    (%s)\n" % (doc[self.options.type].upper(), doc['filename']))
        pad = display.columns * 0.20

@@ -388,11 +388,11 @@ class DocCLI(CLI):
        if 'notes' in doc and doc['notes'] and len(doc['notes']) > 0:
            text.append("Notes:")
            for note in doc['notes']:
-               text.append(textwrap.fill(CLI.tty_ify(note), limit-6, initial_indent=" * ", subsequent_indent=opt_indent))
+               text.append(textwrap.fill(CLI.tty_ify(note), limit - 6, initial_indent=" * ", subsequent_indent=opt_indent))

        if 'requirements' in doc and doc['requirements'] is not None and len(doc['requirements']) > 0:
            req = ", ".join(doc['requirements'])
-           text.append("Requirements:%s\n" % textwrap.fill(CLI.tty_ify(req), limit-16, initial_indent=" ", subsequent_indent=opt_indent))
+           text.append("Requirements:%s\n" % textwrap.fill(CLI.tty_ify(req), limit - 16, initial_indent=" ", subsequent_indent=opt_indent))

        if 'examples' in doc and len(doc['examples']) > 0:
            text.append("Example%s:\n" % ('' if len(doc['examples']) < 2 else 's'))
@@ -23,11 +23,11 @@ from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type
 
 import os.path
-import sys
-import yaml
-import time
 import re
 import shutil
+import sys
+import time
+import yaml
 
 from jinja2 import Environment, FileSystemLoader
 

@@ -36,11 +36,11 @@ from ansible.cli import CLI
 from ansible.errors import AnsibleError, AnsibleOptionsError
 from ansible.galaxy import Galaxy
 from ansible.galaxy.api import GalaxyAPI
-from ansible.galaxy.role import GalaxyRole
 from ansible.galaxy.login import GalaxyLogin
+from ansible.galaxy.role import GalaxyRole
 from ansible.galaxy.token import GalaxyToken
-from ansible.playbook.role.requirement import RoleRequirement
 from ansible.module_utils._text import to_text
+from ansible.playbook.role.requirement import RoleRequirement
 
 try:
     from __main__ import display

@@ -52,7 +52,7 @@ except ImportError:
 class GalaxyCLI(CLI):
     '''command to manage Ansible roles in shared repostories, the default of which is Ansible Galaxy *https://galaxy.ansible.com*.'''
 
-    SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url" )
+    SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url")
     VALID_ACTIONS = ("delete", "import", "info", "init", "install", "list", "login", "remove", "search", "setup")
 
     def __init__(self, args):

@@ -64,7 +64,6 @@ class GalaxyCLI(CLI):
 
         super(GalaxyCLI, self).set_action()
 
-
         # specific to actions
         if self.action == "delete":
             self.parser.set_usage("usage: %prog delete [options] github_user github_repo")

@@ -113,9 +112,9 @@ class GalaxyCLI(CLI):
 
         # options that apply to more than one action
         if self.action in ['init', 'info']:
-            self.parser.add_option( '--offline', dest='offline', default=False, action='store_true', help="Don't query the galaxy API when creating roles")
+            self.parser.add_option('--offline', dest='offline', default=False, action='store_true', help="Don't query the galaxy API when creating roles")
 
-        if self.action not in ("delete","import","init","login","setup"):
+        if self.action not in ("delete", "import", "init", "login", "setup"):
             # NOTE: while the option type=str, the default is a list, and the
             # callback will set the value to a list.
             self.parser.add_option('-p', '--roles-path', dest='roles_path', action="callback", callback=CLI.expand_paths, type=str,

@@ -123,15 +122,15 @@ class GalaxyCLI(CLI):
                                    help='The path to the directory containing your roles. The default is the roles_path configured in your ansible.cfg '
                                         'file (/etc/ansible/roles if not configured)')
 
-        if self.action in ("init","install"):
+        if self.action in ("init", "install"):
             self.parser.add_option('-f', '--force', dest='force', action='store_true', default=False, help='Force overwriting an existing role')
 
     def parse(self):
         ''' create an options parser for bin/ansible '''
 
         self.parser = CLI.base_parser(
-            usage = "usage: %%prog [%s] [--help] [options] ..." % "|".join(self.VALID_ACTIONS),
-            epilog = "\nSee '%s <command> --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
+            usage="usage: %%prog [%s] [--help] [options] ..." % "|".join(self.VALID_ACTIONS),
+            epilog="\nSee '%s <command> --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
         )
 
         # common

@@ -292,12 +291,12 @@ class GalaxyCLI(CLI):
             role_info.update(gr.metadata)
 
         req = RoleRequirement()
-        role_spec= req.role_yaml_parse({'role': role})
+        role_spec = req.role_yaml_parse({'role': role})
         if role_spec:
             role_info.update(role_spec)
 
         data = self._display_role_info(role_info)
-        ### FIXME: This is broken in both 1.9 and 2.0 as
+        # FIXME: This is broken in both 1.9 and 2.0 as
         # _display_role_info() always returns something
         if not data:
             data = u"\n- the role %s was not found" % role

@@ -589,11 +588,11 @@ class GalaxyCLI(CLI):
 
         if len(task) > 1:
             # found multiple roles associated with github_user/github_repo
-            display.display("WARNING: More than one Galaxy role associated with Github repo %s/%s." % (github_user,github_repo),
+            display.display("WARNING: More than one Galaxy role associated with Github repo %s/%s." % (github_user, github_repo),
                             color='yellow')
             display.display("The following Galaxy roles are being updated:" + u'\n', color=C.COLOR_CHANGED)
             for t in task:
-                display.display('%s.%s' % (t['summary_fields']['role']['namespace'],t['summary_fields']['role']['name']), color=C.COLOR_CHANGED)
+                display.display('%s.%s' % (t['summary_fields']['role']['namespace'], t['summary_fields']['role']['name']), color=C.COLOR_CHANGED)
             display.display(u'\nTo properly namespace this role, remove each of the above and re-import %s/%s from scratch' % (github_user, github_repo),
                             color=C.COLOR_CHANGED)
             return 0

@@ -601,7 +600,7 @@ class GalaxyCLI(CLI):
         display.display("Successfully submitted import request %d" % task[0]['id'])
         if not self.options.wait:
             display.display("Role name: %s" % task[0]['summary_fields']['role']['name'])
-            display.display("Repo: %s/%s" % (task[0]['github_user'],task[0]['github_repo']))
+            display.display("Repo: %s/%s" % (task[0]['github_user'], task[0]['github_repo']))
 
         if self.options.check_status or self.options.wait:
             # Get the status of the import

@@ -634,7 +633,7 @@ class GalaxyCLI(CLI):
             display.display("---------- ---------- ----------", color=C.COLOR_OK)
             for secret in secrets:
                 display.display("%-10s %-10s %s/%s" % (secret['id'], secret['source'], secret['github_user'],
-                                secret['github_repo']),color=C.COLOR_OK)
+                                secret['github_repo']), color=C.COLOR_OK)
             return 0
 
         if self.options.remove_id:

@@ -672,7 +671,7 @@ class GalaxyCLI(CLI):
             display.display("ID     User            Name")
             display.display("------ --------------- ----------")
             for role in resp['deleted_roles']:
-                display.display("%-8s %-15s %s" % (role.id,role.namespace,role.name))
+                display.display("%-8s %-15s %s" % (role.id, role.namespace, role.name))
 
         display.display(resp['status'])
 

@@ -692,4 +691,3 @@ class GalaxyCLI(CLI):
         if os.pathsep in data:
             data = data.split(os.pathsep)[0]
         return data
-
@@ -36,18 +36,15 @@ except ImportError:
     display = Display()
 
 
-#---------------------------------------------------------------------------------------------------
-
 class PlaybookCLI(CLI):
     ''' the tool to run *Ansible playbooks*, which are a configuration and multinode deployment system.
         See the project home page (https://docs.ansible.com) for more information. '''
 
-
     def parse(self):
 
         # create parser for CLI options
         parser = CLI.base_parser(
-            usage = "%prog [options] playbook.yml [playbook2 ...]",
+            usage="%prog [options] playbook.yml [playbook2 ...]",
             connect_opts=True,
             meta_opts=True,
             runas_opts=True,

@@ -102,7 +99,7 @@ class PlaybookCLI(CLI):
         if not self.options.listhosts and not self.options.listtasks and not self.options.listtags and not self.options.syntax:
             self.normalize_become_options()
             (sshpass, becomepass) = self.ask_passwords()
-            passwords = { 'conn_pass': sshpass, 'become_pass': becomepass }
+            passwords = {'conn_pass': sshpass, 'become_pass': becomepass}
 
         loader, inventory, variable_manager = self._play_prereqs(self.options)
 

@@ -122,7 +119,6 @@ class PlaybookCLI(CLI):
                 # Invalid limit
                 raise AnsibleError("Specified --limit does not match any hosts")
 
-
         # flush fact cache if requested
         if self.options.flush_cache:
             self._flush_cache(inventory, variable_manager)
@@ -28,8 +28,8 @@ import socket
 import sys
 import time
 
-from ansible.errors import AnsibleOptionsError
 from ansible.cli import CLI
+from ansible.errors import AnsibleOptionsError
 from ansible.module_utils._text import to_native
 from ansible.plugins import module_loader
 from ansible.utils.cmd_functions import run_cmd

@@ -59,10 +59,10 @@ class PullCLI(CLI):
     DEFAULT_PLAYBOOK = 'local.yml'
     PLAYBOOK_ERRORS = {
         1: 'File does not exist',
-        2: 'File is not readable'
+        2: 'File is not readable',
     }
     SUPPORTED_REPO_MODULES = ['git']
-    ARGUMENTS = { 'playbook.yml': 'The name of one the YAML format files to run as an Ansible playbook.'
+    ARGUMENTS = {'playbook.yml': 'The name of one the YAML format files to run as an Ansible playbook.'
                  'This can be a relative path within the checkout. By default, Ansible will'
                  "look for a playbook based on the host's fully-qualified domain name,"
                  'on the host hostname and finally a playbook named *local.yml*.', }

@@ -87,7 +87,8 @@ class PullCLI(CLI):
         self.parser.add_option('-o', '--only-if-changed', dest='ifchanged', default=False, action='store_true',
                                help='only run the playbook if the repository has been updated')
         self.parser.add_option('-s', '--sleep', dest='sleep', default=None,
-                               help='sleep for random interval (between 0 and n number of seconds) before starting. This is a useful way to disperse git requests')
+                               help='sleep for random interval (between 0 and n number of seconds) before starting. '
+                                    'This is a useful way to disperse git requests')
         self.parser.add_option('-f', '--force', dest='force', default=False, action='store_true',
                                help='run the playbook even if the repository could not be updated')
         self.parser.add_option('-d', '--directory', dest='dest', default=None, help='directory to checkout repository to')

@@ -100,8 +101,8 @@ class PullCLI(CLI):
         self.parser.add_option('-m', '--module-name', dest='module_name', default=self.DEFAULT_REPO_TYPE,
                                help='Repository module name, which ansible will use to check out the repo. Default is %s.' % self.DEFAULT_REPO_TYPE)
         self.parser.add_option('--verify-commit', dest='verify', default=False, action='store_true',
-                               help='verify GPG signature of checked out commit, if it fails abort running the playbook.'
-                                    ' This needs the corresponding VCS module to support such an operation')
+                               help='verify GPG signature of checked out commit, if it fails abort running the playbook. '
+                                    'This needs the corresponding VCS module to support such an operation')
         self.parser.add_option('--clean', dest='clean', default=False, action='store_true',
                                help='modified files in the working repository will be discarded')
         self.parser.add_option('--track-subs', dest='tracksubs', default=False, action='store_true',

@@ -120,7 +121,7 @@ class PullCLI(CLI):
 
         if self.options.sleep:
             try:
-                secs = random.randint(0,int(self.options.sleep))
+                secs = random.randint(0, int(self.options.sleep))
                 self.options.sleep = secs
             except ValueError:
                 raise AnsibleOptionsError("%s is not a number." % self.options.sleep)

@@ -151,7 +152,7 @@ class PullCLI(CLI):
         limit_opts = 'localhost,%s,127.0.0.1' % ','.join(set([host, node, host.split('.')[0], node.split('.')[0]]))
         base_opts = '-c local '
         if self.options.verbosity > 0:
-            base_opts += ' -%s' % ''.join([ "v" for x in range(0, self.options.verbosity) ])
+            base_opts += ' -%s' % ''.join(["v" for x in range(0, self.options.verbosity)])
 
         # Attempt to use the inventory passed in as an argument
         # It might not yet have been downloaded so use localhost as default

@@ -165,7 +166,7 @@ class PullCLI(CLI):
         else:
             inv_opts = "-i 'localhost,'"
 
-        #FIXME: enable more repo modules hg/svn?
+        # FIXME: enable more repo modules hg/svn?
         if self.options.module_name == 'git':
             repo_opts = "name=%s dest=%s" % (self.options.url, self.options.dest)
             if self.options.checkout:
@@ -22,11 +22,11 @@ __metaclass__ = type
 import os
 import sys
 
+from ansible.cli import CLI
 from ansible.errors import AnsibleError, AnsibleOptionsError
+from ansible.module_utils._text import to_text, to_bytes
 from ansible.parsing.dataloader import DataLoader
 from ansible.parsing.vault import VaultEditor
-from ansible.cli import CLI
-from ansible.module_utils._text import to_text, to_bytes
 
 try:
     from __main__ import display

@@ -95,9 +95,9 @@ class VaultCLI(CLI):
 
         self.parser = CLI.base_parser(
             vault_opts=True,
-            usage = "usage: %%prog [%s] [options] [vaultfile.yml]" % "|".join(self.VALID_ACTIONS),
-            desc = "encryption/decryption utility for Ansbile data files",
-            epilog = "\nSee '%s <command> --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
+            usage="usage: %%prog [%s] [options] [vaultfile.yml]" % "|".join(self.VALID_ACTIONS),
+            desc="encryption/decryption utility for Ansbile data files",
+            epilog="\nSee '%s <command> --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
         )
 
         self.set_action()
@@ -30,7 +30,7 @@ from ansible.module_utils._text import to_text
 from ansible.parsing.quoting import unquote
 from ansible.utils.path import makedirs_safe
 
-BOOL_TRUE = frozenset( [ "true", "t", "y", "1", "yes", "on" ] )
+BOOL_TRUE = frozenset(["true", "t", "y", "1", "yes", "on"])
 
 
 def mk_boolean(value):
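Aside on the `BOOL_TRUE` hunk above: `mk_boolean` (its body falls outside this hunk) normalizes config values against that frozenset. A minimal sketch of the semantics, assuming non-boolean inputs are compared case-insensitively by their string form (the helper name is hypothetical, not the shipped function):

    def mk_boolean_sketch(value):
        if isinstance(value, bool):
            return value
        return str(value).lower() in BOOL_TRUE

    mk_boolean_sketch("Yes")   # True
    mk_boolean_sketch("0")     # False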
@@ -173,18 +173,18 @@ p, CONFIG_FILE = load_config_file()
 # non configurable but used as defaults
 BLACKLIST_EXTS = ('.pyc', '.pyo', '.swp', '.bak', '~', '.rpm', '.md', '.txt')
 # the default whitelist for cow stencils
-DEFAULT_COW_WHITELIST = [ 'bud-frogs', 'bunny', 'cheese', 'daemon', 'default', 'dragon', 'elephant-in-snake', 'elephant',
+DEFAULT_COW_WHITELIST = ['bud-frogs', 'bunny', 'cheese', 'daemon', 'default', 'dragon', 'elephant-in-snake', 'elephant',
                          'eyes', 'hellokitty', 'kitty', 'luke-koala', 'meow', 'milk', 'moofasa', 'moose', 'ren', 'sheep',
                          'small', 'stegosaurus', 'stimpy', 'supermilker', 'three-eyes', 'turkey', 'turtle', 'tux', 'udder',
-                         'vader-koala', 'vader', 'www', ]
+                         'vader-koala', 'vader', 'www']
 
 # sections in config file
-DEFAULTS='defaults'
+DEFAULTS = 'defaults'
 
-#### DEPRECATED VARS ### # FIXME: add deprecation warning when these get set
-#none left now
+# DEPRECATED VARS # FIXME: add deprecation warning when these get set
+# none left now
 
-#### DEPRECATED FEATURE TOGGLES: these will eventually be removed as it becomes the standard ####
+# DEPRECATED FEATURE TOGGLES: these will eventually be removed as it becomes the standard
 
 # If --tags or --skip-tags is given multiple times on the CLI and this is True, merge the lists of tags together.
 # If False, let the last argument overwrite any previous ones.

@@ -194,7 +194,7 @@ MERGE_MULTIPLE_CLI_TAGS = get_config(p, DEFAULTS, 'merge_multiple_cli_tags', '
 # Controls which 'precedence path' to take, remove when decide on which!
 SOURCE_OVER_GROUPS = get_config(p, 'vars', 'source_over_groups', 'ANSIBLE_SOURCE_OVER_GROUPS', True, value_type='boolean')
 
-#### GENERALLY CONFIGURABLE THINGS ####
+# GENERALLY CONFIGURABLE THINGS ####
 DEFAULT_DEBUG = get_config(p, DEFAULTS, 'debug', 'ANSIBLE_DEBUG', False, value_type='boolean')
 DEFAULT_VERBOSITY = get_config(p, DEFAULTS, 'verbosity', 'ANSIBLE_VERBOSITY', 0, value_type='integer')
 DEFAULT_ROLES_PATH = get_config(p, DEFAULTS, 'roles_path', 'ANSIBLE_ROLES_PATH',

@@ -206,8 +206,8 @@ DEFAULT_FACT_PATH = get_config(p, DEFAULTS, 'fact_path', 'ANSIBLE
 DEFAULT_FORKS = get_config(p, DEFAULTS, 'forks', 'ANSIBLE_FORKS', 5, value_type='integer')
 DEFAULT_MODULE_ARGS = get_config(p, DEFAULTS, 'module_args', 'ANSIBLE_MODULE_ARGS', '')
 DEFAULT_MODULE_LANG = get_config(p, DEFAULTS, 'module_lang', 'ANSIBLE_MODULE_LANG', os.getenv('LANG', 'en_US.UTF-8'))
-DEFAULT_MODULE_SET_LOCALE = get_config(p, DEFAULTS, 'module_set_locale','ANSIBLE_MODULE_SET_LOCALE',False, value_type='boolean')
-DEFAULT_MODULE_COMPRESSION= get_config(p, DEFAULTS, 'module_compression', None, 'ZIP_DEFLATED')
+DEFAULT_MODULE_SET_LOCALE = get_config(p, DEFAULTS, 'module_set_locale', 'ANSIBLE_MODULE_SET_LOCALE', False, value_type='boolean')
+DEFAULT_MODULE_COMPRESSION = get_config(p, DEFAULTS, 'module_compression', None, 'ZIP_DEFLATED')
 DEFAULT_TIMEOUT = get_config(p, DEFAULTS, 'timeout', 'ANSIBLE_TIMEOUT', 10, value_type='integer')
 DEFAULT_POLL_INTERVAL = get_config(p, DEFAULTS, 'poll_interval', 'ANSIBLE_POLL_INTERVAL', 15, value_type='integer')
 DEFAULT_REMOTE_USER = get_config(p, DEFAULTS, 'remote_user', 'ANSIBLE_REMOTE_USER', None)

@@ -240,9 +240,9 @@ SHOW_CUSTOM_STATS = get_config(p, DEFAULTS, 'show_custom_stats', 'ANSIBLE_SHOW_C
 NAMESPACE_FACTS = get_config(p, DEFAULTS, 'restrict_facts_namespace', 'ANSIBLE_RESTRICT_FACTS', False, value_type='boolean')
 
 # Inventory
-DEFAULT_HOST_LIST = get_config(p, DEFAULTS,'inventory', 'ANSIBLE_INVENTORY', '/etc/ansible/hosts', value_type='path', expand_relative_paths=True)
-INVENTORY_ENABLED = get_config(p, DEFAULTS,'inventory_enabled', 'ANSIBLE_INVENTORY_ENABLED',
-                               [ 'host_list', 'script', 'ini', 'yaml' ], value_type='list')
+DEFAULT_HOST_LIST = get_config(p, DEFAULTS, 'inventory', 'ANSIBLE_INVENTORY', '/etc/ansible/hosts', value_type='path', expand_relative_paths=True)
+INVENTORY_ENABLED = get_config(p, DEFAULTS, 'inventory_enabled', 'ANSIBLE_INVENTORY_ENABLED',
                               ['host_list', 'script', 'ini', 'yaml'], value_type='list')
 INVENTORY_IGNORE_EXTS = get_config(p, DEFAULTS, 'inventory_ignore_extensions', 'ANSIBLE_INVENTORY_IGNORE',
                                    BLACKLIST_EXTS + (".orig", ".ini", ".cfg", ".retry"), value_type='list')
 INVENTORY_IGNORE_PATTERNS = get_config(p, DEFAULTS, 'inventory_ignore_patterns', 'ANSIBLE_INVENTORY_IGNORE_REGEX', [], value_type='list')

@@ -455,4 +455,4 @@ IGNORE_FILES = ["COPYING", "CONTRIBUTING", "LICENSE", "README", "VERSION", "GUID
 INTERNAL_RESULT_KEYS = ['add_host', 'add_group']
 RESTRICTED_RESULT_KEYS = ['ansible_rsync_path', 'ansible_playbook_python']
 # check all of these extensions when looking for 'variable' files which should be YAML or JSON.
-YAML_FILENAME_EXTENSIONS = [ ".yml", ".yaml", ".json" ]
+YAML_FILENAME_EXTENSIONS = [".yml", ".yaml", ".json"]
@@ -20,8 +20,8 @@ from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type
 
 import os
-import sys
 import re
+import sys
 
 from ansible import constants as C
 from ansible.errors import AnsibleError

@@ -39,6 +39,7 @@ except ImportError:
     from ansible.utils.display import Display
     display = Display()
 
+
 class InventoryData(object):
     """
     Holds inventory data (host and group objects).

@@ -92,8 +93,8 @@ class InventoryData(object):
             if not py_interp:
                 # sys.executable is not set in some cornercases. #13585
                 py_interp = '/usr/bin/python'
-                display.warning('Unable to determine python interpreter from sys.executable. Using /usr/bin/python default.'
-                                ' You can correct this by setting ansible_python_interpreter for localhost')
+                display.warning('Unable to determine python interpreter from sys.executable. Using /usr/bin/python default. '
+                                'You can correct this by setting ansible_python_interpreter for localhost')
             new_host.set_variable("ansible_python_interpreter", py_interp)
 
         if "ansible_connection" not in new_host.vars:

@@ -103,7 +104,6 @@ class InventoryData(object):
 
         return new_host
 
-
     def _scan_groups_for_host(self, hostname, localhost=False):
         ''' in case something did not update inventory correctly, fallback to group scan '''
 

@@ -121,7 +121,6 @@ class InventoryData(object):
 
         return found
 
-
     def reconcile_inventory(self):
         ''' Ensure inventory basic rules, run after updates '''
 

@@ -190,7 +189,6 @@ class InventoryData(object):
 
         return matching_host
 
-
     def add_group(self, group):
         ''' adds a group to inventory if not there already '''
 

@@ -236,8 +234,7 @@ class InventoryData(object):
             if g and host not in g.get_hosts():
                 g.add_host(h)
                 self._groups_dict_cache = {}
-                display.debug("Added host %s to group %s" % (host,group))
-
+                display.debug("Added host %s to group %s" % (host, group))
 
     def set_variable(self, entity, varname, value):
         ''' sets a varible for an inventory object '''

@@ -252,7 +249,6 @@ class InventoryData(object):
         inv_object.set_variable(varname, value)
         display.debug('set %s for %s' % (varname, entity))
 
-
     def add_child(self, group, child):
         ''' Add host or group to group '''
 

@@ -278,4 +274,3 @@ class InventoryData(object):
             self._groups_dict_cache[group_name] = [h.name for h in group.get_hosts()]
 
         return self._groups_dict_cache
-
@@ -20,10 +20,11 @@ __metaclass__ = type
 from ansible.errors import AnsibleError
 from ansible.utils.vars import combine_vars
 
+
 class Group:
     ''' a group of ansible hosts '''
 
-    #__slots__ = [ 'name', 'hosts', 'vars', 'child_groups', 'parent_groups', 'depth', '_hosts_cache' ]
+    # __slots__ = [ 'name', 'hosts', 'vars', 'child_groups', 'parent_groups', 'depth', '_hosts_cache' ]
 
     def __init__(self, name=None):
 

@@ -87,7 +88,7 @@ class Group:
             self.child_groups.append(group)
 
             # update the depth of the child
-            group.depth = max([self.depth+1, group.depth])
+            group.depth = max([self.depth + 1, group.depth])
 
             # update the depth of the grandchildren
             group._check_children_depth()

@@ -105,7 +106,7 @@ class Group:
 
         try:
             for group in self.child_groups:
-                group.depth = max([self.depth+1, group.depth])
+                group.depth = max([self.depth + 1, group.depth])
                 group._check_children_depth()
         except RuntimeError:
             raise AnsibleError("The group named '%s' has a recursive dependency loop." % self.name)

@@ -180,6 +181,5 @@ class Group:
         try:
             self.priority = int(priority)
         except TypeError:
-            #FIXME: warn about invalid priority
+            # FIXME: warn about invalid priority
             pass
-
@@ -24,10 +24,11 @@ from ansible.utils.vars import combine_vars, get_unique_id
 
 __all__ = ['Host']
 
+
 class Host:
     ''' a single ansible host '''
 
-    #__slots__ = [ 'name', 'vars', 'groups' ]
+    # __slots__ = [ 'name', 'vars', 'groups' ]
 
     def __getstate__(self):
         return self.serialize()

@@ -73,7 +74,7 @@ class Host:
         self.vars = data.get('vars', dict())
         self.address = data.get('address', '')
         self._uuid = data.get('uuid', None)
-        self.implicit= data.get('implicit', False)
+        self.implicit = data.get('implicit', False)
 
         groups = data.get('groups', [])
         for group_data in groups:

@@ -100,7 +101,6 @@ class Host:
     def get_name(self):
         return self.name
 
-
     def populate_ancestors(self):
         # populate ancestors
         for group in self.groups:

@@ -131,9 +131,8 @@ class Host:
         else:
             self.remove_group(oldg)
 
-
     def set_variable(self, key, value):
-        self.vars[key]=value
+        self.vars[key] = value
 
     def get_groups(self):
         return self.groups

@@ -142,10 +141,9 @@ class Host:
         results = {}
         results['inventory_hostname'] = self.name
         results['inventory_hostname_short'] = self.name.split('.')[0]
-        results['group_names'] = sorted([ g.name for g in self.get_groups() if g.name != 'all'])
+        results['group_names'] = sorted([g.name for g in self.get_groups() if g.name != 'all'])
 
         return combine_vars(self.vars, results)
 
     def get_vars(self):
         return combine_vars(self.vars, self.get_magic_vars())
-
@@ -42,11 +42,12 @@ except ImportError:
 HOSTS_PATTERNS_CACHE = {}
 
 IGNORED_ALWAYS = [b"^\.", b"^host_vars$", b"^group_vars$", b"^vars_plugins$"]
-IGNORED_PATTERNS = [ to_bytes(x) for x in C.INVENTORY_IGNORE_PATTERNS ]
+IGNORED_PATTERNS = [to_bytes(x) for x in C.INVENTORY_IGNORE_PATTERNS]
 IGNORED_EXTS = [b'%s$' % to_bytes(re.escape(x)) for x in C.INVENTORY_IGNORE_EXTS]
 
 IGNORED = re.compile(b'|'.join(IGNORED_ALWAYS + IGNORED_PATTERNS + IGNORED_EXTS))
 
 
 def order_patterns(patterns):
     ''' takes a list of patterns and reorders them by modifier to apply them consistently '''
 

@@ -114,6 +115,7 @@ def split_host_pattern(pattern):
 
     return [p.strip() for p in patterns]
 
+
 class InventoryManager(object):
     ''' Creates and manages inventory '''
 

@@ -135,7 +137,7 @@ class InventoryManager(object):
         if sources is None:
             self._sources = []
         elif isinstance(sources, string_types):
-            self._sources = [ sources ]
+            self._sources = [sources]
         else:
             self._sources = sources
 

@@ -175,7 +177,7 @@ class InventoryManager(object):
     def _setup_inventory_plugins(self):
         ''' sets up loaded inventory plugins for usage '''
 
-        inventory_loader = PluginLoader( 'InventoryModule', 'ansible.plugins.inventory', 'inventory_plugins', 'inventory_plugins')
+        inventory_loader = PluginLoader('InventoryModule', 'ansible.plugins.inventory', 'inventory_plugins', 'inventory_plugins')
         display.vvvv('setting up inventory plugins')
 
         for name in C.INVENTORY_ENABLED:

@@ -257,7 +259,7 @@ class InventoryManager(object):
                         display.vvv(u'Parsed %s inventory source with %s plugin' % (to_text(source), plugin_name))
                         break
                     except AnsibleParserError as e:
-                        failures.append(u'\n* Failed to parse %s with %s inventory plugin: %s\n' %(to_text(source), plugin_name, to_text(e)))
+                        failures.append(u'\n* Failed to parse %s with %s inventory plugin: %s\n' % (to_text(source), plugin_name, to_text(e)))
                 else:
                     display.debug(u'%s did not meet %s requirements' % (to_text(source), plugin_name))
             else:

@@ -279,7 +281,7 @@ class InventoryManager(object):
         global HOSTS_PATTERNS_CACHE
         HOSTS_PATTERNS_CACHE = {}
         self._pattern_cache = {}
-        #FIXME: flush inventory cache
+        # FIXME: flush inventory cache
 
     def refresh_inventory(self):
         ''' recalculate inventory '''

@@ -340,11 +342,11 @@ class InventoryManager(object):
         if not ignore_limits and self._subset:
             # exclude hosts not in a subset, if defined
             subset = self._evaluate_patterns(self._subset)
-            hosts = [ h for h in hosts if h in subset ]
+            hosts = [h for h in hosts if h in subset]
 
         if not ignore_restrictions and self._restriction:
             # exclude hosts mentioned in any restriction (ex: failed hosts)
-            hosts = [ h for h in hosts if h.name in self._restriction ]
+            hosts = [h for h in hosts if h.name in self._restriction]
 
         seen = set()
         HOSTS_PATTERNS_CACHE[pattern_hash] = [x for x in hosts if x not in seen and not seen.add(x)]

@@ -365,7 +367,6 @@ class InventoryManager(object):
 
         return hosts
 
-
     def _evaluate_patterns(self, patterns):
         """
         Takes a list of patterns and returns a list of matching host names,
@@ -382,11 +383,11 @@ class InventoryManager(object):
             else:
                 that = self._match_one_pattern(p)
                 if p.startswith("!"):
-                    hosts = [ h for h in hosts if h not in that ]
+                    hosts = [h for h in hosts if h not in that]
                 elif p.startswith("&"):
-                    hosts = [ h for h in hosts if h in that ]
+                    hosts = [h for h in hosts if h in that]
                 else:
-                    to_append = [ h for h in that if h.name not in [ y.name for y in hosts ] ]
+                    to_append = [h for h in that if h.name not in [y.name for y in hosts]]
                     hosts.extend(to_append)
         return hosts
 
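The `_evaluate_patterns` hunk above is where Ansible's host-pattern operators are applied: a plain pattern unions matches in, `&` keeps only hosts also matched by the pattern, and `!` removes them. A hedged usage sketch (the inventory source and group names are illustrative):

    from ansible.inventory.manager import InventoryManager
    from ansible.parsing.dataloader import DataLoader

    inventory = InventoryManager(loader=DataLoader(), sources='hosts.ini')
    inventory.get_hosts('webservers')           # union: every host in webservers
    inventory.get_hosts('webservers:&staging')  # intersection: webservers also in staging
    inventory.get_hosts('webservers:!phoenix')  # difference: webservers not in phoenix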
@@ -500,10 +501,10 @@ class InventoryManager(object):
 
         if end:
             if end == -1:
-                end = len(hosts)-1
-            return hosts[start:end+1]
+                end = len(hosts) - 1
+            return hosts[start:end + 1]
         else:
-            return [ hosts[start] ]
+            return [hosts[start]]
 
     def _enumerate_matches(self, pattern):
         """
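A short aside on the subscript hunk above: it implements Ansible's pattern-slicing syntax, and the `end + 1` makes the end index inclusive. Hedged examples (the group name is illustrative):

    # 'webservers[0]'   -> the first host in the group
    # 'webservers[0:2]' -> hosts 0 through 2, inclusive (three hosts)
    # 'webservers[1:]'  -> every host except the first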
@@ -544,8 +545,8 @@ class InventoryManager(object):
 
     def list_hosts(self, pattern="all"):
         """ return a list of hostnames for a pattern """
-        #FIXME: cache?
-        result = [ h for h in self.get_hosts(pattern) ]
+        # FIXME: cache?
+        result = [h for h in self.get_hosts(pattern)]
 
         # allow implicit localhost if pattern matches and no other results
         if len(result) == 0 and pattern in C.LOCALHOST:

@@ -554,7 +555,7 @@ class InventoryManager(object):
         return result
 
     def list_groups(self):
-        #FIXME: cache?
+        # FIXME: cache?
         return sorted(self._inventory.groups.keys(), key=lambda x: x)
 
     def restrict_to_hosts(self, restriction):

@@ -566,8 +567,8 @@ class InventoryManager(object):
         if restriction is None:
             return
         elif not isinstance(restriction, list):
-            restriction = [ restriction ]
-        self._restriction = [ h.name for h in restriction ]
+            restriction = [restriction]
+        self._restriction = [h.name for h in restriction]
 
     def subset(self, subset_pattern):
         """
@@ -44,6 +44,7 @@ AXAPI_VPORT_PROTOCOLS = {
     'https': 12,
 }
 
+
 def a10_argument_spec():
     return dict(
         host=dict(type='str', required=True),

@@ -52,11 +53,13 @@ def a10_argument_spec():
         write_config=dict(type='bool', default=False)
     )
 
+
 def axapi_failure(result):
     if 'response' in result and result['response'].get('status') == 'fail':
         return True
     return False
 
+
 def axapi_call(module, url, post=None):
     '''
     Returns a datastructure based on the result of the API call

@@ -81,6 +84,7 @@ def axapi_call(module, url, post=None):
     rsp.close()
     return data
 
+
 def axapi_authenticate(module, base_url, username, password):
     url = '%s&method=authenticate&username=%s&password=%s' % (base_url, username, password)
     result = axapi_call(module, url)

@@ -89,6 +93,7 @@ def axapi_authenticate(module, base_url, username, password):
     sessid = result['session_id']
     return base_url + '&session_id=' + sessid
 
+
 def axapi_authenticate_v3(module, base_url, username, password):
     url = base_url
     auth_payload = {"credentials": {"username": username, "password": password}}

@@ -98,6 +103,7 @@ def axapi_authenticate_v3(module, base_url, username, password):
     signature = result['authresponse']['signature']
     return signature
 
+
 def axapi_call_v3(module, url, method=None, body=None, signature=None):
     '''
     Returns a datastructure based on the result of the API call

@@ -126,6 +132,7 @@ def axapi_call_v3(module, url, method=None, body=None, signature=None):
     rsp.close()
     return data
 
+
 def axapi_enabled_disabled(flag):
     '''
     The axapi uses 0/1 integer values for flags, rather than strings

@@ -137,8 +144,10 @@ def axapi_enabled_disabled(flag):
     else:
         return 0
 
+
 def axapi_get_port_protocol(protocol):
     return AXAPI_PORT_PROTOCOLS.get(protocol.lower(), None)
 
+
 def axapi_get_vport_protocol(protocol):
     return AXAPI_VPORT_PROTOCOLS.get(protocol.lower(), None)
@@ -79,9 +79,9 @@ def tower_check_mode(module):
 
 def tower_argument_spec():
     return dict(
-        tower_host = dict(),
-        tower_username = dict(),
-        tower_password = dict(no_log=True),
-        tower_verify_ssl = dict(type='bool', default=True),
-        tower_config_file = dict(type='path'),
+        tower_host=dict(),
+        tower_username=dict(),
+        tower_password=dict(no_log=True),
+        tower_verify_ssl=dict(type='bool', default=True),
+        tower_config_file=dict(type='path'),
     )
@@ -37,8 +37,8 @@ from ansible.module_utils.aos import *
 """
 import json
 
-from distutils.version import LooseVersion
 from ansible.module_utils.pycompat24 import get_exception
+from distutils.version import LooseVersion
 
 try:
     import yaml

@@ -53,6 +53,7 @@ try:
 except ImportError:
     HAS_AOS_PYEZ = False
 
+
 def check_aos_version(module, min=False):
     """
     Check if the library aos-pyez is present.

@@ -71,6 +72,7 @@ def check_aos_version(module, min=False):
 
     return True
 
+
 def get_aos_session(module, auth):
     """
     Resume an existing session and return an AOS object.

@@ -94,6 +96,7 @@ def get_aos_session(module, auth):
 
     return aos
 
+
 def find_collection_item(collection, item_name=False, item_id=False):
     """
     Find collection_item based on name or id from a collection object

@@ -114,6 +117,7 @@ def find_collection_item(collection, item_name=False, item_id=False):
     else:
         return my_dict
 
+
 def content_to_dict(module, content):
     """
     Convert 'content' into a Python Dict based on 'content_format'

@@ -144,12 +148,12 @@ def content_to_dict(module, content):
     except:
         module.fail_json(msg="Unable to convert 'content' to a dict, please check if valid")
 
-
     # replace the string with the dict
     module.params['content'] = content_dict
 
     return content_dict
 
+
 def do_load_resource(module, collection, name):
     """
     Create a new object (collection.item) by loading a datastructure directly

@@ -161,10 +165,7 @@ def do_load_resource(module, collection, name):
         module.fail_json(msg="An error occurred while running 'find_collection_item'")
 
     if item.exists:
-        module.exit_json( changed=False,
-                          name=item.name,
-                          id=item.id,
-                          value=item.value )
+        module.exit_json(changed=False, name=item.name, id=item.id, value=item.value)
 
     # If not in check mode, apply the changes
     if not module.check_mode:

@@ -175,7 +176,4 @@ def do_load_resource(module, collection, name):
         e = get_exception()
         module.fail_json(msg="Unable to write item content : %r" % e)
 
-    module.exit_json( changed=True,
-                      name=item.name,
-                      id=item.id,
-                      value=item.value )
+    module.exit_json(changed=True, name=item.name, id=item.id, value=item.value)
@@ -40,6 +40,7 @@ The 'api' module provides the following common argument specs:
 """
 import time
 
+
 def rate_limit_argument_spec(spec=None):
     """Creates an argument spec for working with rate limiting"""
     arg_spec = (dict(

@@ -50,6 +51,7 @@ def rate_limit_argument_spec(spec=None):
         arg_spec.update(spec)
     return arg_spec
 
+
 def retry_argument_spec(spec=None):
     """Creates an argument spec for working with retrying"""
     arg_spec = (dict(

@@ -60,41 +62,48 @@ def retry_argument_spec(spec=None):
         arg_spec.update(spec)
     return arg_spec
 
+
 def basic_auth_argument_spec(spec=None):
     arg_spec = (dict(
-        api_username=dict(type='str', required=False),
-        api_password=dict(type='str', required=False, no_log=True),
-        api_url=dict(type='str', required=False),
+        api_username=dict(type='str'),
+        api_password=dict(type='str', no_log=True),
+        api_url=dict(type='str'),
         validate_certs=dict(type='bool', default=True)
     ))
     if spec:
         arg_spec.update(spec)
     return arg_spec
 
+
 def rate_limit(rate=None, rate_limit=None):
     """rate limiting decorator"""
     minrate = None
     if rate is not None and rate_limit is not None:
         minrate = float(rate_limit) / float(rate)
+
     def wrapper(f):
         last = [0.0]
-        def ratelimited(*args,**kwargs):
+
+        def ratelimited(*args, **kwargs):
             if minrate is not None:
                 elapsed = time.clock() - last[0]
                 left = minrate - elapsed
                 if left > 0:
                     time.sleep(left)
                 last[0] = time.clock()
-            ret = f(*args,**kwargs)
+            ret = f(*args, **kwargs)
             return ret
+
         return ratelimited
     return wrapper
 
+
 def retry(retries=None, retry_pause=1):
     """Retry decorator"""
     def wrapper(f):
         retry_count = 0
-        def retried(*args,**kwargs):
+
+        def retried(*args, **kwargs):
             if retries is not None:
                 ret = None
                 while True:

@@ -102,13 +111,13 @@ def retry(retries=None, retry_pause=1):
                 if retry_count >= retries:
                     raise Exception("Retry limit exceeded: %d" % retries)
                 try:
-                    ret = f(*args,**kwargs)
+                    ret = f(*args, **kwargs)
                 except:
                     pass
                 if ret:
                     break
                 time.sleep(retry_pause)
             return ret
 
         return retried
     return wrapper
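The two decorators reformatted above compose; here is a minimal usage sketch (the function and its arguments are hypothetical). With `rate=10, rate_limit=60`, `minrate` works out to 6.0, i.e. at least six seconds between calls; `retry` re-invokes the wrapped call until it returns something truthy, raising once `retries` attempts are exhausted:

    @rate_limit(rate=10, rate_limit=60)   # no more than 10 calls per 60 seconds
    @retry(retries=3, retry_pause=2)      # up to 3 attempts, pausing 2 seconds between them
    def fetch_status(session, resource_id):
        return session.get(resource_id)   # hypothetical API call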
@@ -25,7 +25,6 @@
 # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
 # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #
-
 # This module initially matched the namespace of network module avi. However,
 # that causes namespace import error when other modules from avi namespaces

@@ -40,8 +39,7 @@ HAS_AVI = True
 try:
     import avi.sdk
     sdk_version = getattr(avi.sdk, '__version__', None)
-    if ((sdk_version is None) or (sdk_version and
-                                  (parse_version(sdk_version) < parse_version('17.1')))):
+    if ((sdk_version is None) or (sdk_version and (parse_version(sdk_version) < parse_version('17.1')))):
         # It allows the __version__ to be '' as that value is used in development builds
         raise ImportError
     from avi.sdk.utils.ansible_utils import avi_ansible_api
@@ -1,4 +1,3 @@
-#
 # Copyright (c) 2016 Matt Davis, <mdavis@ansible.com>
 # Chris Houseknecht, <house@redhat.com>
 #

@@ -16,7 +15,6 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-#
 
 import json
 import os

@@ -102,6 +100,7 @@ except ImportError as exc:
     HAS_AZURE_EXC = exc
     HAS_AZURE = False
 
+
 def azure_id_to_dict(id):
     pieces = re.sub(r'^\/', '', id).split('/')
     result = {}

@@ -121,6 +120,7 @@ AZURE_EXPECTED_VERSIONS = dict(
 
 AZURE_MIN_RELEASE = '2.0.0rc5'
 
+
 class AzureRMModuleBase(object):
 
     def __init__(self, derived_arg_spec, bypass_checks=False, no_log=False,

@@ -220,7 +220,7 @@ class AzureRMModuleBase(object):
     def log(self, msg, pretty_print=False):
         pass
         # Use only during module development
-        #if self.debug:
+        # if self.debug:
         #     log_file = open('azure_rm.log', 'a')
         #     if pretty_print:
         #         log_file.write(json.dumps(msg, indent=4, sort_keys=True))
@@ -32,7 +32,17 @@ BOOLEANS_TRUE = ['y', 'yes', 'on', '1', 'true', 1, True]
 BOOLEANS_FALSE = ['n', 'no', 'off', '0', 'false', 0, False]
 BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
 
-SIZE_RANGES = { 'Y': 1<<80, 'Z': 1<<70, 'E': 1<<60, 'P': 1<<50, 'T': 1<<40, 'G': 1<<30, 'M': 1<<20, 'K': 1<<10, 'B': 1 }
+SIZE_RANGES = {
+    'Y': 1 << 80,
+    'Z': 1 << 70,
+    'E': 1 << 60,
+    'P': 1 << 50,
+    'T': 1 << 40,
+    'G': 1 << 30,
+    'M': 1 << 20,
+    'K': 1 << 10,
+    'B': 1,
+}
 
 FILE_ATTRIBUTES = {
     'A': 'noatime',
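The reflowed `SIZE_RANGES` table above maps unit suffixes to byte multipliers (`1 << 10` is 1024, so 'K' is a kibibyte, and so on up to 'Y'). A hedged sketch of how such a table converts a human-readable size string, using a hypothetical helper rather than the module's own API:

    def human_to_bytes_sketch(size):   # hypothetical helper
        unit = size[-1].upper()
        if unit in SIZE_RANGES:
            return int(size[:-1]) * SIZE_RANGES[unit]
        return int(size)

    human_to_bytes_sketch('10M')   # 10485760
    human_to_bytes_sketch('512')   # 512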
@@ -83,9 +93,9 @@ from itertools import repeat, chain
 
 try:
     import syslog
-    HAS_SYSLOG=True
+    HAS_SYSLOG = True
 except ImportError:
-    HAS_SYSLOG=False
+    HAS_SYSLOG = False
 
 try:
     from systemd import journal

@@ -93,10 +103,10 @@ try:
 except ImportError:
     has_journal = False
 
-HAVE_SELINUX=False
+HAVE_SELINUX = False
 try:
     import selinux
-    HAVE_SELINUX=True
+    HAVE_SELINUX = True
 except ImportError:
     pass
 

@@ -161,8 +171,16 @@ except ImportError:
     pass
 
 from ansible.module_utils.pycompat24 import get_exception, literal_eval
-from ansible.module_utils.six import (PY2, PY3, b, binary_type, integer_types,
-                                      iteritems, text_type, string_types)
+from ansible.module_utils.six import (
+    PY2,
+    PY3,
+    b,
+    binary_type,
+    integer_types,
+    iteritems,
+    string_types,
+    text_type,
+)
 from ansible.module_utils.six.moves import map, reduce, shlex_quote
 from ansible.module_utils._text import to_native, to_bytes, to_text
 

@@ -208,26 +226,26 @@ _literal_eval = literal_eval
 # is an internal implementation detail
 _ANSIBLE_ARGS = None
 
-FILE_COMMON_ARGUMENTS=dict(
-    src = dict(),
-    mode = dict(type='raw'),
-    owner = dict(),
-    group = dict(),
-    seuser = dict(),
-    serole = dict(),
-    selevel = dict(),
-    setype = dict(),
-    follow = dict(type='bool', default=False),
+FILE_COMMON_ARGUMENTS = dict(
+    src=dict(),
+    mode=dict(type='raw'),
+    owner=dict(),
+    group=dict(),
+    seuser=dict(),
+    serole=dict(),
+    selevel=dict(),
+    setype=dict(),
+    follow=dict(type='bool', default=False),
     # not taken by the file module, but other modules call file so it must ignore them.
-    content = dict(no_log=True),
-    backup = dict(),
-    force = dict(),
-    remote_src = dict(), # used by assemble
-    regexp = dict(), # used by assemble
-    delimiter = dict(), # used by assemble
-    directory_mode = dict(), # used by copy
-    unsafe_writes = dict(type='bool'), # should be available to any module using atomic_move
-    attributes = dict(aliases=['attr']),
+    content=dict(no_log=True),
+    backup=dict(),
+    force=dict(),
+    remote_src=dict(),  # used by assemble
+    regexp=dict(),  # used by assemble
+    delimiter=dict(),  # used by assemble
+    directory_mode=dict(),  # used by copy
+    unsafe_writes=dict(type='bool'),  # should be available to any module using atomic_move
+    attributes=dict(aliases=['attr']),
 )
 
 PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
@ -242,11 +260,12 @@ def get_platform():
|
|||
''' what's the platform? example: Linux is a platform. '''
|
||||
return platform.system()
|
||||
|
||||
|
||||
def get_distribution():
|
||||
''' return the distribution name '''
|
||||
if platform.system() == 'Linux':
|
||||
try:
|
||||
supported_dists = platform._supported_dists + ('arch','alpine')
|
||||
supported_dists = platform._supported_dists + ('arch', 'alpine')
|
||||
distribution = platform.linux_distribution(supported_dists=supported_dists)[0].capitalize()
|
||||
if not distribution and os.path.isfile('/etc/system-release'):
|
||||
distribution = platform.linux_distribution(supported_dists=['system'])[0].capitalize()
|
||||
|
@ -261,6 +280,7 @@ def get_distribution():
|
|||
distribution = None
|
||||
return distribution
|
||||
|
||||
|
||||
def get_distribution_version():
|
||||
''' return the distribution version '''
|
||||
if platform.system() == 'Linux':
|
||||
|
@ -275,6 +295,7 @@ def get_distribution_version():
|
|||
distribution_version = None
|
||||
return distribution_version
|
||||
|
||||
|
||||
def get_all_subclasses(cls):
|
||||
'''
|
||||
used by modules like Hardware or Network fact classes to retrieve all subclasses of a given class.
|
||||
|
@ -338,6 +359,7 @@ def json_dict_unicode_to_bytes(d, encoding='utf-8', errors='surrogate_or_strict'
|
|||
else:
|
||||
return d
|
||||
|
||||
|
||||
def json_dict_bytes_to_unicode(d, encoding='utf-8', errors='surrogate_or_strict'):
|
||||
''' Recursively convert dict keys and values to byte str
|
||||
|
||||
|
@ -357,6 +379,7 @@ def json_dict_bytes_to_unicode(d, encoding='utf-8', errors='surrogate_or_strict'
|
|||
else:
|
||||
return d
|
||||
|
||||
|
||||
def return_values(obj):
|
||||
""" Return native stringified values from datastructures.
|
||||
|
||||
|
@ -381,6 +404,7 @@ def return_values(obj):
|
|||
else:
|
||||
raise TypeError('Unknown parameter type: %s, %s' % (type(obj), obj))
|
||||
|
||||
|
||||
def remove_values(value, no_log_strings):
|
||||
""" Remove strings in no_log_strings from value. If value is a container
|
||||
type, then remove a lot more"""
|
||||
|
@ -489,6 +513,7 @@ def heuristic_log_sanitize(data, no_log_values=None):
|
|||
output = remove_values(output, no_log_values)
|
||||
return output
|
||||
|
||||
|
||||
def bytes_to_human(size, isbits=False, unit=None):
|
||||
|
||||
base = 'Bytes'
|
||||
|
@ -505,7 +530,8 @@ def bytes_to_human(size, isbits=False, unit=None):
|
|||
else:
|
||||
suffix = base
|
||||
|
||||
return '%.2f %s' % (float(size)/ limit, suffix)
|
||||
return '%.2f %s' % (float(size) / limit, suffix)
|
||||
|
||||
|
||||
def human_to_bytes(number, default_unit=None, isbits=False):
|
||||
|
||||
|
@ -555,6 +581,7 @@ def human_to_bytes(number, default_unit=None, isbits=False):
|
|||
|
||||
return int(round(num * limit))
|
||||
|
||||
|
||||
def is_executable(path):
|
||||
'''is the given path executable?
|
||||
|
||||
|
@ -568,6 +595,7 @@ def is_executable(path):
|
|||
# execute bits are set.
|
||||
return ((stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) & os.stat(path)[stat.ST_MODE])
|
||||
|
||||
|
||||
def _load_params():
|
||||
''' read the modules parameters and store them globally.
|
||||
|
||||
|
@ -623,6 +651,7 @@ def _load_params():
|
|||
'"failed": true}')
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def env_fallback(*args, **kwargs):
|
||||
''' Load value from environment '''
|
||||
for arg in args:
|
||||
|
@ -631,6 +660,7 @@ def env_fallback(*args, **kwargs):
|
|||
else:
|
||||
raise AnsibleFallbackNotFound
|
||||
|
||||
|
||||
def _lenient_lowercase(lst):
|
||||
"""Lowercase elements of a list.
|
||||
|
||||
|
@ -644,6 +674,7 @@ def _lenient_lowercase(lst):
|
|||
lowered.append(value)
|
||||
return lowered
|
||||
|
||||
|
||||
def format_attributes(attributes):
|
||||
attribute_list = []
|
||||
for attr in attributes:
|
||||
|
@@ -651,13 +682,15 @@ def format_attributes(attributes):
            attribute_list.append(FILE_ATTRIBUTES[attr])
    return attribute_list


def get_flags_from_attributes(attributes):
    flags = []
    for key,attr in FILE_ATTRIBUTES.items():
    for key, attr in FILE_ATTRIBUTES.items():
        if attr in attributes:
            flags.append(key)
    return ''.join(flags)


class AnsibleFallbackNotFound(Exception):
    pass

@@ -674,7 +707,7 @@ class AnsibleModule(object):
        see library/* for examples
        '''

        self._name = os.path.basename(__file__) #initialize name until we can parse from options
        self._name = os.path.basename(__file__)  # initialize name until we can parse from options
        self.argument_spec = argument_spec
        self.supports_check_mode = supports_check_mode
        self.check_mode = False
@@ -834,7 +867,6 @@ class AnsibleModule(object):
            selevel=selevel, secontext=secontext, attributes=attributes,
        )


        # Detect whether using selinux that is MLS-aware.
        # While this means you can set the level/range with
        # selinux.lsetfilecon(), it may or may not mean that you
@@ -853,7 +885,7 @@ class AnsibleModule(object):
        if not HAVE_SELINUX:
            seenabled = self.get_bin_path('selinuxenabled')
            if seenabled is not None:
                (rc,out,err) = self.run_command(seenabled)
                (rc, out, err) = self.run_command(seenabled)
                if rc == 0:
                    self.fail_json(msg="Aborting, target uses selinux but python bindings (libselinux-python) aren't installed!")
            return False
@@ -1154,7 +1186,7 @@ class AnsibleModule(object):

        existing = self.get_file_attributes(b_path)

        if existing.get('attr_flags','') != attributes:
        if existing.get('attr_flags', '') != attributes:
            attrcmd = self.get_bin_path('chattr')
            if attrcmd:
                attrcmd = [attrcmd, '=%s' % attributes, b_path]
@@ -1187,14 +1219,13 @@ class AnsibleModule(object):
                rc, out, err = self.run_command(attrcmd)
                if rc == 0:
                    res = out.split(' ')[0:2]
                    output['attr_flags'] = res[1].replace('-','').strip()
                    output['attr_flags'] = res[1].replace('-', '').strip()
                    output['version'] = res[0].strip()
                    output['attributes'] = format_attributes(output['attr_flags'])
        except:
            pass
        return output


    def _symbolic_mode_to_octal(self, path_stat, symbolic_mode):
        new_mode = stat.S_IMODE(path_stat.st_mode)

@@ -1247,13 +1278,13 @@ class AnsibleModule(object):
            X_perms = {
                'u': {'X': stat.S_IXUSR},
                'g': {'X': stat.S_IXGRP},
                'o': {'X': stat.S_IXOTH}
                'o': {'X': stat.S_IXOTH},
            }
        else:
            X_perms = {
                'u': {'X': 0},
                'g': {'X': 0},
                'o': {'X': 0}
                'o': {'X': 0},
            }

        user_perms_to_modes = {
@@ -1265,7 +1296,8 @@ class AnsibleModule(object):
                't': 0,
                'u': prev_mode & stat.S_IRWXU,
                'g': (prev_mode & stat.S_IRWXG) << 3,
                'o': (prev_mode & stat.S_IRWXO) << 6 },
                'o': (prev_mode & stat.S_IRWXO) << 6,
            },
            'g': {
                'r': stat.S_IRGRP,
                'w': stat.S_IWGRP,
@@ -1274,7 +1306,8 @@ class AnsibleModule(object):
                't': 0,
                'u': (prev_mode & stat.S_IRWXU) >> 3,
                'g': prev_mode & stat.S_IRWXG,
                'o': (prev_mode & stat.S_IRWXO) << 3 },
                'o': (prev_mode & stat.S_IRWXO) << 3,
            },
            'o': {
                'r': stat.S_IROTH,
                'w': stat.S_IWOTH,
@@ -1283,14 +1316,17 @@ class AnsibleModule(object):
                't': stat.S_ISVTX,
                'u': (prev_mode & stat.S_IRWXU) >> 6,
                'g': (prev_mode & stat.S_IRWXG) >> 3,
                'o': prev_mode & stat.S_IRWXO }
                'o': prev_mode & stat.S_IRWXO,
            }
        }

        # Insert X_perms into user_perms_to_modes
        for key, value in X_perms.items():
            user_perms_to_modes[key].update(value)

        or_reduce = lambda mode, perm: mode | user_perms_to_modes[user][perm]
        def or_reduce(mode, perm):
            return mode | user_perms_to_modes[user][perm]

        return reduce(or_reduce, perms, 0)

    def set_fs_attributes_if_different(self, file_args, changed, diff=None, expand=True):
@@ -1383,10 +1419,10 @@ class AnsibleModule(object):

    def _handle_aliases(self, spec=None):
        # this uses exceptions as it happens before we can safely call fail_json
        aliases_results = {} #alias:canon
        aliases_results = {}  # alias:canon
        if spec is None:
            spec = self.argument_spec
        for (k,v) in spec.items():
        for (k, v) in spec.items():
            self._legal_inputs.append(k)
            aliases = v.get('aliases', None)
            default = v.get('default', None)
@@ -1409,7 +1445,7 @@ class AnsibleModule(object):
    def _check_arguments(self, check_invalid_arguments):
        self._syslog_facility = 'LOG_USER'
        unsupported_parameters = set()
        for (k,v) in list(self.params.items()):
        for (k, v) in list(self.params.items()):

            if k == '_ansible_check_mode' and v:
                self.check_mode = True
@@ -1444,7 +1480,7 @@ class AnsibleModule(object):
            elif check_invalid_arguments and k not in self._legal_inputs:
                unsupported_parameters.add(k)

            #clean up internal params:
            # clean up internal params:
            if k.startswith('_ansible_'):
                del self.params[k]

@@ -1482,20 +1518,20 @@ class AnsibleModule(object):
        if spec is None:
            return
        for check in spec:
            counts = [ self._count_terms([field]) for field in check ]
            non_zero = [ c for c in counts if c > 0 ]
            counts = [self._count_terms([field]) for field in check]
            non_zero = [c for c in counts if c > 0]
            if len(non_zero) > 0:
                if 0 in counts:
                    self.fail_json(msg="parameters are required together: %s" % (check,))

    def _check_required_arguments(self, spec=None, param=None ):
    def _check_required_arguments(self, spec=None, param=None):
        ''' ensure all required arguments are present '''
        missing = []
        if spec is None:
            spec = self.argument_spec
        if param is None:
            param = self.params
        for (k,v) in spec.items():
        for (k, v) in spec.items():
            required = v.get('required', False)
            if required and k not in param:
                missing.append(k)
@@ -1534,8 +1570,8 @@ class AnsibleModule(object):
            spec = self.argument_spec
        if param is None:
            param = self.params
        for (k,v) in spec.items():
            choices = v.get('choices',None)
        for (k, v) in spec.items():
            choices = v.get('choices', None)
            if choices is None:
                continue
            if isinstance(choices, SEQUENCETYPE) and not isinstance(choices, (binary_type, text_type)):
@@ -1561,8 +1597,8 @@ class AnsibleModule(object):
                        (param[k],) = overlap

                if param[k] not in choices:
                    choices_str=",".join([to_native(c) for c in choices])
                    msg="value of %s must be one of: %s, got: %s" % (k, choices_str, param[k])
                    choices_str = ",".join([to_native(c) for c in choices])
                    msg = "value of %s must be one of: %s, got: %s" % (k, choices_str, param[k])
                    self.fail_json(msg=msg)
            else:
                self.fail_json(msg="internal error: choices for argument %s are not iterable: %s" % (k, choices))
@@ -1610,7 +1646,7 @@ class AnsibleModule(object):
        if isinstance(value, string_types):
            return value.split(",")
        elif isinstance(value, int) or isinstance(value, float):
            return [ str(value) ]
            return [str(value)]

        raise TypeError('%s cannot be converted to a list' % type(value))

@@ -1703,14 +1739,12 @@ class AnsibleModule(object):
    def _check_type_raw(self, value):
        return value


    def _check_type_bytes(self, value):
        try:
            self.human_to_bytes(value)
        except ValueError:
            raise TypeError('%s cannot be converted to a Byte value' % type(value))


    def _check_type_bits(self, value):
        try:
            self.human_to_bytes(value, isbits=True)
@@ -1761,7 +1795,7 @@ class AnsibleModule(object):
                self._check_argument_values(spec, param[k])

    def _set_defaults(self, pre=True):
        for (k,v) in self.argument_spec.items():
        for (k, v) in self.argument_spec.items():
            default = v.get('default', None)
            if pre is True:
                # this prevents setting defaults on required items
@@ -1773,7 +1807,7 @@ class AnsibleModule(object):
                    self.params[k] = default

    def _set_fallbacks(self):
        for k,v in self.argument_spec.items():
        for (k, v) in self.argument_spec.items():
            fallback = v.get('fallback', (None,))
            fallback_strategy = fallback[0]
            fallback_args = []
@@ -1863,9 +1897,7 @@ class AnsibleModule(object):
            if self.boolean(no_log):
                log_args[param] = 'NOT_LOGGING_PARAMETER'
            # try to capture all passwords/passphrase named fields missed by no_log
            elif PASSWORD_MATCH.search(param) and \
               arg_opts.get('type', 'str') != 'bool' and \
               not arg_opts.get('choices', False):
            elif PASSWORD_MATCH.search(param) and arg_opts.get('type', 'str') != 'bool' and not arg_opts.get('choices', False):
                # skip boolean and enums as they are about 'password' state
                log_args[param] = 'NOT_LOGGING_PASSWORD'
                self.warn('Module did not set no_log for %s' % param)
@@ -1885,11 +1917,10 @@ class AnsibleModule(object):

        self.log(msg, log_args=log_args)


    def _set_cwd(self):
        try:
            cwd = os.getcwd()
            if not os.access(cwd, os.F_OK|os.R_OK):
            if not os.access(cwd, os.F_OK | os.R_OK):
                raise Exception()
            return cwd
        except:
@@ -1897,7 +1928,7 @@ class AnsibleModule(object):
            # Try and move to a neutral location to prevent errors
            for cwd in [os.path.expandvars('$HOME'), tempfile.gettempdir()]:
                try:
                    if os.access(cwd, os.F_OK|os.R_OK):
                    if os.access(cwd, os.F_OK | os.R_OK):
                        os.chdir(cwd)
                        return cwd
                except:
@@ -2011,7 +2042,7 @@ class AnsibleModule(object):
    def exit_json(self, **kwargs):
        ''' return from the module, without error '''

        if not 'changed' in kwargs:
        if 'changed' not in kwargs:
            kwargs['changed'] = False

        self.do_cleanup_files()
@@ -2024,7 +2055,7 @@ class AnsibleModule(object):
        assert 'msg' in kwargs, "implementation error -- msg to explain the error is required"
        kwargs['failed'] = True

        if not 'changed' in kwargs:
        if 'changed' not in kwargs:
            kwargs['changed'] = False

        self.do_cleanup_files()
@@ -2175,7 +2206,7 @@ class AnsibleModule(object):
        native_suffix = os.path.basename(b_dest)
        native_prefix = b('.ansible_tmp')
        try:
            tmp_dest_fd, tmp_dest_name = tempfile.mkstemp( prefix=native_prefix, dir=native_dest_dir, suffix=native_suffix)
            tmp_dest_fd, tmp_dest_name = tempfile.mkstemp(prefix=native_prefix, dir=native_dest_dir, suffix=native_suffix)
        except (OSError, IOError):
            e = get_exception()
            self.fail_json(msg='The destination directory (%s) is not writable by the current user. Error was: %s' % (os.path.dirname(dest), e))
@@ -2261,7 +2292,6 @@ class AnsibleModule(object):
                e = get_exception()
                self.fail_json(msg='Could not write data to file (%s) from (%s): %s' % (dest, src, e), exception=traceback.format_exc())


    def _read_from_pipes(self, rpipes, rfds, file_descriptor):
        data = b('')
        if file_descriptor in rfds:
@@ -2359,7 +2389,7 @@ class AnsibleModule(object):

        # expand things like $HOME and ~
        if not shell:
            args = [ os.path.expanduser(os.path.expandvars(x)) for x in args if x is not None ]
            args = [os.path.expanduser(os.path.expandvars(x)) for x in args if x is not None]

        rc = 0
        msg = None
@@ -2387,9 +2417,9 @@ class AnsibleModule(object):
        # Clean out python paths set by ansiballz
        if 'PYTHONPATH' in os.environ:
            pypaths = os.environ['PYTHONPATH'].split(':')
            pypaths = [x for x in pypaths \
                       if not x.endswith('/ansible_modlib.zip') \
                       and not x.endswith('/debug_dir')]
            pypaths = [x for x in pypaths
                       if not x.endswith('/ansible_modlib.zip') and
                       not x.endswith('/debug_dir')]
            os.environ['PYTHONPATH'] = ':'.join(pypaths)
            if not os.environ['PYTHONPATH']:
                del os.environ['PYTHONPATH']
@@ -2510,7 +2540,7 @@ class AnsibleModule(object):
            self.fail_json(rc=e.errno, msg=to_native(e), cmd=clean_args)
        except Exception:
            e = get_exception()
            self.log("Error Executing CMD:%s Exception:%s" % (clean_args,to_native(traceback.format_exc())))
            self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(traceback.format_exc())))
            self.fail_json(rc=257, msg=to_native(e), exception=traceback.format_exc(), cmd=clean_args)

        # Restore env settings

@@ -25,9 +25,12 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import json

from ansible.module_utils.urls import fetch_url


class Response(object):

    def __init__(self, resp, info):

@@ -25,18 +25,20 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

import signal
import socket
import struct
import signal

from ansible.module_utils.basic import get_exception
from ansible.module_utils._text import to_bytes, to_native


def send_data(s, data):
    packed_len = struct.pack('!Q',len(data))
    packed_len = struct.pack('!Q', len(data))
    return s.sendall(packed_len + data)


def recv_data(s):
    header_len = 8  # size of a packed unsigned long long
    data = to_bytes("")
@@ -45,7 +47,7 @@ def recv_data(s):
        if not d:
            return None
        data += d
    data_len = struct.unpack('!Q',data[:header_len])[0]
    data_len = struct.unpack('!Q', data[:header_len])[0]
    data = data[header_len:]
    while len(data) < data_len:
        d = s.recv(data_len - len(data))
@@ -54,6 +56,7 @@ def recv_data(s):
        data += d
    return data


def exec_command(module, command):
    try:
        sf = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)

@@ -26,9 +26,11 @@
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


class SQLParseError(Exception):
    pass


class UnclosedQuoteError(SQLParseError):
    pass

@@ -38,6 +40,7 @@ class UnclosedQuoteError(SQLParseError):
_PG_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, schema=2, table=3, column=4, role=1)
_MYSQL_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, table=2, column=3, role=1, vars=1)


def _find_end_quote(identifier, quote_char):
    accumulate = 0
    while True:
@@ -47,12 +50,12 @@ def _find_end_quote(identifier, quote_char):
            raise UnclosedQuoteError
        accumulate = accumulate + quote
        try:
            next_char = identifier[quote+1]
            next_char = identifier[quote + 1]
        except IndexError:
            return accumulate
        if next_char == quote_char:
            try:
                identifier = identifier[quote+2:]
                identifier = identifier[quote + 2:]
                accumulate = accumulate + 2
            except IndexError:
                raise UnclosedQuoteError
@@ -73,10 +76,10 @@ def _identifier_parse(identifier, quote_char):
        already_quoted = False
    else:
        if end_quote < len(identifier) - 1:
            if identifier[end_quote+1] == '.':
            if identifier[end_quote + 1] == '.':
                dot = end_quote + 1
                first_identifier = identifier[:dot]
                next_identifier = identifier[dot+1:]
                next_identifier = identifier[dot + 1:]
                further_identifiers = _identifier_parse(next_identifier, quote_char)
                further_identifiers.insert(0, first_identifier)
            else:
@@ -88,19 +91,19 @@ def _identifier_parse(identifier, quote_char):
        try:
            dot = identifier.index('.')
        except ValueError:
            identifier = identifier.replace(quote_char, quote_char*2)
            identifier = identifier.replace(quote_char, quote_char * 2)
            identifier = ''.join((quote_char, identifier, quote_char))
            further_identifiers = [identifier]
        else:
            if dot == 0 or dot >= len(identifier) - 1:
                identifier = identifier.replace(quote_char, quote_char*2)
                identifier = identifier.replace(quote_char, quote_char * 2)
                identifier = ''.join((quote_char, identifier, quote_char))
                further_identifiers = [identifier]
            else:
                first_identifier = identifier[:dot]
                next_identifier = identifier[dot+1:]
                next_identifier = identifier[dot + 1:]
                further_identifiers = _identifier_parse(next_identifier, quote_char)
                first_identifier = first_identifier.replace(quote_char, quote_char*2)
                first_identifier = first_identifier.replace(quote_char, quote_char * 2)
                first_identifier = ''.join((quote_char, first_identifier, quote_char))
                further_identifiers.insert(0, first_identifier)

@@ -113,6 +116,7 @@ def pg_quote_identifier(identifier, id_type):
        raise SQLParseError('PostgreSQL does not support %s with more than %i dots' % (id_type, _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]))
    return '.'.join(identifier_fragments)


def mysql_quote_identifier(identifier, id_type):
    identifier_fragments = _identifier_parse(identifier, quote_char='`')
    if len(identifier_fragments) > _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]:

@@ -327,11 +327,10 @@ class AnsibleDockerClient(Client):
    def _handle_ssl_error(self, error):
        match = re.match(r"hostname.*doesn\'t match (\'.*\')", str(error))
        if match:
            msg = "You asked for verification that Docker host name matches %s. The actual hostname is %s. " \
                "Most likely you need to set DOCKER_TLS_HOSTNAME or pass tls_hostname with a value of %s. " \
                "You may also use TLS without verification by setting the tls parameter to true." \
                % (self.auth_params['tls_hostname'], match.group(1), match.group(1))
            self.fail(msg)
            self.fail("You asked for verification that Docker host name matches %s. The actual hostname is %s. "
                      "Most likely you need to set DOCKER_TLS_HOSTNAME or pass tls_hostname with a value of %s. "
                      "You may also use TLS without verification by setting the tls parameter to true."
                      % (self.auth_params['tls_hostname'], match.group(1), match.group(1)))
        self.fail("SSL Exception: %s" % (error))

    def get_container(self, name=None):
@@ -448,5 +447,3 @@ class AnsibleDockerClient(Client):
        new_tag = self.find_image(name, tag)

        return new_tag, old_tag == new_tag


@@ -34,7 +34,7 @@ from ansible.module_utils.cloud import CloudRetry

try:
    import boto
    import boto.ec2 #boto does weird import stuff
    import boto.ec2  # boto does weird import stuff
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False
@@ -54,6 +54,7 @@ except:

from ansible.module_utils.six import string_types, binary_type, text_type


class AnsibleAWSError(Exception):
    pass

@@ -98,6 +99,7 @@ def boto3_conn(module, conn_type=None, resource=None, region=None, endpoint=None
        module.fail_json(msg='There is an issue in the code of the module. You must specify either both, resource or client to the conn_type '
                             'parameter in the boto3_conn function call')


def _boto3_conn(conn_type=None, resource=None, region=None, endpoint=None, **params):
    profile = params.pop('profile_name', None)

@@ -120,6 +122,7 @@ def _boto3_conn(conn_type=None, resource=None, region=None, endpoint=None, **par

boto3_inventory_conn = _boto3_conn


def aws_common_argument_spec():
    return dict(
        ec2_url=dict(),
@@ -291,6 +294,7 @@ def ec2_connect(module):

    return ec2


def paging(pause=0, marker_property='marker'):
    """ Adds paging to boto retrieval functions that support a 'marker'
        this is configurable as not all boto functions seem to use the
@@ -330,7 +334,6 @@ def camel_dict_to_snake_dict(camel_dict):

        return all_cap_re.sub(r'\1_\2', s1).lower()


    def value_is_list(camel_list):

        checked_list = []
@@ -344,7 +347,6 @@ def camel_dict_to_snake_dict(camel_dict):

        return checked_list


    snake_dict = {}
    for k, v in camel_dict.items():
        if isinstance(v, dict):
@@ -403,7 +405,7 @@ def ansible_dict_to_boto3_filter_list(filters_dict):
    """

    filters_list = []
    for k,v in filters_dict.items():
    for k, v in filters_dict.items():
        filter_dict = {'Name': k}
        if isinstance(v, string_types):
            filter_dict['Values'] = [v]
@@ -470,7 +472,7 @@ def ansible_dict_to_boto3_tag_list(tags_dict, tag_name_key_name='Key', tag_value
    """

    tags_list = []
    for k,v in tags_dict.items():
    for k, v in tags_dict.items():
        tags_list.append({tag_name_key_name: k, tag_value_key_name: v})

    return tags_list
@@ -491,7 +493,6 @@ def get_ec2_security_group_ids_from_names(sec_group_list, ec2_connection, vpc_id
        else:
            return sg.name


    def get_sg_id(sg, boto3):

        if boto3:
@@ -520,7 +521,7 @@ def get_ec2_security_group_ids_from_names(sec_group_list, ec2_connection, vpc_id
            all_sec_groups = ec2_connection.describe_security_groups()['SecurityGroups']
    else:
        if vpc_id:
            filters = { 'vpc-id': vpc_id }
            filters = {'vpc-id': vpc_id}
            all_sec_groups = ec2_connection.get_all_security_groups(filters=filters)
        else:
            all_sec_groups = ec2_connection.get_all_security_groups()
@@ -536,7 +537,7 @@ def get_ec2_security_group_ids_from_names(sec_group_list, ec2_connection, vpc_id
        if len(still_unmatched) > 0:
            raise ValueError("The following group names are not valid: %s" % ', '.join(still_unmatched))

    sec_group_id_list += [ str(get_sg_id(all_sg, boto3)) for all_sg in all_sec_groups if str(get_sg_name(all_sg, boto3)) in sec_group_name_list ]
    sec_group_id_list += [str(get_sg_id(all_sg, boto3)) for all_sg in all_sec_groups if str(get_sg_name(all_sg, boto3)) in sec_group_name_list]

    return sec_group_id_list

@@ -56,7 +56,6 @@ def f5_argument_spec():
        server_port=dict(
            type='int',
            default=443,
            required=False,
            fallback=(env_fallback, ['F5_SERVER_PORT'])
        ),
        state=dict(
@@ -80,7 +79,7 @@ def f5_parse_arguments(module):
        import ssl
        if not hasattr(ssl, 'SSLContext'):
            module.fail_json(
                msg="bigsuds does not support verifying certificates with python < 2.7.9." \
                msg="bigsuds does not support verifying certificates with python < 2.7.9."
                    "Either update python or set validate_certs=False on the task'")

    return (
@@ -122,26 +121,22 @@ def bigip_api(bigip, user, password, validate_certs, port=443):


# Fully Qualified name (with the partition)
def fq_name(partition,name):
def fq_name(partition, name):
    if name is not None and not name.startswith('/'):
        return '/%s/%s' % (partition,name)
        return '/%s/%s' % (partition, name)
    return name


# Fully Qualified name (with partition) for a list
def fq_list_names(partition,list_names):
def fq_list_names(partition, list_names):
    if list_names is None:
        return None
    return map(lambda x: fq_name(partition,x),list_names)



    return map(lambda x: fq_name(partition, x), list_names)


# New style

from abc import ABCMeta, abstractproperty
from ansible.module_utils.six import with_metaclass
from collections import defaultdict

try:
@@ -158,7 +153,7 @@ except ImportError:


from ansible.module_utils.basic import *
from ansible.module_utils.six import iteritems
from ansible.module_utils.six import iteritems, with_metaclass


F5_COMMON_ARGS = dict(
@@ -187,7 +182,6 @@ F5_COMMON_ARGS = dict(
    server_port=dict(
        type='int',
        default=443,
        required=False,
        fallback=(env_fallback, ['F5_SERVER_PORT'])
    ),
    state=dict(
@@ -286,7 +280,7 @@ class AnsibleF5Parameters(object):
    def __init__(self, params=None):
        self._values = defaultdict(lambda: None)
        if params:
            for k,v in iteritems(params):
            for k, v in iteritems(params):
                if self.api_map is not None and k in self.api_map:
                    dict_to_use = self.api_map
                    map_key = self.api_map[k]

|
|||
from ansible.module_utils.pycompat24 import get_exception
|
||||
|
||||
|
||||
#check for pyFG lib
|
||||
# check for pyFG lib
|
||||
try:
|
||||
from pyFG import FortiOS, FortiConfig
|
||||
from pyFG.exceptions import CommandExecutionException, FailedCommit
|
||||
HAS_PYFG=True
|
||||
HAS_PYFG = True
|
||||
except ImportError:
|
||||
HAS_PYFG=False
|
||||
HAS_PYFG = False
|
||||
|
||||
fortios_argument_spec = dict(
|
||||
file_mode = dict(type='bool', default=False),
|
||||
config_file = dict(type='path'),
|
||||
host = dict( ),
|
||||
username = dict( ),
|
||||
password = dict(type='str', no_log=True ),
|
||||
timeout = dict(type='int', default=60),
|
||||
vdom = dict(type='str', default=None ),
|
||||
backup = dict(type='bool', default=False),
|
||||
backup_path = dict(type='path'),
|
||||
backup_filename = dict(type='str'),
|
||||
file_mode=dict(type='bool', default=False),
|
||||
config_file=dict(type='path'),
|
||||
host=dict(),
|
||||
username=dict(),
|
||||
password=dict(type='str', no_log=True),
|
||||
timeout=dict(type='int', default=60),
|
||||
vdom=dict(type='str'),
|
||||
backup=dict(type='bool', default=False),
|
||||
backup_path=dict(type='path'),
|
||||
backup_filename=dict(type='str'),
|
||||
)
|
||||
|
||||
fortios_required_if = [
|
||||
['file_mode', False, ['host', 'username', 'password']],
|
||||
['file_mode', True, ['config_file']],
|
||||
['backup', True , ['backup_path'] ],
|
||||
['backup', True, ['backup_path']],
|
||||
]
|
||||
|
||||
fortios_mutually_exclusive = [
|
||||
|
@ -67,12 +67,12 @@ fortios_mutually_exclusive = [
|
|||
]
|
||||
|
||||
fortios_error_codes = {
|
||||
'-3':"Object not found",
|
||||
'-61':"Command error"
|
||||
'-3': "Object not found",
|
||||
'-61': "Command error"
|
||||
}
|
||||
|
||||
|
||||
def backup(module,running_config):
|
||||
def backup(module, running_config):
|
||||
backup_path = module.params['backup_path']
|
||||
backup_filename = module.params['backup_filename']
|
||||
if not os.path.exists(backup_path):
|
||||
|
@ -91,8 +91,6 @@ def backup(module,running_config):
|
|||
module.fail_json(msg="Can't create backup file {0} Permission denied ?".format(filename))
|
||||
|
||||
|
||||
|
||||
|
||||
class AnsibleFortios(object):
|
||||
def __init__(self, module):
|
||||
if not HAS_PYFG:
|
||||
|
@ -103,7 +101,6 @@ class AnsibleFortios(object):
|
|||
}
|
||||
self.module = module
|
||||
|
||||
|
||||
def _connect(self):
|
||||
if self.module.params['file_mode']:
|
||||
self.forti_device = FortiOS('')
|
||||
|
@ -122,11 +119,10 @@ class AnsibleFortios(object):
|
|||
e = get_exception()
|
||||
self.module.fail_json(msg='Error connecting device. %s' % e)
|
||||
|
||||
|
||||
def load_config(self, path):
|
||||
self.path = path
|
||||
self._connect()
|
||||
#load in file_mode
|
||||
# load in file_mode
|
||||
if self.module.params['file_mode']:
|
||||
try:
|
||||
f = open(self.module.params['config_file'], 'r')
|
||||
|
@ -135,10 +131,10 @@ class AnsibleFortios(object):
|
|||
except IOError:
|
||||
e = get_exception()
|
||||
self.module.fail_json(msg='Error reading configuration file. %s' % e)
|
||||
self.forti_device.load_config(config_text=running, path = path)
|
||||
self.forti_device.load_config(config_text=running, path=path)
|
||||
|
||||
else:
|
||||
#get config
|
||||
# get config
|
||||
try:
|
||||
self.forti_device.load_config(path=path)
|
||||
except Exception:
|
||||
|
@ -146,22 +142,21 @@ class AnsibleFortios(object):
|
|||
e = get_exception()
|
||||
self.module.fail_json(msg='Error reading running config. %s' % e)
|
||||
|
||||
#set configs in object
|
||||
# set configs in object
|
||||
self.result['running_config'] = self.forti_device.running_config.to_text()
|
||||
self.candidate_config = self.forti_device.candidate_config
|
||||
|
||||
#backup if needed
|
||||
# backup if needed
|
||||
if self.module.params['backup']:
|
||||
backup(self.module, self.forti_device.running_config.to_text())
|
||||
|
||||
|
||||
def apply_changes(self):
|
||||
change_string = self.forti_device.compare_config()
|
||||
if change_string:
|
||||
self.result['change_string'] = change_string
|
||||
self.result['changed'] = True
|
||||
|
||||
#Commit if not check mode
|
||||
# Commit if not check mode
|
||||
if change_string and not self.module.check_mode:
|
||||
if self.module.params['file_mode']:
|
||||
try:
|
||||
|
@ -175,35 +170,31 @@ class AnsibleFortios(object):
|
|||
try:
|
||||
self.forti_device.commit()
|
||||
except FailedCommit:
|
||||
#Something's wrong (rollback is automatic)
|
||||
# Something's wrong (rollback is automatic)
|
||||
self.forti_device.close()
|
||||
e = get_exception()
|
||||
error_list = self.get_error_infos(e)
|
||||
self.module.fail_json(msg_error_list=error_list, msg="Unable to commit change, check your args, the error was %s" % e.message )
|
||||
self.module.fail_json(msg_error_list=error_list, msg="Unable to commit change, check your args, the error was %s" % e.message)
|
||||
|
||||
self.forti_device.close()
|
||||
self.module.exit_json(**self.result)
|
||||
|
||||
|
||||
def del_block(self, block_id):
|
||||
self.forti_device.candidate_config[self.path].del_block(block_id)
|
||||
|
||||
|
||||
def add_block(self, block_id, block):
|
||||
self.forti_device.candidate_config[self.path][block_id] = block
|
||||
|
||||
|
||||
def get_error_infos(self, cli_errors):
|
||||
error_list = []
|
||||
for errors in cli_errors.args:
|
||||
for error in errors:
|
||||
error_code = error[0]
|
||||
error_string = error[1]
|
||||
error_type = fortios_error_codes.get(error_code,"unknown")
|
||||
error_list.append(dict(error_code=error_code, error_type=error_type, error_string= error_string))
|
||||
error_type = fortios_error_codes.get(error_code, "unknown")
|
||||
error_list.append(dict(error_code=error_code, error_type=error_type, error_string=error_string))
|
||||
|
||||
return error_list
|
||||
|
||||
def get_empty_configuration_block(self, block_name, block_type):
|
||||
return FortiConfig(block_name, block_type)
|
||||
|
||||
|
|
|
@ -27,9 +27,6 @@
|
|||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
|
||||
from ansible.module_utils.gcp import gcp_connect
|
||||
from ansible.module_utils.gcp import unexpected_error_msg as gcp_error
|
||||
|
||||
try:
|
||||
from libcloud.dns.types import Provider
|
||||
from libcloud.dns.providers import get_driver
|
||||
|
@ -37,9 +34,13 @@ try:
|
|||
except ImportError:
|
||||
HAS_LIBCLOUD_BASE = False
|
||||
|
||||
from ansible.module_utils.gcp import gcp_connect
|
||||
from ansible.module_utils.gcp import unexpected_error_msg as gcp_error
|
||||
|
||||
USER_AGENT_PRODUCT = "Ansible-gcdns"
|
||||
USER_AGENT_VERSION = "v1"
|
||||
|
||||
|
||||
def gcdns_connect(module, provider=None):
|
||||
"""Return a GCP connection for Google Cloud DNS."""
|
||||
if not HAS_LIBCLOUD_BASE:
|
||||
|
@ -48,6 +49,7 @@ def gcdns_connect(module, provider=None):
|
|||
provider = provider or Provider.GOOGLE
|
||||
return gcp_connect(module, provider, get_driver, USER_AGENT_PRODUCT, USER_AGENT_VERSION)
|
||||
|
||||
|
||||
def unexpected_error_msg(error):
|
||||
"""Create an error string based on passed in error."""
|
||||
return gcp_error(error)
|
||||
|
|
|
@@ -25,10 +25,6 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

from ansible.module_utils.gcp import gcp_connect
from ansible.module_utils.gcp import unexpected_error_msg as gcp_error

try:
    from libcloud.compute.types import Provider
@@ -37,9 +33,13 @@ try:
except ImportError:
    HAS_LIBCLOUD_BASE = False

from ansible.module_utils.gcp import gcp_connect
from ansible.module_utils.gcp import unexpected_error_msg as gcp_error

USER_AGENT_PRODUCT = "Ansible-gce"
USER_AGENT_VERSION = "v1"


def gce_connect(module, provider=None):
    """Return a GCP connection for Google Compute Engine."""
    if not HAS_LIBCLOUD_BASE:
@@ -48,6 +48,7 @@ def gce_connect(module, provider=None):

    return gcp_connect(module, provider, get_driver, USER_AGENT_PRODUCT, USER_AGENT_VERSION)


def unexpected_error_msg(error):
    """Create an error string based on passed in error."""
    return gcp_error(error)

@@ -502,8 +502,7 @@ class GCPUtils(object):

    @staticmethod
    def underscore_to_camel(txt):
        return txt.split('_')[0] + ''.join(x.capitalize()
                                           or '_' for x in txt.split('_')[1:])
        return txt.split('_')[0] + ''.join(x.capitalize() or '_' for x in txt.split('_')[1:])

    @staticmethod
    def remove_non_gcp_params(params):
@@ -626,7 +625,7 @@ class GCPUtils(object):
        # TODO(supertom): Isolate 'build-new-request' stuff.
        resource_name_singular = GCPUtils.get_entity_name_from_resource_name(
            resource_name)
        if op_resp['operationType'] == 'insert' or not 'entity_name' in parsed_url:
        if op_resp['operationType'] == 'insert' or 'entity_name' not in parsed_url:
            parsed_url['entity_name'] = GCPUtils.parse_gcp_url(op_resp['targetLink'])[
                'entity_name']
        args = {'project': project_id,

@@ -82,9 +82,9 @@ def infinibox_argument_spec():
    """Return standard base dictionary used for the argument_spec argument in AnsibleModule"""

    return dict(
        system = dict(required=True),
        user = dict(),
        password = dict(no_log=True),
        system=dict(required=True),
        user=dict(),
        password=dict(no_log=True),
    )

@@ -29,6 +29,7 @@ try:
except ImportError:
    import simplejson as json


# NB: a copy of this function exists in ../../modules/core/async_wrapper.py. Ensure any
# changes are propagated there.
def _filter_non_json_lines(data):

@@ -47,6 +47,7 @@ ARGS_DEFAULT_VALUE = {
    'timeout': 10
}


def check_args(module, warnings):
    provider = module.params['provider'] or {}
    for key in junos_argument_spec:
@@ -66,6 +67,7 @@ def check_args(module, warnings):
        if provider.get(param):
            module.no_log_values.update(return_values(provider[param]))


def _validate_rollback_id(module, value):
    try:
        if not 0 <= int(value) <= 49:
@@ -73,6 +75,7 @@ def _validate_rollback_id(module, value):
    except ValueError:
        module.fail_json(msg='rollback must be between 0 and 49')


def load_configuration(module, candidate=None, action='merge', rollback=None, format='xml'):

    if all((candidate is None, rollback is None)):
@@ -117,6 +120,7 @@ def load_configuration(module, candidate=None, action='merge', rollback=None, fo
        cfg.append(candidate)
    return send_request(module, obj)


def get_configuration(module, compare=False, format='xml', rollback='0'):
    if format not in CONFIG_FORMATS:
        module.fail_json(msg='invalid config format specified')
@@ -127,6 +131,7 @@ def get_configuration(module, compare=False, format='xml', rollback='0'):
        xattrs['rollback'] = str(rollback)
    return send_request(module, Element('get-configuration', xattrs))


def commit_configuration(module, confirm=False, check=False, comment=None, confirm_timeout=None):
    obj = Element('commit-configuration')
    if confirm:
@@ -141,6 +146,7 @@ def commit_configuration(module, confirm=False, check=False, comment=None, confi
        subele.text = str(confirm_timeout)
    return send_request(module, obj)


def command(module, command, format='text', rpc_only=False):
    xattrs = {'format': format}
    if rpc_only:
@@ -148,8 +154,14 @@ def command(module, command, format='text', rpc_only=False):
        xattrs['format'] = 'text'
    return send_request(module, Element('command', xattrs, text=command))

lock_configuration = lambda x: send_request(x, Element('lock-configuration'))
unlock_configuration = lambda x: send_request(x, Element('unlock-configuration'))

def lock_configuration(x):
    return send_request(x, Element('lock-configuration'))


def unlock_configuration(x):
    return send_request(x, Element('unlock-configuration'))


@contextmanager
def locked_config(module):
@@ -159,6 +171,7 @@ def locked_config(module):
    finally:
        unlock_configuration(module)


def get_diff(module):

    reply = get_configuration(module, compare=True, format='text')
@@ -166,6 +179,7 @@ def get_diff(module):
    if output is not None:
        return to_text(output.text, encoding='latin1').strip()


def load_config(module, candidate, warnings, action='merge', commit=False, format='xml',
                comment=None, confirm=False, confirm_timeout=None):

@@ -192,5 +206,6 @@ def load_config(module, candidate, warnings, action='merge', commit=False, forma

    return diff


def get_param(module, key):
    return module.params[key] or module.params['provider'].get(key)

@@ -153,7 +153,7 @@ def not_in_host_file(self, host):
        if tokens[0].find(HASHED_KEY_MAGIC) == 0:
            # this is a hashed known host entry
            try:
                (kn_salt,kn_host) = tokens[0][len(HASHED_KEY_MAGIC):].split("|",2)
                (kn_salt, kn_host) = tokens[0][len(HASHED_KEY_MAGIC):].split("|", 2)
                hash = hmac.new(kn_salt.decode('base64'), digestmod=sha1)
                hash.update(host)
                if hash.digest() == kn_host.decode('base64'):

@@ -38,12 +38,14 @@ except ImportError:
# httplib/http.client connection using unix domain socket
import socket
import ssl

try:
    from httplib import HTTPConnection, HTTPSConnection
except ImportError:
    # Python 3
    from http.client import HTTPConnection, HTTPSConnection


class UnixHTTPConnection(HTTPConnection):
    def __init__(self, path):
        HTTPConnection.__init__(self, 'localhost')
@@ -54,11 +56,13 @@ class UnixHTTPConnection(HTTPConnection):
        sock.connect(self.path)
        self.sock = sock


class LXDClientException(Exception):
    def __init__(self, msg, **kwargs):
        self.msg = msg
        self.kwargs = kwargs


class LXDClient(object):
    def __init__(self, url, key_file=None, cert_file=None, debug=False):
        """LXD Client.

@@ -35,6 +35,7 @@ try:
except ImportError:
    mysqldb_found = False


def mysql_connect(module, login_user=None, login_password=None, config_file='', ssl_cert=None, ssl_key=None, ssl_ca=None, db=None, cursor_class=None,
                  connect_timeout=30):
    config = {}

@@ -92,13 +92,20 @@ class ConfigLine(object):
        assert isinstance(obj, ConfigLine), 'child must be of type `ConfigLine`'
        self._children.append(obj)


def ignore_line(text, tokens=None):
    for item in (tokens or DEFAULT_COMMENT_TOKENS):
        if text.startswith(item):
            return True

_obj_to_text = lambda x: [o.text for o in x]
_obj_to_raw = lambda x: [o.raw for o in x]

def _obj_to_text(x):
    return [o.text for o in x]


def _obj_to_raw(x):
    return [o.raw for o in x]


def _obj_to_block(objects, visited=None):
    items = list()
@@ -110,6 +117,7 @@ def _obj_to_block(objects, visited=None):
            items.append(child)
    return _obj_to_raw(items)


def dumps(objects, output='block', comments=False):
    if output == 'block':
        items = _obj_to_block(objects)
@@ -130,6 +138,7 @@ def dumps(objects, output='block', comments=False):

    return '\n'.join(items)


class NetworkConfig(object):

    def __init__(self, indent=1, contents=None):
@@ -328,7 +337,7 @@ class NetworkConfig(object):
        offset = 0
        obj = None

        ## global config command
        # global config command
        if not parents:
            for line in lines:
                item = ConfigLine(line)

@@ -24,17 +24,16 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

import re
import time
import shlex
import time

from ansible.module_utils.basic import BOOLEANS_TRUE, BOOLEANS_FALSE
from ansible.module_utils.basic import get_exception
from ansible.module_utils.basic import BOOLEANS_TRUE, BOOLEANS_FALSE, get_exception
from ansible.module_utils.six import string_types, text_type
from ansible.module_utils.six.moves import zip


def to_list(val):
    if isinstance(val, (list, tuple)):
        return list(val)
@@ -49,20 +48,23 @@ class FailedConditionsError(Exception):
        super(FailedConditionsError, self).__init__(msg)
        self.failed_conditions = failed_conditions


class FailedConditionalError(Exception):
    def __init__(self, msg, failed_conditional):
        super(FailedConditionalError, self).__init__(msg)
        self.failed_conditional = failed_conditional


class AddCommandError(Exception):
    def __init__(self, msg, command):
        super(AddCommandError, self).__init__(msg)
        self.command = command


class AddConditionError(Exception):
    def __init__(self, msg, condition):
        super(AddConditionError, self).__init__(msg)
        self.condition=condition
        self.condition = condition


class Cli(object):
@@ -105,6 +107,7 @@ class Cli(object):

        return responses


class Command(object):

    def __init__(self, command, output=None, prompt=None, response=None,
@@ -122,6 +125,7 @@ class Command(object):
    def __str__(self):
        return self.command_string


class CommandRunner(object):

    def __init__(self, module):

@@ -26,14 +26,13 @@
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from contextlib import contextmanager
from xml.etree.ElementTree import Element, SubElement
from xml.etree.ElementTree import tostring, fromstring
from xml.etree.ElementTree import Element, SubElement, fromstring, tostring

from ansible.module_utils.connection import exec_command


NS_MAP = {'nc': "urn:ietf:params:xml:ns:netconf:base:1.0"}


def send_request(module, obj, check_rc=True):
    request = tostring(obj)
    rc, out, err = exec_command(module, request)
@@ -58,6 +57,7 @@ def send_request(module, obj, check_rc=True):
            return warnings
    return fromstring(out)


def children(root, iterable):
    for item in iterable:
        try:
@@ -65,33 +65,40 @@ def children(root, iterable):
        except NameError:
            ele = SubElement(root, item)


def lock(module, target='candidate'):
    obj = Element('lock')
    children(obj, ('target', target))
    return send_request(module, obj)


def unlock(module, target='candidate'):
    obj = Element('unlock')
    children(obj, ('target', target))
    return send_request(module, obj)


def commit(module):
    return send_request(module, Element('commit'))


def discard_changes(module):
    return send_request(module, Element('discard-changes'))


def validate(module):
    obj = Element('validate')
    children(obj, ('source', 'candidate'))
    return send_request(module, obj)


def get_config(module, source='running', filter=None):
    obj = Element('get-config')
    children(obj, ('source', source))
    children(obj, ('filter', filter))
    return send_request(module, obj)


@contextmanager
def locked_config(module):
    try:

@@ -52,6 +52,7 @@ NET_CONNECTION_ARGS = dict()

NET_CONNECTIONS = dict()


def _transitional_argument_spec():
    argument_spec = {}
    for key, value in iteritems(NET_TRANSPORT_ARGS):
@@ -59,6 +60,7 @@ def _transitional_argument_spec():
        argument_spec[key] = value
    return argument_spec


def to_list(val):
    if isinstance(val, (list, tuple)):
        return list(val)
@@ -75,12 +77,14 @@ class ModuleStub(object):
            self.params[key] = value.get('default')
        self.fail_json = fail_json


class NetworkError(Exception):

    def __init__(self, msg, **kwargs):
        super(NetworkError, self).__init__(msg)
        self.kwargs = kwargs


class Config(object):

    def __init__(self, connection):
@@ -185,6 +189,7 @@ class NetworkModule(AnsibleModule):
            exc = get_exception()
            self.fail_json(msg=to_native(exc))


def register_transport(transport, default=False):
    def register(cls):
        NET_CONNECTIONS[transport] = cls
@@ -193,6 +198,6 @@ def register_transport(transport, default=False):
        return cls
    return register


def add_argument(key, value):
    NET_CONNECTION_ARGS[key] = value

@@ -24,9 +24,10 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from ansible.module_utils.six import iteritems

from ansible.module_utils.basic import AnsibleFallbackNotFound
from ansible.module_utils.six import iteritems


def to_list(val):
    if isinstance(val, (list, tuple, set)):
@@ -36,6 +37,7 @@ def to_list(val):
    else:
        return list()


class ComplexDict(object):
    """Transforms a dict to with an argument spec

@@ -88,7 +90,6 @@ class ComplexDict(object):
                self_has_key = True
                attr['required'] = True


    def _dict(self, value):
        obj = {}
        for name, attr in iteritems(self._attributes):
@@ -131,8 +132,7 @@ class ComplexDict(object):

            if 'choices' in attr:
                if value[name] not in attr['choices']:
                    raise ValueError('%s must be one of %s, got %s' % \
                        (name, ', '.join(attr['choices']), value[name]))
                    raise ValueError('%s must be one of %s, got %s' % (name, ', '.join(attr['choices']), value[name]))

            if value[name] is not None:
                value_type = attr.get('type', 'str')
@@ -141,6 +141,7 @@ class ComplexDict(object):

        return value


class ComplexList(ComplexDict):
    """Extends ```ComplexDict``` to handle a list of dicts """

@@ -148,4 +149,3 @@ class ComplexList(ComplexDict):
        if not isinstance(values, (list, tuple)):
            raise TypeError('value must be an ordered iterable')
        return [(super(ComplexList, self).__call__(v)) for v in values]

@@ -30,6 +30,7 @@ import os
from ansible.module_utils.six import iteritems


def openstack_argument_spec():
    # DEPRECATED: This argument spec is only used for the deprecated old
    # OpenStack modules. It turns out that modern OpenStack auth is WAY
@@ -37,17 +38,17 @@ def openstack_argument_spec():
    # Consume standard OpenStack environment variables.
    # This is mainly only useful for ad-hoc command line operation as
    # in playbooks one would assume variables would be used appropriately
    OS_AUTH_URL=os.environ.get('OS_AUTH_URL', 'http://127.0.0.1:35357/v2.0/')
    OS_PASSWORD=os.environ.get('OS_PASSWORD', None)
    OS_REGION_NAME=os.environ.get('OS_REGION_NAME', None)
    OS_USERNAME=os.environ.get('OS_USERNAME', 'admin')
    OS_TENANT_NAME=os.environ.get('OS_TENANT_NAME', OS_USERNAME)
    OS_AUTH_URL = os.environ.get('OS_AUTH_URL', 'http://127.0.0.1:35357/v2.0/')
    OS_PASSWORD = os.environ.get('OS_PASSWORD', None)
    OS_REGION_NAME = os.environ.get('OS_REGION_NAME', None)
    OS_USERNAME = os.environ.get('OS_USERNAME', 'admin')
    OS_TENANT_NAME = os.environ.get('OS_TENANT_NAME', OS_USERNAME)

    spec = dict(
        login_username = dict(default=OS_USERNAME),
        auth_url = dict(default=OS_AUTH_URL),
        region_name = dict(default=OS_REGION_NAME),
        availability_zone = dict(default=None),
        login_username=dict(default=OS_USERNAME),
        auth_url=dict(default=OS_AUTH_URL),
        region_name=dict(default=OS_REGION_NAME),
        availability_zone=dict(),
    )
    if OS_PASSWORD:
        spec['login_password'] = dict(default=OS_PASSWORD)
@@ -59,6 +60,7 @@ def openstack_argument_spec():
        spec['login_tenant_name'] = dict(required=True)
    return spec


def openstack_find_nova_addresses(addresses, ext_tag, key_name=None):

    ret = []
@@ -71,6 +73,7 @@ def openstack_find_nova_addresses(addresses, ext_tag, key_name=None):
            ret.append(interface_spec['addr'])
    return ret


def openstack_full_argument_spec(**kwargs):
    spec = dict(
        cloud=dict(default=None),

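The whitespace rule applied throughout the hunk above is PEP8 E251: no spaces around '=' when it marks a keyword argument or a default value. A minimal, self-contained sketch of the rule (function and argument names here are hypothetical, not taken from the diff):

def build_spec(auth_url='http://127.0.0.1:35357/v2.0/', region_name=None):
    # E251 would flag the call style: build_spec(auth_url = '...', region_name = 'r1')
    return dict(auth_url=auth_url, region_name=region_name)

print(build_spec(auth_url='http://example.com', region_name='r1'))
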
@@ -37,6 +37,7 @@ from ansible.module_utils.urls import fetch_url, url_argument_spec
add_argument('use_ssl', dict(default=True, type='bool'))
add_argument('validate_certs', dict(default=True, type='bool'))


def get_opsidl():
    extschema = restparser.parseSchema(settings.get('ext_schema'))
    ovsschema = settings.get('ovs_schema')
@@ -129,7 +130,7 @@ class Rest(object):
    def authorize(self, params, **kwargs):
        raise NotImplementedError

    ### REST methods ###
    # REST methods

    def _url_builder(self, path):
        if path[0] == '/':
@@ -160,12 +161,12 @@ class Rest(object):
    def delete(self, path, data=None, headers=None):
        return self.request('DELETE', path, data, headers)

    ### Command methods ###
    # Command methods

    def run_commands(self, commands):
        raise NotImplementedError

    ### Config methods ###
    # Config methods

    def configure(self, commands):
        path = '/system/full-configuration'
@@ -212,7 +213,7 @@ class Cli(CliBase):

    NET_PASSWD_RE = re.compile(r"[\r\n]?password: $", re.I)

    ### Config methods ###
    # Config methods

    def configure(self, commands, **kwargs):
        cmds = ['configure terminal']

@@ -1,5 +1,6 @@
_DEVICE_CONFIGS = {}


def get_config(module, flags=[]):
    cmd = 'show running-config '
    cmd += ' '.join(flags)

@@ -504,7 +504,6 @@ class BaseModule(object):
                after[k] = update[k]
        return after


    def create(
        self,
        entity=None,
@@ -579,9 +578,14 @@ class BaseModule(object):
            # Wait for the entity to be created and to be in the defined state:
            entity_service = self._service.service(entity.id)

            state_condition = lambda entity: entity
            def state_condition(entity):
                return entity

            if result_state:
                state_condition = lambda entity: entity and entity.status == result_state

                def state_condition(entity):
                    return entity and entity.status == result_state

            wait(
                service=entity_service,
                condition=state_condition,

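The state_condition change above is an instance of PEP8 E731: prefer a def statement over assigning a lambda to a name, since a def carries a real __name__ and produces a usable traceback. A reduced sketch of the same refactor (illustrative names only, not the module's API):

# flagged by flake8 as E731:
is_up = lambda entity: entity is not None and entity.get('status') == 'up'

# preferred form, same behaviour:
def is_up(entity):
    return entity is not None and entity.get('status') == 'up'

print(is_up({'status': 'up'}))  # True
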
@@ -27,24 +27,21 @@
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# standard ansible imports
from ansible.module_utils.basic import get_exception

# standard PG imports
HAS_PSYCOPG2 = False
try:
    import psycopg2
    import psycopg2.extras
except ImportError:
    pass
else:
    HAS_PSYCOPG2 = True
except ImportError:
    HAS_PSYCOPG2 = False

from ansible.module_utils.basic import get_exception
from ansible.module_utils.six import iteritems


class LibraryError(Exception):
    pass


def ensure_libs(sslrootcert=None):
    if not HAS_PSYCOPG2:
        raise LibraryError('psycopg2 is not installed. we need psycopg2.')
@@ -54,14 +51,14 @@ def ensure_libs(sslrootcert=None):
    # no problems
    return None


def postgres_common_argument_spec():
    return dict(
        login_user = dict(default='postgres'),
        login_password = dict(default='', no_log=True),
        login_host = dict(default=''),
        login_unix_socket = dict(default=''),
        port = dict(type='int', default=5432),
        ssl_mode = dict(default='prefer', choices=['disable', 'allow', 'prefer', 'require', 'verify-ca', 'verify-full']),
        ssl_rootcert = dict(default=None),
        login_user=dict(default='postgres'),
        login_password=dict(default='', no_log=True),
        login_host=dict(default=''),
        login_unix_socket=dict(default=''),
        port=dict(type='int', default=5432),
        ssl_mode=dict(default='prefer', choices=['disable', 'allow', 'prefer', 'require', 'verify-ca', 'verify-full']),
        ssl_rootcert=dict(),
    )

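The reshuffled block above keeps the usual Ansible import-guard pattern for optional third-party libraries: trap the ImportError, record availability in a HAS_* flag, and let callers raise a friendly error later instead of crashing at import time. A generic sketch under that assumption (library and flag names are stand-ins):

HAS_SOMELIB = False
try:
    import somelib  # hypothetical optional dependency
except ImportError:
    pass
else:
    HAS_SOMELIB = True


def ensure_libs():
    # mirrors ensure_libs() above: fail loudly only when the library is needed
    if not HAS_SOMELIB:
        raise RuntimeError('somelib is not installed')
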
@@ -25,10 +25,10 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

import sys


def get_exception():
    """Get the current exception.

@@ -125,9 +125,9 @@ class Rhsm(RegistrationBase):
        # Pass supplied **kwargs as parameters to subscription-manager. Ignore
        # non-configuration parameters and replace '_' with '.'. For example,
        # 'server_hostname' becomes '--system.hostname'.
        for k,v in kwargs.items():
        for k, v in kwargs.items():
            if re.search(r'^(system|rhsm)_', k):
                args.append('--%s=%s' % (k.replace('_','.'), v))
                args.append('--%s=%s' % (k.replace('_', '.'), v))

        self.module.run_command(args, check_rc=True)
@@ -213,7 +213,7 @@ class RhsmPool(object):

    def __init__(self, module, **kwargs):
        self.module = module
        for k,v in kwargs.items():
        for k, v in kwargs.items():
            setattr(self, k, v)

    def __str__(self):
@@ -255,7 +255,7 @@ class RhsmPools(object):
                continue
            # If a colon ':' is found, parse
            elif ':' in line:
                (key, value) = line.split(':',1)
                (key, value) = line.split(':', 1)
                key = key.strip().replace(" ", "")  # To unify
                value = value.strip()
                if key in ['ProductName', 'SubscriptionName']:
@@ -265,7 +265,7 @@ class RhsmPools(object):
                    # Associate value with most recently recorded product
                    products[-1].__setattr__(key, value)
                # FIXME - log some warning?
                #else:
                # else:
                #     warnings.warn("Unhandled subscription key/value: %s/%s" % (key,value))
        return products
@@ -277,4 +277,3 @@ class RhsmPools(object):
        for product in self.products:
            if r.search(product._name):
                yield product

@@ -25,20 +25,20 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

import os
import shlex
import subprocess
import glob
import select
import os
import pickle
import platform
import select
import shlex
import subprocess
import traceback

from ansible.module_utils.six import PY2, b
from ansible.module_utils._text import to_bytes, to_text


def sysv_is_enabled(name):
    '''
    This function will check if the service name supplied
@@ -48,6 +48,7 @@ def sysv_is_enabled(name):
    '''
    return bool(glob.glob('/etc/rc?.d/S??%s' % name))


def get_sysv_script(name):
    '''
    This function will return the expected path for an init script
@@ -62,6 +63,7 @@ def get_sysv_script(name):

    return result


def sysv_exists(name):
    '''
    This function will return True or False depending on
@@ -71,6 +73,7 @@ def sysv_exists(name):
    '''
    return os.path.exists(get_sysv_script(name))


def fail_if_missing(module, found, service, msg=''):
    '''
    This function will return an error or exit gracefully depending on check mode status
@@ -87,6 +90,7 @@ def fail_if_missing(module, found, service, msg=''):
    else:
        module.fail_json(msg='Could not find the requested service %s: %s' % (service, msg))


def daemonize(module, cmd):
    '''
    Execute a command while detaching as a deamon, returns rc, stdout, and stderr.
@@ -100,10 +104,10 @@ def daemonize(module, cmd):
    '''

    # init some vars
    chunk = 4096 #FIXME: pass in as arg?
    chunk = 4096  # FIXME: pass in as arg?
    errors = 'surrogate_or_strict'

    #start it!
    # start it!
    try:
        pipe = os.pipe()
        pid = os.fork()
@@ -162,7 +166,7 @@ def daemonize(module, cmd):
            fds = [p.stdout, p.stderr]

            # loop reading output till its done
            output = { p.stdout: b(""), p.sterr: b("") }
            output = {p.stdout: b(""), p.sterr: b("")}
            while fds:
                rfd, wfd, efd = select.select(fds, [], fds, 1)
                if (rfd + wfd + efd) or p.poll():
@@ -207,6 +211,7 @@ def daemonize(module, cmd):
    # py2 and py3)
    return pickle.loads(to_bytes(return_data, errors=errors))


def check_ps(module, pattern):

    # Set ps flags

@@ -281,5 +281,8 @@ class CliBase(object):
            exc = get_exception()
            raise NetworkError(to_native(exc))

    run_commands = lambda self, x: self.execute(to_list(x))
    exec_command = lambda self, x: self.shell.send(self.to_command(x))
    def run_commands(self, x):
        return self.execute(to_list(x))

    def exec_command(self, x):
        return self.shell.send(self.to_command(x))

@@ -26,6 +26,7 @@
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


def _get_quote_state(token, quote_char):
    '''
    the goal of this block is to determine if the quoted string
@@ -36,7 +37,7 @@ def _get_quote_state(token, quote_char):
    prev_char = None
    for idx, cur_char in enumerate(token):
        if idx > 0:
            prev_char = token[idx-1]
            prev_char = token[idx - 1]
        if cur_char in '"\'' and prev_char != '\\':
            if quote_char:
                if cur_char == quote_char:
@@ -45,6 +46,7 @@ def _get_quote_state(token, quote_char):
                quote_char = cur_char
    return quote_char


def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
    '''
    this function counts the number of opening/closing blocks for a
@@ -59,6 +61,7 @@ def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
            cur_depth = 0
    return cur_depth


def split_args(args):
    '''
    Splits args on whitespace, but intelligently reassembles
@@ -105,7 +108,7 @@ def split_args(args):

    # now we loop over each split chunk, coalescing tokens if the white space
    # split occurred within quotes or a jinja2 block of some kind
    for itemidx,item in enumerate(items):
    for itemidx, item in enumerate(items):

        # we split on spaces and newlines separately, so that we
        # can tell which character we split on for reassembly
@@ -113,7 +116,7 @@ def split_args(args):
        tokens = item.strip().split(' ')

        line_continuation = False
        for idx,token in enumerate(tokens):
        for idx, token in enumerate(tokens):

            # if we hit a line continuation character, but
            # we're not inside quotes, ignore it and continue
@@ -201,12 +204,13 @@ def split_args(args):

    return params


def is_quoted(data):
    return len(data) > 0 and (data[0] == '"' and data[-1] == '"' or data[0] == "'" and data[-1] == "'")


def unquote(data):
    ''' removes first and last quotes from a string, if the string starts and ends with the same quotes '''
    if is_quoted(data):
        return data[1:-1]
    return data

@@ -114,11 +114,11 @@ def uldap():

        import univention.admin.uldap
        return univention.admin.uldap.access(
            host = config_registry()['ldap/master'],
            base = base_dn(),
            binddn = bind_dn,
            bindpw = pwd,
            start_tls = 1
            host=config_registry()['ldap/master'],
            base=base_dn(),
            binddn=bind_dn,
            bindpw=pwd,
            start_tls=1,
        )

    return _singleton('uldap', construct)

@@ -206,17 +206,14 @@ except ImportError:
    HAS_MATCH_HOSTNAME = False

if not HAS_MATCH_HOSTNAME:
    ###
    ### The following block of code is under the terms and conditions of the
    ### Python Software Foundation License
    ###
    # The following block of code is under the terms and conditions of the
    # Python Software Foundation License

    """The match_hostname() function from Python 3.4, essential when using SSL."""

    class CertificateError(ValueError):
        pass


    def _dnsname_match(dn, hostname, max_wildcards=1):
        """Matching according to RFC 6125, section 6.4.3

@@ -269,7 +266,6 @@ if not HAS_MATCH_HOSTNAME:
        pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
        return pat.match(hostname)


    def match_hostname(cert, hostname):
        """Verify that *cert* (in decoded format as returned by
        SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
@@ -299,20 +295,13 @@ if not HAS_MATCH_HOSTNAME:
                    return
                dnsnames.append(value)
        if len(dnsnames) > 1:
            raise CertificateError("hostname %r "
                "doesn't match either of %s"
                % (hostname, ', '.join(map(repr, dnsnames))))
            raise CertificateError("hostname %r " "doesn't match either of %s" % (hostname, ', '.join(map(repr, dnsnames))))
        elif len(dnsnames) == 1:
            raise CertificateError("hostname %r "
                "doesn't match %r"
                % (hostname, dnsnames[0]))
            raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
        else:
            raise CertificateError("no appropriate commonName or "
                "subjectAltName fields were found")
            raise CertificateError("no appropriate commonName or subjectAltName fields were found")

    ###
    ### End of Python Software Foundation Licensed code
    ###
    # End of Python Software Foundation Licensed code

    HAS_MATCH_HOSTNAME = True

@@ -551,9 +540,8 @@ def RedirectHandlerFactory(follow_redirects=None, validate_certs=True):
        if do_redirect:
            # be conciliant with URIs containing a space
            newurl = newurl.replace(' ', '%20')
            newheaders = dict((k,v) for k,v in req.headers.items()
                if k.lower() not in ("content-length", "content-type")
            )
            newheaders = dict((k, v) for k, v in req.headers.items()
                              if k.lower() not in ("content-length", "content-type"))
            try:
                # Python 2-3.3
                origin_req_host = req.get_origin_req_host()
@@ -660,7 +648,7 @@ class SSLValidationHandler(urllib_request.BaseHandler):
            dir_contents = os.listdir(path)
            for f in dir_contents:
                full_path = os.path.join(path, f)
                if os.path.isfile(full_path) and os.path.splitext(f)[1] in ('.crt','.pem'):
                if os.path.isfile(full_path) and os.path.splitext(f)[1] in ('.crt', '.pem'):
                    try:
                        cert_file = open(full_path, 'rb')
                        cert = cert_file.read()
@@ -738,7 +726,7 @@ class SSLValidationHandler(urllib_request.BaseHandler):
        if proxy_parts.get('scheme') == 'http':
            s.sendall(self.CONNECT_COMMAND % (self.hostname, self.port))
            if proxy_parts.get('username'):
                credentials = "%s:%s" % (proxy_parts.get('username',''), proxy_parts.get('password',''))
                credentials = "%s:%s" % (proxy_parts.get('username', ''), proxy_parts.get('password', ''))
                s.sendall(b('Proxy-Authorization: Basic %s\r\n') % base64.b64encode(to_bytes(credentials, errors='surrogate_or_strict')).strip())
            s.sendall(b('\r\n'))
            connect_result = b("")
@@ -767,7 +755,7 @@ class SSLValidationHandler(urllib_request.BaseHandler):
            ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL)
            match_hostname(ssl_s.getpeercert(), self.hostname)
            # close the ssl connection
            #ssl_s.unwrap()
            # ssl_s.unwrap()
            s.close()
        except (ssl.SSLError, CertificateError):
            e = get_exception()
@@ -923,7 +911,7 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True,

    data = to_bytes(data, nonstring='passthru')
    if method:
        if method.upper() not in ('OPTIONS','GET','HEAD','POST','PUT','DELETE','TRACE','CONNECT','PATCH'):
        if method.upper() not in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'TRACE', 'CONNECT', 'PATCH'):
            raise ConnectionError('invalid HTTP request method; %s' % method.upper())
        request = RequestWithMethod(url, method.upper(), data)
    else:
@@ -951,7 +939,7 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True,
            request.add_header(header, headers[header])

    urlopen_args = [request, None]
    if sys.version_info >= (2,6,0):
    if sys.version_info >= (2, 6, 0):
        # urlopen in python prior to 2.6.0 did not
        # have a timeout parameter
        urlopen_args.append(timeout)

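The multi-line messages above rely on implicit string-literal concatenation: adjacent literals inside one set of parentheses are joined at compile time, which removes the need for trailing backslashes while keeping each source line short. A small stand-alone sketch:

message = ("hostname %r "
           "doesn't match %r")
# the two literals compile to a single string
assert message == "hostname %r doesn't match %r"
print(message % ('example.com', 'example.org'))
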
@@ -15,6 +15,7 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

import os

try:
    from pyvcloud.vcloudair import VCA
    HAS_PYVCLOUD = True
@@ -29,16 +30,18 @@ LOGIN_HOST = {'vca': 'vca.vmware.com', 'vchs': 'vchs.vmware.com'}
DEFAULT_SERVICE_TYPE = 'vca'
DEFAULT_VERSION = '5.7'


class VcaError(Exception):

    def __init__(self, msg, **kwargs):
        self.kwargs = kwargs
        super(VcaError, self).__init__(msg)


def vca_argument_spec():
    return dict(
        username=dict(type='str', aliases=['user'], required=True),
        password=dict(type='str', aliases=['pass','passwd'], required=True, no_log=True),
        password=dict(type='str', aliases=['pass', 'passwd'], required=True, no_log=True),
        org=dict(),
        service_id=dict(),
        instance_id=dict(),
@@ -50,6 +53,7 @@ def vca_argument_spec():
        verify_certs=dict(type='bool', default=True)
    )


class VcaAnsibleModule(AnsibleModule):

    def __init__(self, *args, **kwargs):
@@ -193,7 +197,6 @@ class VcaAnsibleModule(AnsibleModule):
        self.exit_json(**kwargs)


# -------------------------------------------------------------
# 9/18/2015 @privateip
# All of the functions below here were migrated from the original
@@ -206,6 +209,7 @@ VCA_REQ_ARGS = ['instance_id', 'vdc_name']
VCHS_REQ_ARGS = ['service_id']
VCD_REQ_ARGS = []


def _validate_module(module):
    if not HAS_PYVCLOUD:
        module.fail_json(msg="python module pyvcloud is needed for this module")
@@ -237,6 +241,7 @@ def serialize_instances(instance_list):
        instances.append(dict(apiUrl=i['apiUrl'], instance_id=i['id']))
    return instances


def _vca_login(vca, password, instance):
    if not vca.login(password=password):
        raise VcaError("Login Failed: Please check username or password",
@@ -249,6 +254,7 @@ def _vca_login(vca, password, instance):

    return vca


def _vchs_login(vca, password, service, org):
    if not vca.login(password=password):
        raise VcaError("Login Failed: Please check username or password",
@@ -272,6 +278,7 @@ def _vcd_login(vca, password, org):
    if not vca.login(token=vca.token, org=org, org_url=vca.vcloud_session.org_url):
        raise VcaError("Failed to login to org", error=vca.response.content)


def vca_login(module):
    service_type = module.params.get('service_type')
    username = module.params.get('username')
@@ -323,8 +330,3 @@ def vca_login(module):
        module.fail_json(msg=e.message, **e.kwargs)

    return vca





@@ -21,6 +21,7 @@ __metaclass__ = type

from copy import deepcopy


class Attribute:

    def __init__(self, isa=None, private=False, default=None, required=False, listof=None, priority=0, class_type=None, always_post_validate=False,

@@ -200,13 +200,13 @@ class Base(with_metaclass(BaseMeta, object)):
        print("DUMPING OBJECT ------------------------------------------------------")
        print("%s- %s (%s, id=%s)" % (" " * depth, self.__class__.__name__, self, id(self)))
        if hasattr(self, '_parent') and self._parent:
            self._parent.dump_me(depth+2)
            self._parent.dump_me(depth + 2)
            dep_chain = self._parent.get_dep_chain()
            if dep_chain:
                for dep in dep_chain:
                    dep.dump_me(depth+2)
                    dep.dump_me(depth + 2)
        if hasattr(self, '_play') and self._play:
            self._play.dump_me(depth+2)
            self._play.dump_me(depth + 2)

    def preprocess_data(self, ds):
        ''' infrequently used method to do some pre-processing of legacy terms '''
@@ -405,12 +405,12 @@ class Base(with_metaclass(BaseMeta, object)):
                            )
                        value = value.split(',')
                    else:
                        value = [ value ]
                        value = [value]
                if attribute.listof is not None:
                    for item in value:
                        if not isinstance(item, attribute.listof):
                            raise AnsibleParserError("the field '%s' should be a list of %s,"
                                " but the item '%s' is a %s" % (name, attribute.listof, item, type(item)), obj=self.get_ds())
                            raise AnsibleParserError("the field '%s' should be a list of %s, "
                                                     "but the item '%s' is a %s" % (name, attribute.listof, item, type(item)), obj=self.get_ds())
                        elif attribute.required and attribute.listof == string_types:
                            if item is None or item.strip() == "":
                                raise AnsibleParserError("the field '%s' is required, and cannot have empty values" % (name,), obj=self.get_ds())
@@ -423,7 +423,7 @@ class Base(with_metaclass(BaseMeta, object)):
                    else:
                        # Making a list like this handles strings of
                        # text and bytes properly
                        value = [ value ]
                        value = [value]
                    if not isinstance(value, set):
                        value = set(value)
                elif attribute.isa == 'dict':
@@ -440,12 +440,12 @@ class Base(with_metaclass(BaseMeta, object)):
                setattr(self, name, value)

            except (TypeError, ValueError) as e:
                raise AnsibleParserError("the field '%s' has an invalid value (%s), and could not be converted to an %s."
                    " Error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds())
                raise AnsibleParserError("the field '%s' has an invalid value (%s), and could not be converted to an %s. "
                                         "The error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds())
            except (AnsibleUndefinedVariable, UndefinedError) as e:
                if templar._fail_on_undefined_errors and name != 'name':
                    raise AnsibleParserError("the field '%s' has an invalid value, which appears to include a variable that is undefined."
                        " The error was: %s" % (name,e), obj=self.get_ds())
                    raise AnsibleParserError("the field '%s' has an invalid value, which appears to include a variable that is undefined. "
                                             "The error was: %s" % (name, e), obj=self.get_ds())

        self._finalized = True

@@ -490,16 +490,16 @@ class Base(with_metaclass(BaseMeta, object)):
        '''

        if not isinstance(value, list):
            value = [ value ]
            value = [value]
        if not isinstance(new_value, list):
            new_value = [ new_value ]
            new_value = [new_value]

        if prepend:
            combined = new_value + value
        else:
            combined = value + new_value

        return [i for i,_ in itertools.groupby(combined) if i is not None]
        return [i for i, _ in itertools.groupby(combined) if i is not None]

    def dump_attrs(self):
        '''

@@ -29,6 +29,7 @@ except ImportError:
    from ansible.utils.display import Display
    display = Display()


class Become:

    # Privilege escalation
@@ -101,4 +102,3 @@ class Become:
            become_method = C.DEFAULT_BECOME_METHOD
        if become_user is None:
            become_user = C.DEFAULT_BECOME_USER

@@ -28,6 +28,7 @@ from ansible.playbook.helpers import load_list_of_tasks
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable


class Block(Base, Become, Conditional, Taggable):

    # main block fields containing the task lists
@@ -42,7 +43,7 @@ class Block(Base, Become, Conditional, Taggable):

    # for future consideration? this would be functionally
    # similar to the 'else' clause for exceptions
    #_otherwise = FieldAttribute(isa='list')
    # _otherwise = FieldAttribute(isa='list')

    def __init__(self, play=None, parent_block=None, role=None, task_include=None, use_handlers=False, implicit=False):
        self._play = play
@@ -385,4 +386,3 @@ class Block(Base, Become, Conditional, Taggable):
            return self._parent.all_parents_static()

        return True

@@ -41,6 +41,7 @@ DEFINED_REGEX = re.compile(r'(hostvars\[.+\]|[\w_]+)\s+(not\s+is|is|is\s+not)\s+
LOOKUP_REGEX = re.compile(r'lookup\s*\(')
VALID_VAR_REGEX = re.compile("^[_A-Za-z][_a-zA-Z0-9]*$")


class Conditional:

    '''
@@ -63,7 +64,7 @@ class Conditional:

    def _validate_when(self, attr, name, value):
        if not isinstance(value, list):
            setattr(self, name, [ value ])
            setattr(self, name, [value])

    def _get_attr_when(self):
        '''
@@ -234,7 +235,4 @@ class Conditional:
                # trigger the AnsibleUndefinedVariable exception again below
                raise
            except Exception as new_e:
                raise AnsibleUndefinedVariable(
                    "error while evaluating conditional (%s): %s" % (original, e)
                )

                raise AnsibleUndefinedVariable("error while evaluating conditional (%s): %s" % (original, e))

@@ -22,6 +22,7 @@ __metaclass__ = type
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.task import Task


class Handler(Task):

    _listen = FieldAttribute(isa='list')
@@ -41,7 +42,7 @@ class Handler(Task):
        return t.load_data(data, variable_manager=variable_manager, loader=loader)

    def flag_for_host(self, host):
        #assert instanceof(host, Host)
        # assert instanceof(host, Host)
        if host not in self._flagged_hosts:
            self._flagged_hosts.append(host)

@@ -19,9 +19,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

#from ansible.inventory.host import Host
from ansible.playbook.task_include import TaskInclude
# from ansible.inventory.host import Host
from ansible.playbook.handler import Handler
from ansible.playbook.task_include import TaskInclude


class HandlerTaskInclude(Handler, TaskInclude):

@@ -29,4 +30,3 @@ class HandlerTaskInclude(Handler, TaskInclude):
    def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
        t = HandlerTaskInclude(block=block, role=role, task_include=task_include)
        return t.load_data(data, variable_manager=variable_manager, loader=loader)

@@ -179,9 +179,9 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
                    include_target = templar.template(t.args['_raw_params'])
                except AnsibleUndefinedVariable:
                    raise AnsibleParserError(
                        "Error when evaluating variable in include name: %s.\n\n" \
                        "When using static includes, ensure that any variables used in their names are defined in vars/vars_files\n" \
                        "or extra-vars passed in from the command line. Static includes cannot use variables from inventory\n" \
                        "Error when evaluating variable in include name: %s.\n\n"
                        "When using static includes, ensure that any variables used in their names are defined in vars/vars_files\n"
                        "or extra-vars passed in from the command line. Static includes cannot use variables from inventory\n"
                        "sources like group or host vars." % t.args['_raw_params'],
                        obj=task_ds,
                        suppress_extended_error=True,
@@ -209,11 +209,11 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
                            C.DEFAULT_HANDLER_INCLUDES_STATIC and use_handlers:
                        raise
                    display.deprecated(
                        "Included file '%s' not found, however since this include is not " \
                        "explicitly marked as 'static: yes', we will try and include it dynamically " \
                        "later. In the future, this will be an error unless 'static: no' is used " \
                        "on the include task. If you do not want missing includes to be considered " \
                        "dynamic, use 'static: yes' on the include or set the global ansible.cfg " \
                        "Included file '%s' not found, however since this include is not "
                        "explicitly marked as 'static: yes', we will try and include it dynamically "
                        "later. In the future, this will be an error unless 'static: no' is used "
                        "on the include task. If you do not want missing includes to be considered "
                        "dynamic, use 'static: yes' on the include or set the global ansible.cfg "
                        "options to make all inclues static for tasks and/or handlers" % include_file, version="2.7"
                    )
                    task_list.append(t)
@@ -242,7 +242,7 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
                    if len(tags) > 0:
                        if len(ti_copy.tags) > 0:
                            raise AnsibleParserError(
                                "Include tasks should not specify tags in more than one way (both via args and directly on the task). " \
                                "Include tasks should not specify tags in more than one way (both via args and directly on the task). "
                                "Mixing styles in which tags are specified is prohibited for whole import hierarchy, not only for single import statement",
                                obj=task_ds,
                                suppress_extended_error=True,
@@ -275,7 +275,7 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
                        role=role,
                        task_include=None,
                        variable_manager=variable_manager,
                        loader=loader
                        loader=loader,
                    )

                    # 1. the user has set the 'static' option to false or true
@@ -293,9 +293,11 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
                            if not templar.is_template(ir.args[param]):
                                needs_templating = True
                                break
                    is_static = C.DEFAULT_TASK_INCLUDES_STATIC or \
                        (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
                    is_static = (
                        C.DEFAULT_TASK_INCLUDES_STATIC or
                        (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or
                        (not needs_templating and ir.all_parents_static() and not ir.loop)
                    )
                    display.debug('Determined that if include_role static is %s' % str(is_static))
                    if is_static:
                        # uses compiled list from object
@@ -331,4 +333,3 @@ def load_list_of_roles(ds, play, current_role_path=None, variable_manager=None,
        roles.append(i)

    return roles

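The is_static rewrite above swaps backslash line continuations for a parenthesized expression, the continuation style PEP8 recommends for long boolean conditions. A stand-alone sketch with placeholder flags (the constant names below are invented, not Ansible's):

TASK_INCLUDES_STATIC = False
HANDLER_INCLUDES_STATIC = True
use_handlers = True
needs_templating = False

is_static = (
    TASK_INCLUDES_STATIC or
    (use_handlers and HANDLER_INCLUDES_STATIC) or
    not needs_templating
)
assert is_static is True
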
@@ -30,6 +30,7 @@ except ImportError:
    from ansible.utils.display import Display
    display = Display()


class IncludedFile:

    def __init__(self, filename, args, task):
@@ -69,7 +70,7 @@ class IncludedFile:
                    continue
                include_results = res._result['results']
            else:
                include_results = [ res._result ]
                include_results = [res._result]

            for include_result in include_results:
                # if the task result was skipped or failed, continue

@@ -36,4 +36,3 @@ class LoopControl(Base):
    def load(data, variable_manager=None, loader=None):
        t = LoopControl()
        return t.load_data(data, variable_manager=variable_manager, loader=loader)

@@ -20,10 +20,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible import constants as C

from ansible.errors import AnsibleParserError
from ansible.module_utils.six import string_types

from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
@@ -132,8 +130,8 @@ class Play(Base, Taggable, Become):
            # this should never happen, but error out with a helpful message
            # to the user if it does...
            if 'remote_user' in ds:
                raise AnsibleParserError("both 'user' and 'remote_user' are set for %s."
                    " The use of 'user' is deprecated, and should be removed" % self.get_name(), obj=ds)
                raise AnsibleParserError("both 'user' and 'remote_user' are set for %s. "
                                         "The use of 'user' is deprecated, and should be removed" % self.get_name(), obj=ds)

            ds['remote_user'] = ds['user']
            del ds['user']
@@ -207,14 +205,14 @@ class Play(Base, Taggable, Become):
            display.deprecated("Using the 'short form' for vars_prompt has been deprecated", version="2.7")
            for vname, prompt in prompt_data.items():
                vars_prompts.append(dict(
                    name = vname,
                    prompt = prompt,
                    default = None,
                    private = None,
                    confirm = None,
                    encrypt = None,
                    salt_size = None,
                    salt = None,
                    name=vname,
                    prompt=prompt,
                    default=None,
                    private=None,
                    confirm=None,
                    encrypt=None,
                    salt_size=None,
                    salt=None,
                ))
        else:
            vars_prompts.append(prompt_data)

@@ -54,42 +54,42 @@ except ImportError:
# in variable names.

MAGIC_VARIABLE_MAPPING = dict(
    connection = ('ansible_connection',),
    remote_addr = ('ansible_ssh_host', 'ansible_host'),
    remote_user = ('ansible_ssh_user', 'ansible_user'),
    port = ('ansible_ssh_port', 'ansible_port'),
    timeout = ('ansible_ssh_timeout', 'ansible_timeout'),
    ssh_executable = ('ansible_ssh_executable',),
    accelerate_port = ('ansible_accelerate_port',),
    password = ('ansible_ssh_pass', 'ansible_password'),
    private_key_file = ('ansible_ssh_private_key_file', 'ansible_private_key_file'),
    pipelining = ('ansible_ssh_pipelining', 'ansible_pipelining'),
    shell = ('ansible_shell_type',),
    network_os = ('ansible_network_os',),
    become = ('ansible_become',),
    become_method = ('ansible_become_method',),
    become_user = ('ansible_become_user',),
    become_pass = ('ansible_become_password','ansible_become_pass'),
    become_exe = ('ansible_become_exe',),
    become_flags = ('ansible_become_flags',),
    ssh_common_args = ('ansible_ssh_common_args',),
    docker_extra_args= ('ansible_docker_extra_args',),
    sftp_extra_args = ('ansible_sftp_extra_args',),
    scp_extra_args = ('ansible_scp_extra_args',),
    ssh_extra_args = ('ansible_ssh_extra_args',),
    ssh_transfer_method = ('ansible_ssh_transfer_method',),
    sudo = ('ansible_sudo',),
    sudo_user = ('ansible_sudo_user',),
    sudo_pass = ('ansible_sudo_password', 'ansible_sudo_pass'),
    sudo_exe = ('ansible_sudo_exe',),
    sudo_flags = ('ansible_sudo_flags',),
    su = ('ansible_su',),
    su_user = ('ansible_su_user',),
    su_pass = ('ansible_su_password', 'ansible_su_pass'),
    su_exe = ('ansible_su_exe',),
    su_flags = ('ansible_su_flags',),
    executable = ('ansible_shell_executable',),
    module_compression = ('ansible_module_compression',),
    connection=('ansible_connection', ),
    remote_addr=('ansible_ssh_host', 'ansible_host'),
    remote_user=('ansible_ssh_user', 'ansible_user'),
    port=('ansible_ssh_port', 'ansible_port'),
    timeout=('ansible_ssh_timeout', 'ansible_timeout'),
    ssh_executable=('ansible_ssh_executable', ),
    accelerate_port=('ansible_accelerate_port', ),
    password=('ansible_ssh_pass', 'ansible_password'),
    private_key_file=('ansible_ssh_private_key_file', 'ansible_private_key_file'),
    pipelining=('ansible_ssh_pipelining', 'ansible_pipelining'),
    shell=('ansible_shell_type', ),
    network_os=('ansible_network_os', ),
    become=('ansible_become', ),
    become_method=('ansible_become_method', ),
    become_user=('ansible_become_user', ),
    become_pass=('ansible_become_password', 'ansible_become_pass'),
    become_exe=('ansible_become_exe', ),
    become_flags=('ansible_become_flags', ),
    ssh_common_args=('ansible_ssh_common_args', ),
    docker_extra_args=('ansible_docker_extra_args', ),
    sftp_extra_args=('ansible_sftp_extra_args', ),
    scp_extra_args=('ansible_scp_extra_args', ),
    ssh_extra_args=('ansible_ssh_extra_args', ),
    ssh_transfer_method=('ansible_ssh_transfer_method', ),
    sudo=('ansible_sudo', ),
    sudo_user=('ansible_sudo_user', ),
    sudo_pass=('ansible_sudo_password', 'ansible_sudo_pass'),
    sudo_exe=('ansible_sudo_exe', ),
    sudo_flags=('ansible_sudo_flags', ),
    su=('ansible_su', ),
    su_user=('ansible_su_user', ),
    su_pass=('ansible_su_password', 'ansible_su_pass'),
    su_exe=('ansible_su_exe', ),
    su_flags=('ansible_su_flags', ),
    executable=('ansible_shell_executable', ),
    module_compression=('ansible_module_compression', ),
)

b_SU_PROMPT_LOCALIZATIONS = [
@@ -155,6 +155,7 @@ RESET_VARS = (
    'ansible_port',
)


class PlayContext(Base):

    '''
@@ -180,7 +181,7 @@ class PlayContext(Base):
    _ssh_extra_args = FieldAttribute(isa='string')
    _ssh_executable = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_EXECUTABLE)
    _ssh_transfer_method = FieldAttribute(isa='string', default=C.DEFAULT_SSH_TRANSFER_METHOD)
    _connection_lockfd= FieldAttribute(isa='int')
    _connection_lockfd = FieldAttribute(isa='int')
    _pipelining = FieldAttribute(isa='bool', default=C.ANSIBLE_SSH_PIPELINING)
    _accelerate = FieldAttribute(isa='bool', default=False)
    _accelerate_ipv6 = FieldAttribute(isa='bool', default=False, always_post_validate=True)
@@ -227,8 +228,8 @@ class PlayContext(Base):
        if passwords is None:
            passwords = {}

        self.password = passwords.get('conn_pass','')
        self.become_pass = passwords.get('become_pass','')
        self.password = passwords.get('conn_pass', '')
        self.become_pass = passwords.get('become_pass', '')

        self.prompt = ''
        self.success_key = ''
@@ -243,7 +244,6 @@ class PlayContext(Base):
        if play:
            self.set_play(play)


    def set_play(self, play):
        '''
        Configures this connection information instance with data from
@@ -291,10 +291,10 @@ class PlayContext(Base):

        # get ssh options FIXME: make these common to all connections
        for flag in ['ssh_common_args', 'docker_extra_args', 'sftp_extra_args', 'scp_extra_args', 'ssh_extra_args']:
            setattr(self, flag, getattr(options,flag, ''))
            setattr(self, flag, getattr(options, flag, ''))

        # general flags (should we move out?)
        for flag in ['connection','remote_user', 'private_key_file', 'verbosity', 'force_handlers', 'step', 'start_at_task', 'diff']:
        for flag in ['connection', 'remote_user', 'private_key_file', 'verbosity', 'force_handlers', 'step', 'start_at_task', 'diff']:
            attribute = getattr(options, flag, False)
            if attribute:
                setattr(self, flag, attribute)
@@ -464,7 +464,6 @@ class PlayContext(Base):
        if task.check_mode is not None:
            new_info.check_mode = task.check_mode


        return new_info

    def make_become_cmd(self, cmd, executable=None):
@@ -490,18 +489,22 @@ class PlayContext(Base):
            command = success_cmd

            # set executable to use for the privilege escalation method, with various overrides
            exe = self.become_exe or \
                getattr(self, '%s_exe' % self.become_method, None) or \
                C.DEFAULT_BECOME_EXE or \
                getattr(C, 'DEFAULT_%s_EXE' % self.become_method.upper(), None) or \
            exe = (
                self.become_exe or
                getattr(self, '%s_exe' % self.become_method, None) or
                C.DEFAULT_BECOME_EXE or
                getattr(C, 'DEFAULT_%s_EXE' % self.become_method.upper(), None) or
                self.become_method
            )

            # set flags to use for the privilege escalation method, with various overrides
            flags = self.become_flags or \
                getattr(self, '%s_flags' % self.become_method, None) or \
                C.DEFAULT_BECOME_FLAGS or \
                getattr(C, 'DEFAULT_%s_FLAGS' % self.become_method.upper(), None) or \
            flags = (
                self.become_flags or
                getattr(self, '%s_flags' % self.become_method, None) or
                C.DEFAULT_BECOME_FLAGS or
                getattr(C, 'DEFAULT_%s_FLAGS' % self.become_method.upper(), None) or
                ''
            )

            if self.become_method == 'sudo':
                # If we have a password, we run sudo with a randomly-generated
@@ -517,11 +520,10 @@ class PlayContext(Base):
                # force quick error if password is required but not supplied, should prevent sudo hangs.
                if self.become_pass:
                    prompt = '[sudo via ansible, key=%s] password: ' % randbits
                    becomecmd = '%s %s -p "%s" -u %s %s' % (exe, flags.replace('-n',''), prompt, self.become_user, command)
                    becomecmd = '%s %s -p "%s" -u %s %s' % (exe, flags.replace('-n', ''), prompt, self.become_user, command)
                else:
                    becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, command)


            elif self.become_method == 'su':

                # passing code ref to examine prompt as simple string comparisson isn't good enough with su
@@ -537,7 +539,7 @@ class PlayContext(Base):

            elif self.become_method == 'pbrun':

                prompt='Password:'
                prompt = 'Password:'
                becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, success_cmd)

            elif self.become_method == 'ksu':
@@ -575,7 +577,7 @@ class PlayContext(Base):
                if self.become_user:
                    flags += ' -u %s ' % self.become_user

                #FIXME: make shell independent
                # FIXME: make shell independent
                becomecmd = '%s %s echo %s && %s %s env ANSIBLE=true %s' % (exe, flags, success_key, exe, flags, cmd)

            elif self.become_method == 'dzdo':
@@ -591,7 +593,7 @@ class PlayContext(Base):

                exe = self.become_exe or 'pmrun'

                prompt='Enter UPM user password:'
                prompt = 'Enter UPM user password:'
                becomecmd = '%s %s %s' % (exe, flags, shlex_quote(command))

            else:
@@ -645,4 +647,3 @@ class PlayContext(Base):
            self.connection = conn_type

        return self._attributes['connection']

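The exe and flags rewrites above preserve a "first truthy value wins" or-chain for picking a privilege-escalation executable, only re-wrapped in parentheses. Reduced to a sketch with invented placeholders:

become_exe = None           # per-task override, unset here
method_exe = None           # per-method override, unset here
DEFAULT_BECOME_EXE = None   # global default, unset here
become_method = 'sudo'

exe = (
    become_exe or
    method_exe or
    DEFAULT_BECOME_EXE or
    become_method           # final fallback: the method name itself
)
assert exe == 'sudo'
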
@@ -113,7 +113,7 @@ class PlaybookInclude(Base, Conditional, Taggable):
        if isinstance(ds, AnsibleBaseYAMLObject):
            new_ds.ansible_pos = ds.ansible_pos

        for (k,v) in iteritems(ds):
        for (k, v) in iteritems(ds):
            if k == 'include':
                self._preprocess_include(ds, new_ds, k, v)
            else:
@@ -152,4 +152,3 @@ class PlaybookInclude(Base, Conditional, Taggable):
        if 'vars' in new_ds:
            raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
        new_ds['vars'] = params

@@ -42,6 +42,7 @@ __all__ = ['Role', 'hash_params']
# in a static method. This is also used in the base class for
# strategies (ansible/plugins/strategy/__init__.py)


def hash_params(params):
    """
    Construct a data structure of parameters that is hashable.
@@ -209,7 +210,7 @@ class Role(Base, Become, Conditional, Taggable):
            try:
                self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader, variable_manager=self._variable_manager)
            except AssertionError:
                raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks" % self._role_name , obj=task_data)
                raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks" % self._role_name, obj=task_data)

        handler_data = self._load_role_yaml('handlers')
        if handler_data:
@@ -217,7 +218,7 @@ class Role(Base, Become, Conditional, Taggable):
                self._handler_blocks = load_list_of_blocks(handler_data, play=self._play, role=self, use_handlers=True, loader=self._loader,
                                                           variable_manager=self._variable_manager)
            except AssertionError:
                raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks" % self._role_name , obj=handler_data)
                raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks" % self._role_name, obj=handler_data)

        # vars and default vars are regular dictionaries
        self._role_vars = self._load_role_yaml('vars', main=self._from_files.get('vars'))
@@ -239,7 +240,7 @@ class Role(Base, Become, Conditional, Taggable):
            if self._loader.path_exists(main_file):
                return self._loader.load_from_file(main_file)
            elif main is not None:
                raise AnsibleParserError("Could not find specified file in role: %s/%s" % (subdir,main))
                raise AnsibleParserError("Could not find specified file in role: %s/%s" % (subdir, main))
        return None

    def _resolve_main(self, basepath, main=None):
@@ -286,7 +287,6 @@ class Role(Base, Become, Conditional, Taggable):

        return deps

    #------------------------------------------------------------------------------
    # other functions

    def add_parent(self, parent_role):
@@ -488,4 +488,3 @@ class Role(Base, Become, Conditional, Taggable):
            parent.set_loader(loader)
        for dep in self.get_direct_dependencies():
            dep.set_loader(loader)

@@ -59,7 +59,7 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
        self._role_basedir = role_basedir
        self._role_params = dict()

    #def __repr__(self):
    # def __repr__(self):
    #     return 'ROLEDEF: ' + self._attributes.get('role', '<no name set>')

    @staticmethod
@@ -205,8 +205,8 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
            # remember to update it manually.
            if key not in base_attribute_names or key in ('connection', 'port', 'remote_user'):
                if key in ('connection', 'port', 'remote_user'):
                    display.deprecated("Using '%s' as a role param has been deprecated. " % key + \
                        "In the future, these values should be entered in the `vars:` " + \
                    display.deprecated("Using '%s' as a role param has been deprecated. " % key +
                                       "In the future, these values should be entered in the `vars:` " +
                                       "section for roles, but for now we'll store it as both a param and an attribute.", version="2.7")
                role_def[key] = value
            # this key does not match a field attribute, so it must be a role param

@@ -57,4 +57,3 @@ class RoleInclude(RoleDefinition):

        ri = RoleInclude(play=play, role_basedir=current_role_path, variable_manager=variable_manager, loader=loader)
        return ri.load_data(data, variable_manager=variable_manager, loader=loader)

@@ -103,8 +103,8 @@ class RoleMetadata(Base):

    def serialize(self):
        return dict(
            allow_duplicates = self._allow_duplicates,
            dependencies = self._dependencies,
            allow_duplicates=self._allow_duplicates,
            dependencies=self._dependencies,
        )

    def deserialize(self, data):

@@ -46,6 +46,7 @@ except ImportError:
    from ansible.utils.display import Display
    display = Display()


class RoleRequirement(RoleDefinition):

    """
@@ -193,7 +194,7 @@ class RoleRequirement(RoleDefinition):
            raise AnsibleError("error executing: %s" % " ".join(clone_cmd))
        rc = popen.wait()
        if rc != 0:
            raise AnsibleError ("- command %s failed in directory %s (rc=%s)" % (' '.join(clone_cmd), tempdir, rc))
            raise AnsibleError("- command %s failed in directory %s (rc=%s)" % (' '.join(clone_cmd), tempdir, rc))

        if scm == 'git' and version:
            checkout_cmd = [scm, 'checkout', version]
@@ -228,4 +229,3 @@ class RoleRequirement(RoleDefinition):

        shutil.rmtree(tempdir, ignore_errors=True)
        return temp_file.name

@@ -60,7 +60,6 @@ class IncludeRole(Task):
        self._parent_role = role
        self._role_name = None


    def get_block_list(self, play=None, variable_manager=None, loader=None):

        # only need play passed in when dynamic
@@ -99,7 +98,7 @@ class IncludeRole(Task):

        ir = IncludeRole(block, role, task_include=task_include).load_data(data, variable_manager=variable_manager, loader=loader)

        ### Process options
        # Process options
        # name is needed, or use role as alias
        ir._role_name = ir.args.get('name', ir.args.get('role'))
        if ir._role_name is None:
@@ -107,11 +106,11 @@ class IncludeRole(Task):

        # build options for role includes
        for key in ['tasks', 'vars', 'defaults']:
            from_key ='%s_from' % key
            from_key = '%s_from' % key
            if ir.args.get(from_key):
                ir._from_files[key] = basename(ir.args.get(from_key))

        #FIXME: find a way to make this list come from object ( attributes does not work as per below)
        # FIXME: find a way to make this list come from object ( attributes does not work as per below)
        # manual list as otherwise the options would set other task parameters we don't want.
        for option in ['private', 'allow_duplicates']:
            if option in ir.args:

@@ -30,7 +30,7 @@ from ansible.template import Templar
class Taggable:

    untagged = frozenset(['untagged'])
    _tags = FieldAttribute(isa='list', default=[], listof=(string_types,int))
    _tags = FieldAttribute(isa='list', default=[], listof=(string_types, int))

    def __init__(self):
        super(Taggable, self).__init__()
@@ -41,9 +41,9 @@ class Taggable:
        elif isinstance(ds, string_types):
            value = ds.split(',')
            if isinstance(value, list):
                return [ x.strip() for x in value ]
                return [x.strip() for x in value]
            else:
                return [ ds ]
                return [ds]
        else:
            raise AnsibleError('tags must be specified as a list', obj=ds)

@@ -73,7 +73,7 @@ class Taggable:
                else:
                    tags = set([tags])
            else:
                tags = set([i for i,_ in itertools.groupby(tags)])
                tags = set([i for i, _ in itertools.groupby(tags)])
        else:
            # this makes isdisjoint work for untagged
            tags = self.untagged

@@ -125,10 +125,10 @@ class Task(Base, Conditional, Taggable, Become):
            return ds
        elif isinstance(ds, dict):
            buf = ""
            for (k,v) in iteritems(ds):
            for (k, v) in iteritems(ds):
                if k.startswith('_'):
                    continue
                buf = buf + "%s=%s " % (k,v)
                buf = buf + "%s=%s " % (k, v)
            buf = buf.strip()
            return buf

@@ -203,7 +203,7 @@ class Task(Base, Conditional, Taggable, Become):
        else:
            new_ds['vars'] = dict()

        for (k,v) in iteritems(ds):
        for (k, v) in iteritems(ds):
            if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
                # we don't want to re-assign these values, which were
                # determined by the ModuleArgsParser() above
@@ -216,9 +216,9 @@ class Task(Base, Conditional, Taggable, Become):
                # here, and show a deprecation message as we will remove this at
                # some point in the future.
                if action == 'include' and k not in self._valid_attrs and k not in self.DEPRECATED_ATTRIBUTES:
                    display.deprecated("Specifying include variables at the top-level of the task is deprecated."
                        " Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
                        " for currently supported syntax regarding included files and variables", version="2.7")
                    display.deprecated("Specifying include variables at the top-level of the task is deprecated. "
                                       "Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n "
                                       "for currently supported syntax regarding included files and variables", version="2.7")
                new_ds['vars'][k] = v
            else:
                new_ds[k] = v

@@ -76,4 +76,3 @@ class TaskInclude(Task):
                del all_vars['when']

        return all_vars

@@ -114,16 +114,16 @@ class PluginLoader:
        '''

        return dict(
            class_name = self.class_name,
            base_class = self.base_class,
            package = self.package,
            config = self.config,
            subdir = self.subdir,
            aliases = self.aliases,
            _extra_dirs = self._extra_dirs,
            _searched_paths = self._searched_paths,
            PATH_CACHE = PATH_CACHE[self.class_name],
            PLUGIN_PATH_CACHE = PLUGIN_PATH_CACHE[self.class_name],
            class_name=self.class_name,
            base_class=self.base_class,
            package=self.package,
            config=self.config,
            subdir=self.subdir,
            aliases=self.aliases,
            _extra_dirs=self._extra_dirs,
            _searched_paths=self._searched_paths,
            PATH_CACHE=PATH_CACHE[self.class_name],
            PLUGIN_PATH_CACHE=PLUGIN_PATH_CACHE[self.class_name],
        )

    def format_paths(self, paths):
@@ -145,7 +145,7 @@ class PluginLoader:
        for root, subdirs, files in os.walk(dir, followlinks=True):
            if '__init__.py' in files:
                for x in subdirs:
                    results.append(os.path.join(root,x))
                    results.append(os.path.join(root, x))
        return results

    def _get_package_paths(self, subdirs=True):
@@ -271,7 +271,7 @@ class PluginLoader:

                # HACK: We have no way of executing python byte
                # compiled files as ansible modules so specifically exclude them
                ### FIXME: I believe this is only correct for modules and
                # FIXME: I believe this is only correct for modules and
                # module_utils. For all other plugins we want .pyc and .pyo should
                # bew valid
                if full_path.endswith(('.pyc', '.pyo')):
@@ -550,4 +550,3 @@ vars_loader = PluginLoader(
    C.DEFAULT_VARS_PLUGIN_PATH,
    'vars_plugins',
)

@@ -177,7 +177,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         if self._task.environment is not None:
             environments = self._task.environment
             if not isinstance(environments, list):
-                environments = [ environments ]
+                environments = [environments]

             # the environments as inherited need to be reversed, to make
             # sure we merge in the parent's values first so those in the

@@ -213,7 +213,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
        Determines if we are required and can do pipelining
        '''
        if self._connection.always_pipeline_modules:
-           return True #eg, winrm
+           return True  # eg, winrm

        # any of these require a true
        for condition in [

@@ -263,16 +263,16 @@ class ActionBase(with_metaclass(ABCMeta, object)):
             if self._play_context.verbosity > 3:
                 output = u'SSH encountered an unknown error. The output was:\n%s%s' % (result['stdout'], result['stderr'])
             else:
-                output = (u'SSH encountered an unknown error during the connection.'
-                          ' We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue')
+                output = (u'SSH encountered an unknown error during the connection. '
+                          'We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue')

         elif u'No space left on device' in result['stderr']:
             output = result['stderr']
         else:
-            output = ('Authentication or permission failure.'
-                      ' In some cases, you may have been able to authenticate and did not have permissions on the target directory.'
-                      ' Consider changing the remote temp path in ansible.cfg to a path rooted in "/tmp".'
-                      ' Failed command was: %s, exited with result %d' % (cmd, result['rc']))
+            output = ('Authentication or permission failure. '
+                      'In some cases, you may have been able to authenticate and did not have permissions on the target directory. '
+                      'Consider changing the remote temp path in ansible.cfg to a path rooted in "/tmp". '
+                      'Failed command was: %s, exited with result %d' % (cmd, result['rc']))
         if 'stdout' in result and result['stdout'] != u'':
             output = output + u": %s" % result['stdout']
         raise AnsibleConnectionFailure(output)

@@ -309,8 +309,8 @@ class ActionBase(with_metaclass(ABCMeta, object)):

             tmp_rm_data = self._parse_returned_data(tmp_rm_res)
             if tmp_rm_data.get('rc', 0) != 0:
-                display.warning('Error deleting remote temporary files (rc: {0}, stderr: {1})'.format(tmp_rm_res.get('rc'),
-                                tmp_rm_res.get('stderr', 'No error string available.')))
+                display.warning('Error deleting remote temporary files (rc: %s, stderr: %s})'
+                                % (tmp_rm_res.get('rc'), tmp_rm_res.get('stderr', 'No error string available.')))

     def _transfer_file(self, local_path, remote_path):
         self._connection.put_file(local_path, remote_path)

@@ -408,7 +408,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
             setfacl_mode = 'r-x'
         else:
             chmod_mode = 'rX'
-            ### Note: this form fails silently on freebsd. We currently
+            # NOTE: this form fails silently on freebsd. We currently
             # never call _fixup_perms2() with execute=False but if we
             # start to we'll have to fix this.
             setfacl_mode = 'r-X'

@@ -426,22 +426,23 @@ class ActionBase(with_metaclass(ABCMeta, object)):
                 res = self._remote_chown(remote_paths, self._play_context.become_user)
                 if res['rc'] != 0 and remote_user == 'root':
                     # chown failed even if remove_user is root
-                    raise AnsibleError('Failed to change ownership of the temporary files Ansible needs to create despite connecting as root.'
-                                       ' Unprivileged become user would be unable to read the file.')
+                    raise AnsibleError('Failed to change ownership of the temporary files Ansible needs to create despite connecting as root. '
+                                       'Unprivileged become user would be unable to read the file.')
                 elif res['rc'] != 0:
                     if C.ALLOW_WORLD_READABLE_TMPFILES:
                         # chown and fs acls failed -- do things this insecure
                         # way only if the user opted in in the config file
-                        display.warning('Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user.'
-                                        ' This may be insecure. For information on securing this, see'
-                                        ' https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user')
+                        display.warning('Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user. '
+                                        'This may be insecure. For information on securing this, see '
+                                        'https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user')
                         res = self._remote_chmod(remote_paths, 'a+%s' % chmod_mode)
                         if res['rc'] != 0:
                             raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
                     else:
-                        raise AnsibleError('Failed to set permissions on the temporary files Ansible needs to create when becoming an unprivileged user'
-                                           ' (rc: {0}, err: {1}). For information on working around this,'
-                                           ' see https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'.format(res['rc'], to_native(res['stderr'])))
+                        raise AnsibleError('Failed to set permissions on the temporary files Ansible needs to create when becoming an unprivileged user '
+                                           '(rc: %s, err: %s}). For information on working around this, see '
+                                           'https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'
+                                           % (res['rc'], to_native(res['stderr'])))
         elif execute:
             # Can't depend on the file being transferred with execute permissions.
             # Only need user perms because no become was used here

@@ -479,7 +480,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         '''
         Get information from remote file.
         '''
-        module_args=dict(
+        module_args = dict(
             path=path,
             follow=follow,
             get_md5=False,

@@ -611,8 +612,6 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         # give the module the socket for persistent connections
         module_args['_ansible_socket'] = task_vars.get('ansible_socket')

-
-
     def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True, wrap_async=False):
         '''
         Transfer and run a module along with its arguments.

@@ -641,7 +640,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         if not self._is_pipelining_enabled(module_style, wrap_async):

             # we might need remote tmp dir
-            if not tmp or not 'tmp' in tmp:
+            if not tmp or 'tmp' not in tmp:
                 tmp = self._make_tmp_path()

             remote_module_filename = self._connection._shell.get_remote_filename(module_path)

@@ -661,7 +660,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
                 # we need to dump the module args to a k=v string in a file on
                 # the remote system, which can be read and parsed by the module
                 args_data = ""
-                for k,v in iteritems(module_args):
+                for k, v in iteritems(module_args):
                     args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
                 self._transfer_data(args_file_path, args_data)
             elif module_style in ('non_native_want_json', 'binary'):

@@ -710,7 +709,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
             if not self._should_remove_tmp_path(tmp):
                 async_cmd.append("-preserve_tmp")

-            cmd= " ".join(to_text(x) for x in async_cmd)
+            cmd = " ".join(to_text(x) for x in async_cmd)

         else:

@@ -736,7 +735,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         # Fix permissions of the tmp path and tmp files. This should be called after all files have been transferred.
         if remote_files:
             # remove none/empty
-            remote_files = [ x for x in remote_files if x]
+            remote_files = [x for x in remote_files if x]
             self._fixup_perms2(remote_files, self._play_context.remote_user)

         # actually execute

@@ -745,7 +744,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         # parse the main result
         data = self._parse_returned_data(res)

-        #NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
+        # NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
         # get internal info before cleaning
         tmpdir_delete = (not data.pop("_ansible_suppress_tmpdir_delete", False) and wrap_async)

@@ -756,7 +755,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         if (self._play_context.become and self._play_context.become_user != 'root') and not persist_files and delete_remote_tmp or tmpdir_delete:
             self._remove_tmp_path(tmp)

-        #FIXME: for backwards compat, figure out if still makes sense
+        # FIXME: for backwards compat, figure out if still makes sense
         if wrap_async:
             data['changed'] = True

@@ -784,7 +783,6 @@ class ActionBase(with_metaclass(ABCMeta, object)):
             if key in data and not data[key]:
                 del data[key]

-
     def _clean_returned_data(self, data):
         remove_keys = set()
         fact_keys = set(data.keys())

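One semantic-looking change in this file is actually pure style: `not 'tmp' in tmp` and `'tmp' not in tmp` compile to the identical membership test; pycodestyle's E713 simply prefers the latter for readability. A standalone sketch (illustrative path value):

    # 'x not in y' and 'not x in y' are the same test; E713 prefers the first.
    tmp_path = "/home/user/.ansible/tmp/ansible-tmp-1500000000.12-1234"
    assert ('tmp' not in tmp_path) == (not 'tmp' in tmp_path)  # noqa: E713 (left for comparison)
    print('tmp' in tmp_path)  # True
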
@@ -22,7 +22,6 @@ __metaclass__ = type
 from ansible.plugins.action import ActionBase
 from ansible.plugins.action.net_config import ActionModule as NetActionModule

-
 class ActionModule(NetActionModule, ActionBase):
     pass

@@ -22,5 +22,6 @@ __metaclass__ = type
 from ansible.plugins.action import ActionBase
 from ansible.plugins.action.net_template import ActionModule as NetActionModule

+
 class ActionModule(NetActionModule, ActionBase):
     pass

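The two tiny hunks above pull in opposite directions for the same reason: PEP8's blank-line rules want exactly two blank lines before a top-level class (E302) and no longer runs of blanks (E303), so one file loses a blank line and the other gains one. A minimal sketch of the layout pycodestyle expects:

    import os


    # exactly two blank lines separate top-level code from a class (E302);
    # a run of three or more blanks anywhere triggers E303.
    class ActionModule(object):
        pass


    print(os.name, ActionModule)
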
@@ -159,7 +159,7 @@ class ActionModule(ActionBase):
         # fix file permissions when the copy is done as a different user
         self._fixup_perms2((tmp, remote_path))

-        new_module_args.update( dict( src=xfered,))
+        new_module_args.update(dict(src=xfered,))

         res = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False)
         if diff:

@@ -27,6 +27,7 @@ import urlparse
 from ansible.module_utils._text import to_text
 from ansible.plugins.action.eos import ActionModule as _ActionModule

+
 class ActionModule(_ActionModule):

     def run(self, tmp=None, task_vars=None):

@@ -120,7 +120,7 @@ class ActionModule(ActionBase):
                 target_name = self._play_context.remote_addr
             dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)

-        dest = dest.replace("//","/")
+        dest = dest.replace("//", "/")

         if remote_checksum in ('0', '1', '2', '3', '4', '5'):
             result['changed'] = False

@@ -23,7 +23,7 @@ from ansible.plugins.action import ActionBase
 class ActionModule(ActionBase):
     ''' Create inventory groups based on variables '''

-    ### We need to be able to modify the inventory
+    # We need to be able to modify the inventory
     TRANSFERS_FILES = False

     def run(self, tmp=None, task_vars=None):

@@ -38,7 +38,7 @@ class ActionModule(ActionBase):
             return result

         group_name = self._task.args.get('key')
-        group_name = group_name.replace(' ','-')
+        group_name = group_name.replace(' ', '-')

         result['changed'] = False
         result['add_group'] = group_name

@@ -27,6 +27,7 @@ import urlparse
 from ansible.module_utils._text import to_text
 from ansible.plugins.action.ios import ActionModule as _ActionModule

+
 class ActionModule(_ActionModule):

     def run(self, tmp=None, task_vars=None):

@@ -27,6 +27,7 @@ import urlparse
 from ansible.module_utils._text import to_text
 from ansible.plugins.action.iosxr import ActionModule as _ActionModule

+
 class ActionModule(_ActionModule):

     def run(self, tmp=None, task_vars=None):

@@ -100,4 +101,3 @@ class ActionModule(_ActionModule):
             searchpath.append(os.path.dirname(source))
         self._templar.environment.loader.searchpath = searchpath
         self._task.args['src'] = self._templar.template(template_data)
-

@@ -112,7 +112,7 @@ class ActionModule(_ActionModule):
     path = unfrackpath("$HOME/.ansible/pc")
     # use play_context.connection instead of play_context.port to avoid
     # collision if netconf is listening on port 22
-    #cp = ssh._create_control_path(play_context.remote_addr, play_context.connection, play_context.remote_user)
+    # cp = ssh._create_control_path(play_context.remote_addr, play_context.connection, play_context.remote_user)
     cp = ssh._create_control_path(play_context.remote_addr, play_context.port, play_context.remote_user)
     return cp % dict(directory=path)

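The control-path hunk above relies on old-style %-formatting with a mapping: `cp` is a template containing a `%(directory)s` placeholder that `cp % dict(directory=path)` fills in. A small standalone sketch of that pattern (the template string here is hypothetical, not the one `_create_control_path` generates):

    # %-formatting with a dict substitutes named placeholders;
    # doubled %% survives as a literal % in the result.
    template = "%(directory)s/ansible-ssh-%%h-%%p-%%r"  # hypothetical control path template
    path = "/home/user/.ansible/pc"
    print(template % dict(directory=path))
    # -> /home/user/.ansible/pc/ansible-ssh-%h-%p-%r
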
@@ -27,6 +27,7 @@ import urlparse
 from ansible.module_utils._text import to_text
 from ansible.plugins.action.junos import ActionModule as _ActionModule

+
 class ActionModule(_ActionModule):

     def run(self, tmp=None, task_vars=None):

@@ -27,6 +27,7 @@ import urlparse
 from ansible.module_utils._text import to_text
 from ansible.plugins.action.nxos import ActionModule as _ActionModule

+
 class ActionModule(_ActionModule):

     def run(self, tmp=None, task_vars=None):

@@ -22,7 +22,6 @@ __metaclass__ = type
 from ansible.plugins.action import ActionBase
 from ansible.plugins.action.net_config import ActionModule as NetActionModule

-
 class ActionModule(NetActionModule, ActionBase):
     pass

@@ -48,5 +48,3 @@ class ActionModule(NetActionModule, ActionBase):
             del result['_backup']

         return result
-
-

@@ -61,7 +61,7 @@ class ActionModule(ActionBase):
         tmp_src = self._connection._shell.join_path(tmp, os.path.basename(src))
         self._transfer_file(src, tmp_src)

-        self._fixup_perms2((tmp, tmp_src) )
+        self._fixup_perms2((tmp, tmp_src))

         new_module_args = self._task.args.copy()
         new_module_args.update(

@@ -59,13 +59,13 @@ class ActionModule(ActionBase):
         prompt = None
         seconds = None
         result.update(dict(
-            changed = False,
-            rc = 0,
-            stderr = '',
-            stdout = '',
-            start = None,
-            stop = None,
-            delta = None,
+            changed=False,
+            rc=0,
+            stderr='',
+            stdout='',
+            start=None,
+            stop=None,
+            delta=None,
         ))

         # Is 'args' empty, then this is the default prompted pause

@@ -163,7 +163,6 @@ class ActionModule(ActionBase):
                 else:
                     raise AnsibleError('user requested abort!')

-
         except AnsibleTimeoutExceeded:
             # this is the exception we expect when the alarm signal
             # fires, so we simply ignore it to move into the cleanup

@@ -28,7 +28,7 @@ class ActionModule(ActionBase):

     TRANSFERS_FILES = False

-    #TODO: document this in non-empty set_stats.py module
+    # TODO: document this in non-empty set_stats.py module
     def run(self, tmp=None, task_vars=None):
         if task_vars is None:
             task_vars = dict()

@@ -19,14 +19,14 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

+import glob
 import os
 import re
 import time
-import glob

-from ansible.plugins.action.sros import ActionModule as _ActionModule
+from ansible.module_utils._text import to_text
 from ansible.module_utils.six.moves.urllib.parse import urlsplit
-from ansible.module_utils._text import to_text
+from ansible.plugins.action.sros import ActionModule as _ActionModule
 from ansible.utils.vars import merge_hash

 PRIVATE_KEYS_RE = re.compile('__.+__')

@@ -110,4 +110,3 @@ class ActionModule(_ActionModule):
             searchpath.append(os.path.dirname(source))
         self._templar.environment.loader.searchpath = searchpath
         self._task.args['src'] = self._templar.template(template_data)
-

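The sros hunks above only reorder imports so each block reads alphabetically (glob also moves ahead of os/re/time). A quick way to sanity-check such an ordering, as a standalone sketch:

    # Verify a block of module paths is alphabetically ordered.
    imports = [
        "ansible.module_utils._text",
        "ansible.module_utils.six.moves.urllib.parse",
        "ansible.plugins.action.sros",
        "ansible.utils.vars",
    ]
    assert imports == sorted(imports)
    print("import block is sorted")
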
@@ -62,7 +62,7 @@ class ActionModule(ActionBase):
             return path

         # If using docker, do not add user information
-        if self._remote_transport not in [ 'docker' ] and user:
+        if self._remote_transport not in ['docker'] and user:
             user_prefix = '%s@' % (user, )

         if self._host_is_ipv6_address(host):

@@ -308,8 +308,7 @@ class ActionModule(ActionBase):
         src = _tmp_args.get('src', None)
         dest = _tmp_args.get('dest', None)
         if src is None or dest is None:
-            return dict(failed=True,
-                        msg="synchronize requires both src and dest parameters are set")
+            return dict(failed=True, msg="synchronize requires both src and dest parameters are set")

         if not dest_is_local:
             # Private key handling

@@ -384,7 +383,7 @@ class ActionModule(ActionBase):

         # If launching synchronize against docker container
         # use rsync_opts to support container to override rsh options
-        if self._remote_transport in [ 'docker' ]:
+        if self._remote_transport in ['docker']:
             # Replicate what we do in the module argumentspec handling for lists
             if not isinstance(_tmp_args.get('rsync_opts'), MutableSequence):
                 tmp_rsync_opts = _tmp_args.get('rsync_opts', [])

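The synchronize hunks fix E201/E202 (no padding just inside brackets). Incidentally, membership against a one-element list is equivalent to a direct comparison; a sketch with an illustrative value:

    transport = "docker"
    # E201/E202: write ['docker'], not [ 'docker' ].
    assert (transport in ['docker']) == (transport == 'docker')
    print(transport not in ['docker'])  # False
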
@@ -23,11 +23,12 @@ from ansible import constants as C
 from ansible.errors import AnsibleError
 from ansible.module_utils._text import to_bytes, to_native, to_text
 from ansible.plugins.action import ActionBase
-from ansible.utils.hashing import checksum_s
 from ansible.template import generate_ansible_template_vars
+from ansible.utils.hashing import checksum_s

 boolean = C.mk_boolean

+
 class ActionModule(ActionBase):

     TRANSFERS_FILES = True

lib/ansible/plugins/cache/__init__.py (vendored)

@@ -86,13 +86,13 @@ class BaseFileCacheModule(BaseCacheModule):
         self._cache_dir = os.path.expanduser(os.path.expandvars(C.CACHE_PLUGIN_CONNECTION))

         if not self._cache_dir:
-            raise AnsibleError("error, '%s' cache plugin requires the 'fact_caching_connection' config option"
-                               " to be set (to a writeable directory path)" % self.plugin_name)
+            raise AnsibleError("error, '%s' cache plugin requires the 'fact_caching_connection' config option "
+                               "to be set (to a writeable directory path)" % self.plugin_name)

         if not os.path.exists(self._cache_dir):
             try:
                 os.makedirs(self._cache_dir)
-            except (OSError,IOError) as e:
+            except (OSError, IOError) as e:
                 raise AnsibleError("error in '%s' cache plugin while trying to create cache dir %s : %s" % (self.plugin_name, self._cache_dir, to_bytes(e)))
         else:
             for x in (os.R_OK, os.W_OK, os.X_OK):

@@ -118,12 +118,12 @@ class BaseFileCacheModule(BaseCacheModule):
                 self._cache[key] = value
                 return value
             except ValueError as e:
-                display.warning("error in '%s' cache plugin while trying to read %s : %s."
-                                " Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
+                display.warning("error in '%s' cache plugin while trying to read %s : %s. "
+                                "Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
                 self.delete(key)
-                raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data."
-                                   " It has been removed, so you can re-run your command now." % cachefile)
-            except (OSError,IOError) as e:
+                raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data. "
+                                   "It has been removed, so you can re-run your command now." % cachefile)
+            except (OSError, IOError) as e:
                 display.warning("error in '%s' cache plugin while trying to read %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
                 raise KeyError
             except Exception as e:

@@ -136,7 +136,7 @@ class BaseFileCacheModule(BaseCacheModule):
         cachefile = "%s/%s" % (self._cache_dir, key)
         try:
             self._dump(value, cachefile)
-        except (OSError,IOError) as e:
+        except (OSError, IOError) as e:
             display.warning("error in '%s' cache plugin while trying to write to %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))

     def has_expired(self, key):

@@ -147,7 +147,7 @@ class BaseFileCacheModule(BaseCacheModule):
         cachefile = "%s/%s" % (self._cache_dir, key)
         try:
             st = os.stat(cachefile)
-        except (OSError,IOError) as e:
+        except (OSError, IOError) as e:
             if e.errno == errno.ENOENT:
                 return False
             else:

@@ -179,7 +179,7 @@ class BaseFileCacheModule(BaseCacheModule):
         try:
             os.stat(cachefile)
             return True
-        except (OSError,IOError) as e:
+        except (OSError, IOError) as e:
             if e.errno == errno.ENOENT:
                 return False
             else:

@@ -194,7 +194,7 @@ class BaseFileCacheModule(BaseCacheModule):
         try:
             os.remove("%s/%s" % (self._cache_dir, key))
         except (OSError, IOError):
-            pass #TODO: only pass on non existing?
+            pass  # TODO: only pass on non existing?

     def flush(self):
         self._cache = {}

@@ -236,6 +236,7 @@ class BaseFileCacheModule(BaseCacheModule):
         """
         pass

+
 class FactCache(MutableMapping):

     def __init__(self, *args, **kwargs):

@@ -247,7 +248,6 @@ class FactCache(MutableMapping):
         # Backwards compat: self._display isn't really needed, just import the global display and use that.
         self._display = display

-
     def __getitem__(self, key):
         if not self._plugin.contains(key):
             raise KeyError

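The repeated `except (OSError,IOError)` → `except (OSError, IOError)` change in this file is E231 (missing whitespace after comma); the tuple still catches either exception, and on Python 3 IOError is just an alias of OSError. A standalone sketch (hypothetical cache path):

    import errno

    def read_cache(cachefile):
        try:
            with open(cachefile) as f:
                return f.read()
        except (OSError, IOError) as e:  # E231 wants a space after the comma
            if e.errno == errno.ENOENT:  # missing file: treat as a cache miss
                return None
            raise

    print(read_cache("/nonexistent/cachefile"))  # -> None
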
lib/ansible/plugins/cache/base.py (vendored)

@@ -19,4 +19,3 @@ __metaclass__ = type

 # moved actual classes to __init__ kept here for backward compat with 3rd parties
 from ansible.plugins.cache import BaseCacheModule, BaseFileCacheModule
-

lib/ansible/plugins/cache/jsonfile.py (vendored)

@@ -37,6 +37,7 @@ except ImportError:
 from ansible.parsing.utils.jsonify import jsonify
 from ansible.plugins.cache import BaseFileCacheModule

+
 class CacheModule(BaseFileCacheModule):
     """
     A caching module backed by json files.

Some files were not shown because too many files have changed in this diff.