Collated PEP8 fixes (#25293)

- Make PEP8 compliant
Dag Wieers authored on 2017-06-02 13:14:11 +02:00; committed by John R Barker
commit 5553b20828
206 changed files with 1853 additions and 1870 deletions
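
The hunks below repeat a small set of pycodestyle fixes: no spaces around '=' for keyword and default arguments (E251), whitespace after commas and around operators (E231/E225), two blank lines between top-level definitions (E302), a space after '#' in comments, lambda assignments replaced by def statements (E731), and long lines rewrapped. A minimal before/after sketch of the dominant patterns, using invented names rather than code from the changed files:

# Illustrative sketch only -- the names below are invented, not taken from this commit.
# before:  host = dict(type='str',required=True)     (E251, E231)
# after:


def example_argument_spec():            # E302: two blank lines before a top-level def
    return dict(
        host=dict(type='str', required=True),
        timeout=dict(type='int', default=60),
    )


HAS_LIB = True       # E225: spaces around '='
SIZE_KB = 1 << 10    # operators spaced; inline comments two spaces from the code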

View file

@ -45,8 +45,8 @@ except LookupError:
_COMPOSED_ERROR_HANDLERS = frozenset((None, 'surrogate_or_escape',
'surrogate_or_strict',
'surrogate_then_replace'))
'surrogate_or_strict',
'surrogate_then_replace'))
def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):

View file

@ -44,6 +44,7 @@ AXAPI_VPORT_PROTOCOLS = {
'https': 12,
}
def a10_argument_spec():
return dict(
host=dict(type='str', required=True),
@ -52,11 +53,13 @@ def a10_argument_spec():
write_config=dict(type='bool', default=False)
)
def axapi_failure(result):
if 'response' in result and result['response'].get('status') == 'fail':
return True
return False
def axapi_call(module, url, post=None):
'''
Returns a datastructure based on the result of the API call
@ -81,6 +84,7 @@ def axapi_call(module, url, post=None):
rsp.close()
return data
def axapi_authenticate(module, base_url, username, password):
url = '%s&method=authenticate&username=%s&password=%s' % (base_url, username, password)
result = axapi_call(module, url)
@ -89,6 +93,7 @@ def axapi_authenticate(module, base_url, username, password):
sessid = result['session_id']
return base_url + '&session_id=' + sessid
def axapi_authenticate_v3(module, base_url, username, password):
url = base_url
auth_payload = {"credentials": {"username": username, "password": password}}
@ -98,6 +103,7 @@ def axapi_authenticate_v3(module, base_url, username, password):
signature = result['authresponse']['signature']
return signature
def axapi_call_v3(module, url, method=None, body=None, signature=None):
'''
Returns a datastructure based on the result of the API call
@ -126,6 +132,7 @@ def axapi_call_v3(module, url, method=None, body=None, signature=None):
rsp.close()
return data
def axapi_enabled_disabled(flag):
'''
The axapi uses 0/1 integer values for flags, rather than strings
@ -137,8 +144,10 @@ def axapi_enabled_disabled(flag):
else:
return 0
def axapi_get_port_protocol(protocol):
return AXAPI_PORT_PROTOCOLS.get(protocol.lower(), None)
def axapi_get_vport_protocol(protocol):
return AXAPI_VPORT_PROTOCOLS.get(protocol.lower(), None)

View file

@ -79,9 +79,9 @@ def tower_check_mode(module):
def tower_argument_spec():
return dict(
tower_host = dict(),
tower_username = dict(),
tower_password = dict(no_log=True),
tower_verify_ssl = dict(type='bool', default=True),
tower_config_file = dict(type='path'),
tower_host=dict(),
tower_username=dict(),
tower_password=dict(no_log=True),
tower_verify_ssl=dict(type='bool', default=True),
tower_config_file=dict(type='path'),
)

View file

@ -37,8 +37,8 @@ from ansible.module_utils.aos import *
"""
import json
from distutils.version import LooseVersion
from ansible.module_utils.pycompat24 import get_exception
from distutils.version import LooseVersion
try:
import yaml
@ -53,6 +53,7 @@ try:
except ImportError:
HAS_AOS_PYEZ = False
def check_aos_version(module, min=False):
"""
Check if the library aos-pyez is present.
@ -71,6 +72,7 @@ def check_aos_version(module, min=False):
return True
def get_aos_session(module, auth):
"""
Resume an existing session and return an AOS object.
@ -94,6 +96,7 @@ def get_aos_session(module, auth):
return aos
def find_collection_item(collection, item_name=False, item_id=False):
"""
Find collection_item based on name or id from a collection object
@ -114,6 +117,7 @@ def find_collection_item(collection, item_name=False, item_id=False):
else:
return my_dict
def content_to_dict(module, content):
"""
Convert 'content' into a Python Dict based on 'content_format'
@ -144,12 +148,12 @@ def content_to_dict(module, content):
except:
module.fail_json(msg="Unable to convert 'content' to a dict, please check if valid")
# replace the string with the dict
module.params['content'] = content_dict
return content_dict
def do_load_resource(module, collection, name):
"""
Create a new object (collection.item) by loading a datastructure directly
@ -161,10 +165,7 @@ def do_load_resource(module, collection, name):
module.fail_json(msg="An error occurred while running 'find_collection_item'")
if item.exists:
module.exit_json( changed=False,
name=item.name,
id=item.id,
value=item.value )
module.exit_json(changed=False, name=item.name, id=item.id, value=item.value)
# If not in check mode, apply the changes
if not module.check_mode:
@ -175,7 +176,4 @@ def do_load_resource(module, collection, name):
e = get_exception()
module.fail_json(msg="Unable to write item content : %r" % e)
module.exit_json( changed=True,
name=item.name,
id=item.id,
value=item.value )
module.exit_json(changed=True, name=item.name, id=item.id, value=item.value)

View file

@ -40,6 +40,7 @@ The 'api' module provides the following common argument specs:
"""
import time
def rate_limit_argument_spec(spec=None):
"""Creates an argument spec for working with rate limiting"""
arg_spec = (dict(
@ -50,6 +51,7 @@ def rate_limit_argument_spec(spec=None):
arg_spec.update(spec)
return arg_spec
def retry_argument_spec(spec=None):
"""Creates an argument spec for working with retrying"""
arg_spec = (dict(
@ -60,41 +62,48 @@ def retry_argument_spec(spec=None):
arg_spec.update(spec)
return arg_spec
def basic_auth_argument_spec(spec=None):
arg_spec = (dict(
api_username=dict(type='str', required=False),
api_password=dict(type='str', required=False, no_log=True),
api_url=dict(type='str', required=False),
api_username=dict(type='str'),
api_password=dict(type='str', no_log=True),
api_url=dict(type='str'),
validate_certs=dict(type='bool', default=True)
))
if spec:
arg_spec.update(spec)
return arg_spec
def rate_limit(rate=None, rate_limit=None):
"""rate limiting decorator"""
minrate = None
if rate is not None and rate_limit is not None:
minrate = float(rate_limit) / float(rate)
def wrapper(f):
last = [0.0]
def ratelimited(*args,**kwargs):
def ratelimited(*args, **kwargs):
if minrate is not None:
elapsed = time.clock() - last[0]
left = minrate - elapsed
if left > 0:
time.sleep(left)
last[0] = time.clock()
ret = f(*args,**kwargs)
ret = f(*args, **kwargs)
return ret
return ratelimited
return wrapper
def retry(retries=None, retry_pause=1):
"""Retry decorator"""
def wrapper(f):
retry_count = 0
def retried(*args,**kwargs):
def retried(*args, **kwargs):
if retries is not None:
ret = None
while True:
@ -102,13 +111,13 @@ def retry(retries=None, retry_pause=1):
if retry_count >= retries:
raise Exception("Retry limit exceeded: %d" % retries)
try:
ret = f(*args,**kwargs)
ret = f(*args, **kwargs)
except:
pass
if ret:
break
time.sleep(retry_pause)
return ret
return retried
return wrapper
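
The rate_limit and retry decorators in the 'api' module hunks above are only touched for whitespace and blank lines; their behaviour is unchanged. A minimal usage sketch, assuming only the decorator signatures shown above (the fetch() function and its return value are invented):

# Hedged sketch: fetch() is a made-up function; the decorators are the ones defined
# above. A falsy return value (or an exception) triggers another retry attempt.
from ansible.module_utils.api import rate_limit, retry


@rate_limit(rate=10, rate_limit=60)    # allow at most 10 calls per 60 seconds
@retry(retries=3, retry_pause=2)       # retry up to 3 times, pausing 2 seconds between tries
def fetch(url):
    return {'url': url}


result = fetch('https://example.com/api/v1/ping')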

View file

@ -25,7 +25,6 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# This module initially matched the namespace of network module avi. However,
# that causes namespace import error when other modules from avi namespaces
@ -40,8 +39,7 @@ HAS_AVI = True
try:
import avi.sdk
sdk_version = getattr(avi.sdk, '__version__', None)
if ((sdk_version is None) or (sdk_version and
(parse_version(sdk_version) < parse_version('17.1')))):
if ((sdk_version is None) or (sdk_version and (parse_version(sdk_version) < parse_version('17.1')))):
# It allows the __version__ to be '' as that value is used in development builds
raise ImportError
from avi.sdk.utils.ansible_utils import avi_ansible_api

View file

@ -1,4 +1,3 @@
#
# Copyright (c) 2016 Matt Davis, <mdavis@ansible.com>
# Chris Houseknecht, <house@redhat.com>
#
@ -16,7 +15,6 @@
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import json
import os
@ -102,6 +100,7 @@ except ImportError as exc:
HAS_AZURE_EXC = exc
HAS_AZURE = False
def azure_id_to_dict(id):
pieces = re.sub(r'^\/', '', id).split('/')
result = {}
@ -121,6 +120,7 @@ AZURE_EXPECTED_VERSIONS = dict(
AZURE_MIN_RELEASE = '2.0.0rc5'
class AzureRMModuleBase(object):
def __init__(self, derived_arg_spec, bypass_checks=False, no_log=False,
@ -202,7 +202,7 @@ class AzureRMModuleBase(object):
if Version(client_version) < Version(expected_version):
self.fail("Installed {0} client version is {1}. The supported version is {2}. Try "
"`pip install azure>={3} --upgrade`".format(client_name, client_version, expected_version,
AZURE_MIN_RELEASE))
AZURE_MIN_RELEASE))
def exec_module(self, **kwargs):
self.fail("Error: {0} failed to implement exec_module method.".format(self.__class__.__name__))
@ -220,11 +220,11 @@ class AzureRMModuleBase(object):
def log(self, msg, pretty_print=False):
pass
# Use only during module development
#if self.debug:
# log_file = open('azure_rm.log', 'a')
# if pretty_print:
# if self.debug:
# log_file = open('azure_rm.log', 'a')
# if pretty_print:
# log_file.write(json.dumps(msg, indent=4, sort_keys=True))
# else:
# else:
# log_file.write(msg + u'\n')
def validate_tags(self, tags):

View file

@ -32,7 +32,17 @@ BOOLEANS_TRUE = ['y', 'yes', 'on', '1', 'true', 1, True]
BOOLEANS_FALSE = ['n', 'no', 'off', '0', 'false', 0, False]
BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
SIZE_RANGES = { 'Y': 1<<80, 'Z': 1<<70, 'E': 1<<60, 'P': 1<<50, 'T': 1<<40, 'G': 1<<30, 'M': 1<<20, 'K': 1<<10, 'B': 1 }
SIZE_RANGES = {
'Y': 1 << 80,
'Z': 1 << 70,
'E': 1 << 60,
'P': 1 << 50,
'T': 1 << 40,
'G': 1 << 30,
'M': 1 << 20,
'K': 1 << 10,
'B': 1,
}
FILE_ATTRIBUTES = {
'A': 'noatime',
@ -83,9 +93,9 @@ from itertools import repeat, chain
try:
import syslog
HAS_SYSLOG=True
HAS_SYSLOG = True
except ImportError:
HAS_SYSLOG=False
HAS_SYSLOG = False
try:
from systemd import journal
@ -93,10 +103,10 @@ try:
except ImportError:
has_journal = False
HAVE_SELINUX=False
HAVE_SELINUX = False
try:
import selinux
HAVE_SELINUX=True
HAVE_SELINUX = True
except ImportError:
pass
@ -161,8 +171,16 @@ except ImportError:
pass
from ansible.module_utils.pycompat24 import get_exception, literal_eval
from ansible.module_utils.six import (PY2, PY3, b, binary_type, integer_types,
iteritems, text_type, string_types)
from ansible.module_utils.six import (
PY2,
PY3,
b,
binary_type,
integer_types,
iteritems,
string_types,
text_type,
)
from ansible.module_utils.six.moves import map, reduce, shlex_quote
from ansible.module_utils._text import to_native, to_bytes, to_text
@ -208,33 +226,33 @@ _literal_eval = literal_eval
# is an internal implementation detail
_ANSIBLE_ARGS = None
FILE_COMMON_ARGUMENTS=dict(
src = dict(),
mode = dict(type='raw'),
owner = dict(),
group = dict(),
seuser = dict(),
serole = dict(),
selevel = dict(),
setype = dict(),
follow = dict(type='bool', default=False),
FILE_COMMON_ARGUMENTS = dict(
src=dict(),
mode=dict(type='raw'),
owner=dict(),
group=dict(),
seuser=dict(),
serole=dict(),
selevel=dict(),
setype=dict(),
follow=dict(type='bool', default=False),
# not taken by the file module, but other modules call file so it must ignore them.
content = dict(no_log=True),
backup = dict(),
force = dict(),
remote_src = dict(), # used by assemble
regexp = dict(), # used by assemble
delimiter = dict(), # used by assemble
directory_mode = dict(), # used by copy
unsafe_writes = dict(type='bool'), # should be available to any module using atomic_move
attributes = dict(aliases=['attr']),
content=dict(no_log=True),
backup=dict(),
force=dict(),
remote_src=dict(), # used by assemble
regexp=dict(), # used by assemble
delimiter=dict(), # used by assemble
directory_mode=dict(), # used by copy
unsafe_writes=dict(type='bool'), # should be available to any module using atomic_move
attributes=dict(aliases=['attr']),
)
PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
# Can't use 07777 on Python 3, can't use 0o7777 on Python 2.4
PERM_BITS = int('07777', 8) # file mode permission bits
EXEC_PERM_BITS = int('00111', 8) # execute permission bits
EXEC_PERM_BITS = int('00111', 8) # execute permission bits
DEFAULT_PERM = int('0666', 8) # default file permission bits
@ -242,11 +260,12 @@ def get_platform():
''' what's the platform? example: Linux is a platform. '''
return platform.system()
def get_distribution():
''' return the distribution name '''
if platform.system() == 'Linux':
try:
supported_dists = platform._supported_dists + ('arch','alpine')
supported_dists = platform._supported_dists + ('arch', 'alpine')
distribution = platform.linux_distribution(supported_dists=supported_dists)[0].capitalize()
if not distribution and os.path.isfile('/etc/system-release'):
distribution = platform.linux_distribution(supported_dists=['system'])[0].capitalize()
@ -261,6 +280,7 @@ def get_distribution():
distribution = None
return distribution
def get_distribution_version():
''' return the distribution version '''
if platform.system() == 'Linux':
@ -275,6 +295,7 @@ def get_distribution_version():
distribution_version = None
return distribution_version
def get_all_subclasses(cls):
'''
used by modules like Hardware or Network fact classes to retrieve all subclasses of a given class.
@ -338,6 +359,7 @@ def json_dict_unicode_to_bytes(d, encoding='utf-8', errors='surrogate_or_strict'
else:
return d
def json_dict_bytes_to_unicode(d, encoding='utf-8', errors='surrogate_or_strict'):
''' Recursively convert dict keys and values to byte str
@ -357,6 +379,7 @@ def json_dict_bytes_to_unicode(d, encoding='utf-8', errors='surrogate_or_strict'
else:
return d
def return_values(obj):
""" Return native stringified values from datastructures.
@ -381,6 +404,7 @@ def return_values(obj):
else:
raise TypeError('Unknown parameter type: %s, %s' % (type(obj), obj))
def remove_values(value, no_log_strings):
""" Remove strings in no_log_strings from value. If value is a container
type, then remove a lot more"""
@ -489,6 +513,7 @@ def heuristic_log_sanitize(data, no_log_values=None):
output = remove_values(output, no_log_values)
return output
def bytes_to_human(size, isbits=False, unit=None):
base = 'Bytes'
@ -505,7 +530,8 @@ def bytes_to_human(size, isbits=False, unit=None):
else:
suffix = base
return '%.2f %s' % (float(size)/ limit, suffix)
return '%.2f %s' % (float(size) / limit, suffix)
def human_to_bytes(number, default_unit=None, isbits=False):
@ -555,6 +581,7 @@ def human_to_bytes(number, default_unit=None, isbits=False):
return int(round(num * limit))
def is_executable(path):
'''is the given path executable?
@ -568,6 +595,7 @@ def is_executable(path):
# execute bits are set.
return ((stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) & os.stat(path)[stat.ST_MODE])
def _load_params():
''' read the modules parameters and store them globally.
@ -623,6 +651,7 @@ def _load_params():
'"failed": true}')
sys.exit(1)
def env_fallback(*args, **kwargs):
''' Load value from environment '''
for arg in args:
@ -631,6 +660,7 @@ def env_fallback(*args, **kwargs):
else:
raise AnsibleFallbackNotFound
def _lenient_lowercase(lst):
"""Lowercase elements of a list.
@ -644,6 +674,7 @@ def _lenient_lowercase(lst):
lowered.append(value)
return lowered
def format_attributes(attributes):
attribute_list = []
for attr in attributes:
@ -651,13 +682,15 @@ def format_attributes(attributes):
attribute_list.append(FILE_ATTRIBUTES[attr])
return attribute_list
def get_flags_from_attributes(attributes):
flags = []
for key,attr in FILE_ATTRIBUTES.items():
for key, attr in FILE_ATTRIBUTES.items():
if attr in attributes:
flags.append(key)
return ''.join(flags)
class AnsibleFallbackNotFound(Exception):
pass
@ -674,7 +707,7 @@ class AnsibleModule(object):
see library/* for examples
'''
self._name = os.path.basename(__file__) #initialize name until we can parse from options
self._name = os.path.basename(__file__) # initialize name until we can parse from options
self.argument_spec = argument_spec
self.supports_check_mode = supports_check_mode
self.check_mode = False
@ -808,15 +841,15 @@ class AnsibleModule(object):
b_path = os.path.realpath(b_path)
path = to_native(b_path)
mode = params.get('mode', None)
owner = params.get('owner', None)
group = params.get('group', None)
mode = params.get('mode', None)
owner = params.get('owner', None)
group = params.get('group', None)
# selinux related options
seuser = params.get('seuser', None)
serole = params.get('serole', None)
setype = params.get('setype', None)
selevel = params.get('selevel', None)
seuser = params.get('seuser', None)
serole = params.get('serole', None)
setype = params.get('setype', None)
selevel = params.get('selevel', None)
secontext = [seuser, serole, setype]
if self.selinux_mls_enabled():
@ -834,7 +867,6 @@ class AnsibleModule(object):
selevel=selevel, secontext=secontext, attributes=attributes,
)
# Detect whether using selinux that is MLS-aware.
# While this means you can set the level/range with
# selinux.lsetfilecon(), it may or may not mean that you
@ -853,7 +885,7 @@ class AnsibleModule(object):
if not HAVE_SELINUX:
seenabled = self.get_bin_path('selinuxenabled')
if seenabled is not None:
(rc,out,err) = self.run_command(seenabled)
(rc, out, err) = self.run_command(seenabled)
if rc == 0:
self.fail_json(msg="Aborting, target uses selinux but python bindings (libselinux-python) aren't installed!")
return False
@ -1127,7 +1159,7 @@ class AnsibleModule(object):
e = get_exception()
if os.path.islink(b_path) and e.errno == errno.EPERM: # Can't set mode on symbolic links
pass
elif e.errno in (errno.ENOENT, errno.ELOOP): # Can't set mode on broken symbolic links
elif e.errno in (errno.ENOENT, errno.ELOOP): # Can't set mode on broken symbolic links
pass
else:
raise e
@ -1154,7 +1186,7 @@ class AnsibleModule(object):
existing = self.get_file_attributes(b_path)
if existing.get('attr_flags','') != attributes:
if existing.get('attr_flags', '') != attributes:
attrcmd = self.get_bin_path('chattr')
if attrcmd:
attrcmd = [attrcmd, '=%s' % attributes, b_path]
@ -1187,14 +1219,13 @@ class AnsibleModule(object):
rc, out, err = self.run_command(attrcmd)
if rc == 0:
res = out.split(' ')[0:2]
output['attr_flags'] = res[1].replace('-','').strip()
output['attr_flags'] = res[1].replace('-', '').strip()
output['version'] = res[0].strip()
output['attributes'] = format_attributes(output['attr_flags'])
except:
pass
return output
def _symbolic_mode_to_octal(self, path_stat, symbolic_mode):
new_mode = stat.S_IMODE(path_stat.st_mode)
@ -1217,7 +1248,7 @@ class AnsibleModule(object):
return new_mode
def _apply_operation_to_mode(self, user, operator, mode_to_apply, current_mode):
if operator == '=':
if operator == '=':
if user == 'u':
mask = stat.S_IRWXU | stat.S_ISUID
elif user == 'g':
@ -1247,13 +1278,13 @@ class AnsibleModule(object):
X_perms = {
'u': {'X': stat.S_IXUSR},
'g': {'X': stat.S_IXGRP},
'o': {'X': stat.S_IXOTH}
'o': {'X': stat.S_IXOTH},
}
else:
X_perms = {
'u': {'X': 0},
'g': {'X': 0},
'o': {'X': 0}
'o': {'X': 0},
}
user_perms_to_modes = {
@ -1265,7 +1296,8 @@ class AnsibleModule(object):
't': 0,
'u': prev_mode & stat.S_IRWXU,
'g': (prev_mode & stat.S_IRWXG) << 3,
'o': (prev_mode & stat.S_IRWXO) << 6 },
'o': (prev_mode & stat.S_IRWXO) << 6,
},
'g': {
'r': stat.S_IRGRP,
'w': stat.S_IWGRP,
@ -1274,7 +1306,8 @@ class AnsibleModule(object):
't': 0,
'u': (prev_mode & stat.S_IRWXU) >> 3,
'g': prev_mode & stat.S_IRWXG,
'o': (prev_mode & stat.S_IRWXO) << 3 },
'o': (prev_mode & stat.S_IRWXO) << 3,
},
'o': {
'r': stat.S_IROTH,
'w': stat.S_IWOTH,
@ -1283,14 +1316,17 @@ class AnsibleModule(object):
't': stat.S_ISVTX,
'u': (prev_mode & stat.S_IRWXU) >> 6,
'g': (prev_mode & stat.S_IRWXG) >> 3,
'o': prev_mode & stat.S_IRWXO }
'o': prev_mode & stat.S_IRWXO,
}
}
# Insert X_perms into user_perms_to_modes
for key, value in X_perms.items():
user_perms_to_modes[key].update(value)
or_reduce = lambda mode, perm: mode | user_perms_to_modes[user][perm]
def or_reduce(mode, perm):
return mode | user_perms_to_modes[user][perm]
return reduce(or_reduce, perms, 0)
def set_fs_attributes_if_different(self, file_args, changed, diff=None, expand=True):
@ -1383,10 +1419,10 @@ class AnsibleModule(object):
def _handle_aliases(self, spec=None):
# this uses exceptions as it happens before we can safely call fail_json
aliases_results = {} #alias:canon
aliases_results = {} # alias:canon
if spec is None:
spec = self.argument_spec
for (k,v) in spec.items():
for (k, v) in spec.items():
self._legal_inputs.append(k)
aliases = v.get('aliases', None)
default = v.get('default', None)
@ -1409,7 +1445,7 @@ class AnsibleModule(object):
def _check_arguments(self, check_invalid_arguments):
self._syslog_facility = 'LOG_USER'
unsupported_parameters = set()
for (k,v) in list(self.params.items()):
for (k, v) in list(self.params.items()):
if k == '_ansible_check_mode' and v:
self.check_mode = True
@ -1444,7 +1480,7 @@ class AnsibleModule(object):
elif check_invalid_arguments and k not in self._legal_inputs:
unsupported_parameters.add(k)
#clean up internal params:
# clean up internal params:
if k.startswith('_ansible_'):
del self.params[k]
@ -1482,20 +1518,20 @@ class AnsibleModule(object):
if spec is None:
return
for check in spec:
counts = [ self._count_terms([field]) for field in check ]
non_zero = [ c for c in counts if c > 0 ]
counts = [self._count_terms([field]) for field in check]
non_zero = [c for c in counts if c > 0]
if len(non_zero) > 0:
if 0 in counts:
self.fail_json(msg="parameters are required together: %s" % (check,))
def _check_required_arguments(self, spec=None, param=None ):
def _check_required_arguments(self, spec=None, param=None):
''' ensure all required arguments are present '''
missing = []
if spec is None:
spec = self.argument_spec
if param is None:
param = self.params
for (k,v) in spec.items():
for (k, v) in spec.items():
required = v.get('required', False)
if required and k not in param:
missing.append(k)
@ -1534,8 +1570,8 @@ class AnsibleModule(object):
spec = self.argument_spec
if param is None:
param = self.params
for (k,v) in spec.items():
choices = v.get('choices',None)
for (k, v) in spec.items():
choices = v.get('choices', None)
if choices is None:
continue
if isinstance(choices, SEQUENCETYPE) and not isinstance(choices, (binary_type, text_type)):
@ -1561,8 +1597,8 @@ class AnsibleModule(object):
(param[k],) = overlap
if param[k] not in choices:
choices_str=",".join([to_native(c) for c in choices])
msg="value of %s must be one of: %s, got: %s" % (k, choices_str, param[k])
choices_str = ",".join([to_native(c) for c in choices])
msg = "value of %s must be one of: %s, got: %s" % (k, choices_str, param[k])
self.fail_json(msg=msg)
else:
self.fail_json(msg="internal error: choices for argument %s are not iterable: %s" % (k, choices))
@ -1610,7 +1646,7 @@ class AnsibleModule(object):
if isinstance(value, string_types):
return value.split(",")
elif isinstance(value, int) or isinstance(value, float):
return [ str(value) ]
return [str(value)]
raise TypeError('%s cannot be converted to a list' % type(value))
@ -1703,14 +1739,12 @@ class AnsibleModule(object):
def _check_type_raw(self, value):
return value
def _check_type_bytes(self, value):
try:
self.human_to_bytes(value)
except ValueError:
raise TypeError('%s cannot be converted to a Byte value' % type(value))
def _check_type_bits(self, value):
try:
self.human_to_bytes(value, isbits=True)
@ -1761,7 +1795,7 @@ class AnsibleModule(object):
self._check_argument_values(spec, param[k])
def _set_defaults(self, pre=True):
for (k,v) in self.argument_spec.items():
for (k, v) in self.argument_spec.items():
default = v.get('default', None)
if pre is True:
# this prevents setting defaults on required items
@ -1773,7 +1807,7 @@ class AnsibleModule(object):
self.params[k] = default
def _set_fallbacks(self):
for k,v in self.argument_spec.items():
for (k, v) in self.argument_spec.items():
fallback = v.get('fallback', (None,))
fallback_strategy = fallback[0]
fallback_args = []
@ -1856,16 +1890,14 @@ class AnsibleModule(object):
log_args = dict()
for param in self.params:
canon = self.aliases.get(param, param)
canon = self.aliases.get(param, param)
arg_opts = self.argument_spec.get(canon, {})
no_log = arg_opts.get('no_log', False)
if self.boolean(no_log):
log_args[param] = 'NOT_LOGGING_PARAMETER'
# try to capture all passwords/passphrase named fields missed by no_log
elif PASSWORD_MATCH.search(param) and \
arg_opts.get('type', 'str') != 'bool' and \
not arg_opts.get('choices', False):
elif PASSWORD_MATCH.search(param) and arg_opts.get('type', 'str') != 'bool' and not arg_opts.get('choices', False):
# skip boolean and enums as they are about 'password' state
log_args[param] = 'NOT_LOGGING_PASSWORD'
self.warn('Module did not set no_log for %s' % param)
@ -1885,11 +1917,10 @@ class AnsibleModule(object):
self.log(msg, log_args=log_args)
def _set_cwd(self):
try:
cwd = os.getcwd()
if not os.access(cwd, os.F_OK|os.R_OK):
if not os.access(cwd, os.F_OK | os.R_OK):
raise Exception()
return cwd
except:
@ -1897,7 +1928,7 @@ class AnsibleModule(object):
# Try and move to a neutral location to prevent errors
for cwd in [os.path.expandvars('$HOME'), tempfile.gettempdir()]:
try:
if os.access(cwd, os.F_OK|os.R_OK):
if os.access(cwd, os.F_OK | os.R_OK):
os.chdir(cwd)
return cwd
except:
@ -2011,7 +2042,7 @@ class AnsibleModule(object):
def exit_json(self, **kwargs):
''' return from the module, without error '''
if not 'changed' in kwargs:
if 'changed' not in kwargs:
kwargs['changed'] = False
self.do_cleanup_files()
@ -2024,7 +2055,7 @@ class AnsibleModule(object):
assert 'msg' in kwargs, "implementation error -- msg to explain the error is required"
kwargs['failed'] = True
if not 'changed' in kwargs:
if 'changed' not in kwargs:
kwargs['changed'] = False
self.do_cleanup_files()
@ -2175,7 +2206,7 @@ class AnsibleModule(object):
native_suffix = os.path.basename(b_dest)
native_prefix = b('.ansible_tmp')
try:
tmp_dest_fd, tmp_dest_name = tempfile.mkstemp( prefix=native_prefix, dir=native_dest_dir, suffix=native_suffix)
tmp_dest_fd, tmp_dest_name = tempfile.mkstemp(prefix=native_prefix, dir=native_dest_dir, suffix=native_suffix)
except (OSError, IOError):
e = get_exception()
self.fail_json(msg='The destination directory (%s) is not writable by the current user. Error was: %s' % (os.path.dirname(dest), e))
@ -2261,7 +2292,6 @@ class AnsibleModule(object):
e = get_exception()
self.fail_json(msg='Could not write data to file (%s) from (%s): %s' % (dest, src, e), exception=traceback.format_exc())
def _read_from_pipes(self, rpipes, rfds, file_descriptor):
data = b('')
if file_descriptor in rfds:
@ -2359,7 +2389,7 @@ class AnsibleModule(object):
# expand things like $HOME and ~
if not shell:
args = [ os.path.expanduser(os.path.expandvars(x)) for x in args if x is not None ]
args = [os.path.expanduser(os.path.expandvars(x)) for x in args if x is not None]
rc = 0
msg = None
@ -2387,9 +2417,9 @@ class AnsibleModule(object):
# Clean out python paths set by ansiballz
if 'PYTHONPATH' in os.environ:
pypaths = os.environ['PYTHONPATH'].split(':')
pypaths = [x for x in pypaths \
if not x.endswith('/ansible_modlib.zip') \
and not x.endswith('/debug_dir')]
pypaths = [x for x in pypaths
if not x.endswith('/ansible_modlib.zip') and
not x.endswith('/debug_dir')]
os.environ['PYTHONPATH'] = ':'.join(pypaths)
if not os.environ['PYTHONPATH']:
del os.environ['PYTHONPATH']
@ -2510,7 +2540,7 @@ class AnsibleModule(object):
self.fail_json(rc=e.errno, msg=to_native(e), cmd=clean_args)
except Exception:
e = get_exception()
self.log("Error Executing CMD:%s Exception:%s" % (clean_args,to_native(traceback.format_exc())))
self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(traceback.format_exc())))
self.fail_json(rc=257, msg=to_native(e), exception=traceback.format_exc(), cmd=clean_args)
# Restore env settings

View file

@ -25,9 +25,12 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import json
from ansible.module_utils.urls import fetch_url
class Response(object):
def __init__(self, resp, info):

View file

@ -25,27 +25,29 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import signal
import socket
import struct
import signal
from ansible.module_utils.basic import get_exception
from ansible.module_utils._text import to_bytes, to_native
def send_data(s, data):
packed_len = struct.pack('!Q',len(data))
packed_len = struct.pack('!Q', len(data))
return s.sendall(packed_len + data)
def recv_data(s):
header_len = 8 # size of a packed unsigned long long
header_len = 8 # size of a packed unsigned long long
data = to_bytes("")
while len(data) < header_len:
d = s.recv(header_len - len(data))
if not d:
return None
data += d
data_len = struct.unpack('!Q',data[:header_len])[0]
data_len = struct.unpack('!Q', data[:header_len])[0]
data = data[header_len:]
while len(data) < data_len:
d = s.recv(data_len - len(data))
@ -54,6 +56,7 @@ def recv_data(s):
data += d
return data
def exec_command(module, command):
try:
sf = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)

View file

@ -26,9 +26,11 @@
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class SQLParseError(Exception):
pass
class UnclosedQuoteError(SQLParseError):
pass
@ -38,6 +40,7 @@ class UnclosedQuoteError(SQLParseError):
_PG_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, schema=2, table=3, column=4, role=1)
_MYSQL_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, table=2, column=3, role=1, vars=1)
def _find_end_quote(identifier, quote_char):
accumulate = 0
while True:
@ -47,12 +50,12 @@ def _find_end_quote(identifier, quote_char):
raise UnclosedQuoteError
accumulate = accumulate + quote
try:
next_char = identifier[quote+1]
next_char = identifier[quote + 1]
except IndexError:
return accumulate
if next_char == quote_char:
try:
identifier = identifier[quote+2:]
identifier = identifier[quote + 2:]
accumulate = accumulate + 2
except IndexError:
raise UnclosedQuoteError
@ -73,10 +76,10 @@ def _identifier_parse(identifier, quote_char):
already_quoted = False
else:
if end_quote < len(identifier) - 1:
if identifier[end_quote+1] == '.':
if identifier[end_quote + 1] == '.':
dot = end_quote + 1
first_identifier = identifier[:dot]
next_identifier = identifier[dot+1:]
next_identifier = identifier[dot + 1:]
further_identifiers = _identifier_parse(next_identifier, quote_char)
further_identifiers.insert(0, first_identifier)
else:
@ -88,19 +91,19 @@ def _identifier_parse(identifier, quote_char):
try:
dot = identifier.index('.')
except ValueError:
identifier = identifier.replace(quote_char, quote_char*2)
identifier = identifier.replace(quote_char, quote_char * 2)
identifier = ''.join((quote_char, identifier, quote_char))
further_identifiers = [identifier]
else:
if dot == 0 or dot >= len(identifier) - 1:
identifier = identifier.replace(quote_char, quote_char*2)
identifier = identifier.replace(quote_char, quote_char * 2)
identifier = ''.join((quote_char, identifier, quote_char))
further_identifiers = [identifier]
else:
first_identifier = identifier[:dot]
next_identifier = identifier[dot+1:]
next_identifier = identifier[dot + 1:]
further_identifiers = _identifier_parse(next_identifier, quote_char)
first_identifier = first_identifier.replace(quote_char, quote_char*2)
first_identifier = first_identifier.replace(quote_char, quote_char * 2)
first_identifier = ''.join((quote_char, first_identifier, quote_char))
further_identifiers.insert(0, first_identifier)
@ -113,6 +116,7 @@ def pg_quote_identifier(identifier, id_type):
raise SQLParseError('PostgreSQL does not support %s with more than %i dots' % (id_type, _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]))
return '.'.join(identifier_fragments)
def mysql_quote_identifier(identifier, id_type):
identifier_fragments = _identifier_parse(identifier, quote_char='`')
if len(identifier_fragments) > _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]:
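
The identifier-quoting hunks above only add operator spacing and blank lines; the escaping logic behind pg_quote_identifier/mysql_quote_identifier is unchanged. That logic doubles any embedded quote character and wraps each dot-separated fragment in quotes, which a stand-in helper can illustrate (quote_fragment() below is invented, not the real _identifier_parse()):

# Hedged sketch of the quote-doubling rule used above; quote_fragment() is a
# simplified stand-in, not the real parser.
def quote_fragment(fragment, quote_char='"'):
    escaped = fragment.replace(quote_char, quote_char * 2)
    return quote_char + escaped + quote_char


assert quote_fragment('my_table') == '"my_table"'
assert quote_fragment('odd"name') == '"odd""name"'
assert quote_fragment('db', quote_char='`') == '`db`'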

View file

@ -217,7 +217,7 @@ class AnsibleDockerClient(Client):
docker_host=self._get_value('docker_host', params['docker_host'], 'DOCKER_HOST',
DEFAULT_DOCKER_HOST),
tls_hostname=self._get_value('tls_hostname', params['tls_hostname'],
'DOCKER_TLS_HOSTNAME', 'localhost'),
'DOCKER_TLS_HOSTNAME', 'localhost'),
api_version=self._get_value('api_version', params['api_version'], 'DOCKER_API_VERSION',
'auto'),
cacert_path=self._get_value('cacert_path', params['cacert_path'], 'DOCKER_CERT_PATH', None),
@ -244,7 +244,7 @@ class AnsibleDockerClient(Client):
def _get_tls_config(self, **kwargs):
self.log("get_tls_config:")
for key in kwargs:
self.log(" %s: %s" % (key, kwargs[key]))
self.log(" %s: %s" % (key, kwargs[key]))
try:
tls_config = TLSConfig(**kwargs)
return tls_config
@ -327,11 +327,10 @@ class AnsibleDockerClient(Client):
def _handle_ssl_error(self, error):
match = re.match(r"hostname.*doesn\'t match (\'.*\')", str(error))
if match:
msg = "You asked for verification that Docker host name matches %s. The actual hostname is %s. " \
"Most likely you need to set DOCKER_TLS_HOSTNAME or pass tls_hostname with a value of %s. " \
"You may also use TLS without verification by setting the tls parameter to true." \
% (self.auth_params['tls_hostname'], match.group(1), match.group(1))
self.fail(msg)
self.fail("You asked for verification that Docker host name matches %s. The actual hostname is %s. "
"Most likely you need to set DOCKER_TLS_HOSTNAME or pass tls_hostname with a value of %s. "
"You may also use TLS without verification by setting the tls parameter to true."
% (self.auth_params['tls_hostname'], match.group(1), match.group(1)))
self.fail("SSL Exception: %s" % (error))
def get_container(self, name=None):
@ -448,5 +447,3 @@ class AnsibleDockerClient(Client):
new_tag = self.find_image(name, tag)
return new_tag, old_tag == new_tag

View file

@ -34,7 +34,7 @@ from ansible.module_utils.cloud import CloudRetry
try:
import boto
import boto.ec2 #boto does weird import stuff
import boto.ec2 # boto does weird import stuff
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
@ -54,6 +54,7 @@ except:
from ansible.module_utils.six import string_types, binary_type, text_type
class AnsibleAWSError(Exception):
pass
@ -98,6 +99,7 @@ def boto3_conn(module, conn_type=None, resource=None, region=None, endpoint=None
module.fail_json(msg='There is an issue in the code of the module. You must specify either both, resource or client to the conn_type '
'parameter in the boto3_conn function call')
def _boto3_conn(conn_type=None, resource=None, region=None, endpoint=None, **params):
profile = params.pop('profile_name', None)
@ -120,6 +122,7 @@ def _boto3_conn(conn_type=None, resource=None, region=None, endpoint=None, **par
boto3_inventory_conn = _boto3_conn
def aws_common_argument_spec():
return dict(
ec2_url=dict(),
@ -291,6 +294,7 @@ def ec2_connect(module):
return ec2
def paging(pause=0, marker_property='marker'):
""" Adds paging to boto retrieval functions that support a 'marker'
this is configurable as not all boto functions seem to use the
@ -330,7 +334,6 @@ def camel_dict_to_snake_dict(camel_dict):
return all_cap_re.sub(r'\1_\2', s1).lower()
def value_is_list(camel_list):
checked_list = []
@ -344,7 +347,6 @@ def camel_dict_to_snake_dict(camel_dict):
return checked_list
snake_dict = {}
for k, v in camel_dict.items():
if isinstance(v, dict):
@ -403,7 +405,7 @@ def ansible_dict_to_boto3_filter_list(filters_dict):
"""
filters_list = []
for k,v in filters_dict.items():
for k, v in filters_dict.items():
filter_dict = {'Name': k}
if isinstance(v, string_types):
filter_dict['Values'] = [v]
@ -470,7 +472,7 @@ def ansible_dict_to_boto3_tag_list(tags_dict, tag_name_key_name='Key', tag_value
"""
tags_list = []
for k,v in tags_dict.items():
for k, v in tags_dict.items():
tags_list.append({tag_name_key_name: k, tag_value_key_name: v})
return tags_list
@ -491,7 +493,6 @@ def get_ec2_security_group_ids_from_names(sec_group_list, ec2_connection, vpc_id
else:
return sg.name
def get_sg_id(sg, boto3):
if boto3:
@ -520,7 +521,7 @@ def get_ec2_security_group_ids_from_names(sec_group_list, ec2_connection, vpc_id
all_sec_groups = ec2_connection.describe_security_groups()['SecurityGroups']
else:
if vpc_id:
filters = { 'vpc-id': vpc_id }
filters = {'vpc-id': vpc_id}
all_sec_groups = ec2_connection.get_all_security_groups(filters=filters)
else:
all_sec_groups = ec2_connection.get_all_security_groups()
@ -536,7 +537,7 @@ def get_ec2_security_group_ids_from_names(sec_group_list, ec2_connection, vpc_id
if len(still_unmatched) > 0:
raise ValueError("The following group names are not valid: %s" % ', '.join(still_unmatched))
sec_group_id_list += [ str(get_sg_id(all_sg, boto3)) for all_sg in all_sec_groups if str(get_sg_name(all_sg, boto3)) in sec_group_name_list ]
sec_group_id_list += [str(get_sg_id(all_sg, boto3)) for all_sg in all_sec_groups if str(get_sg_name(all_sg, boto3)) in sec_group_name_list]
return sec_group_id_list

View file

@ -56,7 +56,6 @@ def f5_argument_spec():
server_port=dict(
type='int',
default=443,
required=False,
fallback=(env_fallback, ['F5_SERVER_PORT'])
),
state=dict(
@ -80,7 +79,7 @@ def f5_parse_arguments(module):
import ssl
if not hasattr(ssl, 'SSLContext'):
module.fail_json(
msg="bigsuds does not support verifying certificates with python < 2.7.9." \
msg="bigsuds does not support verifying certificates with python < 2.7.9."
"Either update python or set validate_certs=False on the task'")
return (
@ -122,26 +121,22 @@ def bigip_api(bigip, user, password, validate_certs, port=443):
# Fully Qualified name (with the partition)
def fq_name(partition,name):
def fq_name(partition, name):
if name is not None and not name.startswith('/'):
return '/%s/%s' % (partition,name)
return '/%s/%s' % (partition, name)
return name
# Fully Qualified name (with partition) for a list
def fq_list_names(partition,list_names):
def fq_list_names(partition, list_names):
if list_names is None:
return None
return map(lambda x: fq_name(partition,x),list_names)
return map(lambda x: fq_name(partition, x), list_names)
# New style
from abc import ABCMeta, abstractproperty
from ansible.module_utils.six import with_metaclass
from collections import defaultdict
try:
@ -158,7 +153,7 @@ except ImportError:
from ansible.module_utils.basic import *
from ansible.module_utils.six import iteritems
from ansible.module_utils.six import iteritems, with_metaclass
F5_COMMON_ARGS = dict(
@ -187,7 +182,6 @@ F5_COMMON_ARGS = dict(
server_port=dict(
type='int',
default=443,
required=False,
fallback=(env_fallback, ['F5_SERVER_PORT'])
),
state=dict(
@ -286,7 +280,7 @@ class AnsibleF5Parameters(object):
def __init__(self, params=None):
self._values = defaultdict(lambda: None)
if params:
for k,v in iteritems(params):
for k, v in iteritems(params):
if self.api_map is not None and k in self.api_map:
dict_to_use = self.api_map
map_key = self.api_map[k]

View file

@ -33,31 +33,31 @@ from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
#check for pyFG lib
# check for pyFG lib
try:
from pyFG import FortiOS, FortiConfig
from pyFG.exceptions import CommandExecutionException, FailedCommit
HAS_PYFG=True
HAS_PYFG = True
except ImportError:
HAS_PYFG=False
HAS_PYFG = False
fortios_argument_spec = dict(
file_mode = dict(type='bool', default=False),
config_file = dict(type='path'),
host = dict( ),
username = dict( ),
password = dict(type='str', no_log=True ),
timeout = dict(type='int', default=60),
vdom = dict(type='str', default=None ),
backup = dict(type='bool', default=False),
backup_path = dict(type='path'),
backup_filename = dict(type='str'),
file_mode=dict(type='bool', default=False),
config_file=dict(type='path'),
host=dict(),
username=dict(),
password=dict(type='str', no_log=True),
timeout=dict(type='int', default=60),
vdom=dict(type='str'),
backup=dict(type='bool', default=False),
backup_path=dict(type='path'),
backup_filename=dict(type='str'),
)
fortios_required_if = [
['file_mode', False, ['host', 'username', 'password']],
['file_mode', True, ['config_file']],
['backup', True , ['backup_path'] ],
['backup', True, ['backup_path']],
]
fortios_mutually_exclusive = [
@ -67,12 +67,12 @@ fortios_mutually_exclusive = [
]
fortios_error_codes = {
'-3':"Object not found",
'-61':"Command error"
'-3': "Object not found",
'-61': "Command error"
}
def backup(module,running_config):
def backup(module, running_config):
backup_path = module.params['backup_path']
backup_filename = module.params['backup_filename']
if not os.path.exists(backup_path):
@ -91,8 +91,6 @@ def backup(module,running_config):
module.fail_json(msg="Can't create backup file {0} Permission denied ?".format(filename))
class AnsibleFortios(object):
def __init__(self, module):
if not HAS_PYFG:
@ -103,7 +101,6 @@ class AnsibleFortios(object):
}
self.module = module
def _connect(self):
if self.module.params['file_mode']:
self.forti_device = FortiOS('')
@ -122,11 +119,10 @@ class AnsibleFortios(object):
e = get_exception()
self.module.fail_json(msg='Error connecting device. %s' % e)
def load_config(self, path):
self.path = path
self._connect()
#load in file_mode
# load in file_mode
if self.module.params['file_mode']:
try:
f = open(self.module.params['config_file'], 'r')
@ -135,10 +131,10 @@ class AnsibleFortios(object):
except IOError:
e = get_exception()
self.module.fail_json(msg='Error reading configuration file. %s' % e)
self.forti_device.load_config(config_text=running, path = path)
self.forti_device.load_config(config_text=running, path=path)
else:
#get config
# get config
try:
self.forti_device.load_config(path=path)
except Exception:
@ -146,22 +142,21 @@ class AnsibleFortios(object):
e = get_exception()
self.module.fail_json(msg='Error reading running config. %s' % e)
#set configs in object
# set configs in object
self.result['running_config'] = self.forti_device.running_config.to_text()
self.candidate_config = self.forti_device.candidate_config
#backup if needed
# backup if needed
if self.module.params['backup']:
backup(self.module, self.forti_device.running_config.to_text())
def apply_changes(self):
change_string = self.forti_device.compare_config()
if change_string:
self.result['change_string'] = change_string
self.result['changed'] = True
#Commit if not check mode
# Commit if not check mode
if change_string and not self.module.check_mode:
if self.module.params['file_mode']:
try:
@ -175,35 +170,31 @@ class AnsibleFortios(object):
try:
self.forti_device.commit()
except FailedCommit:
#Something's wrong (rollback is automatic)
# Something's wrong (rollback is automatic)
self.forti_device.close()
e = get_exception()
error_list = self.get_error_infos(e)
self.module.fail_json(msg_error_list=error_list, msg="Unable to commit change, check your args, the error was %s" % e.message )
self.module.fail_json(msg_error_list=error_list, msg="Unable to commit change, check your args, the error was %s" % e.message)
self.forti_device.close()
self.module.exit_json(**self.result)
def del_block(self, block_id):
self.forti_device.candidate_config[self.path].del_block(block_id)
def add_block(self, block_id, block):
self.forti_device.candidate_config[self.path][block_id] = block
def get_error_infos(self, cli_errors):
error_list = []
for errors in cli_errors.args:
for error in errors:
error_code = error[0]
error_string = error[1]
error_type = fortios_error_codes.get(error_code,"unknown")
error_list.append(dict(error_code=error_code, error_type=error_type, error_string= error_string))
error_type = fortios_error_codes.get(error_code, "unknown")
error_list.append(dict(error_code=error_code, error_type=error_type, error_string=error_string))
return error_list
def get_empty_configuration_block(self, block_name, block_type):
return FortiConfig(block_name, block_type)

View file

@ -27,9 +27,6 @@
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from ansible.module_utils.gcp import gcp_connect
from ansible.module_utils.gcp import unexpected_error_msg as gcp_error
try:
from libcloud.dns.types import Provider
from libcloud.dns.providers import get_driver
@ -37,9 +34,13 @@ try:
except ImportError:
HAS_LIBCLOUD_BASE = False
from ansible.module_utils.gcp import gcp_connect
from ansible.module_utils.gcp import unexpected_error_msg as gcp_error
USER_AGENT_PRODUCT = "Ansible-gcdns"
USER_AGENT_VERSION = "v1"
def gcdns_connect(module, provider=None):
"""Return a GCP connection for Google Cloud DNS."""
if not HAS_LIBCLOUD_BASE:
@ -48,6 +49,7 @@ def gcdns_connect(module, provider=None):
provider = provider or Provider.GOOGLE
return gcp_connect(module, provider, get_driver, USER_AGENT_PRODUCT, USER_AGENT_VERSION)
def unexpected_error_msg(error):
"""Create an error string based on passed in error."""
return gcp_error(error)

View file

@ -25,10 +25,6 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from ansible.module_utils.gcp import gcp_connect
from ansible.module_utils.gcp import unexpected_error_msg as gcp_error
try:
from libcloud.compute.types import Provider
@ -37,9 +33,13 @@ try:
except ImportError:
HAS_LIBCLOUD_BASE = False
from ansible.module_utils.gcp import gcp_connect
from ansible.module_utils.gcp import unexpected_error_msg as gcp_error
USER_AGENT_PRODUCT = "Ansible-gce"
USER_AGENT_VERSION = "v1"
def gce_connect(module, provider=None):
"""Return a GCP connection for Google Compute Engine."""
if not HAS_LIBCLOUD_BASE:
@ -48,6 +48,7 @@ def gce_connect(module, provider=None):
return gcp_connect(module, provider, get_driver, USER_AGENT_PRODUCT, USER_AGENT_VERSION)
def unexpected_error_msg(error):
"""Create an error string based on passed in error."""
return gcp_error(error)

View file

@ -502,8 +502,7 @@ class GCPUtils(object):
@staticmethod
def underscore_to_camel(txt):
return txt.split('_')[0] + ''.join(x.capitalize()
or '_' for x in txt.split('_')[1:])
return txt.split('_')[0] + ''.join(x.capitalize() or '_' for x in txt.split('_')[1:])
@staticmethod
def remove_non_gcp_params(params):
@ -626,7 +625,7 @@ class GCPUtils(object):
# TODO(supertom): Isolate 'build-new-request' stuff.
resource_name_singular = GCPUtils.get_entity_name_from_resource_name(
resource_name)
if op_resp['operationType'] == 'insert' or not 'entity_name' in parsed_url:
if op_resp['operationType'] == 'insert' or 'entity_name' not in parsed_url:
parsed_url['entity_name'] = GCPUtils.parse_gcp_url(op_resp['targetLink'])[
'entity_name']
args = {'project': project_id,

View file

@ -82,9 +82,9 @@ def infinibox_argument_spec():
"""Return standard base dictionary used for the argument_spec argument in AnsibleModule"""
return dict(
system = dict(required=True),
user = dict(),
password = dict(no_log=True),
system=dict(required=True),
user=dict(),
password=dict(no_log=True),
)

View file

@ -29,6 +29,7 @@ try:
except ImportError:
import simplejson as json
# NB: a copy of this function exists in ../../modules/core/async_wrapper.py. Ensure any
# changes are propagated there.
def _filter_non_json_lines(data):

View file

@ -47,12 +47,13 @@ ARGS_DEFAULT_VALUE = {
'timeout': 10
}
def check_args(module, warnings):
provider = module.params['provider'] or {}
for key in junos_argument_spec:
if key not in ('provider',) and module.params[key]:
warnings.append('argument %s has been deprecated and will be '
'removed in a future version' % key)
'removed in a future version' % key)
# set argument's default value if not provided in input
# This is done to avoid unwanted argument deprecation warning
@ -66,6 +67,7 @@ def check_args(module, warnings):
if provider.get(param):
module.no_log_values.update(return_values(provider[param]))
def _validate_rollback_id(module, value):
try:
if not 0 <= int(value) <= 49:
@ -73,6 +75,7 @@ def _validate_rollback_id(module, value):
except ValueError:
module.fail_json(msg='rollback must be between 0 and 49')
def load_configuration(module, candidate=None, action='merge', rollback=None, format='xml'):
if all((candidate is None, rollback is None)):
@ -117,6 +120,7 @@ def load_configuration(module, candidate=None, action='merge', rollback=None, fo
cfg.append(candidate)
return send_request(module, obj)
def get_configuration(module, compare=False, format='xml', rollback='0'):
if format not in CONFIG_FORMATS:
module.fail_json(msg='invalid config format specified')
@ -127,6 +131,7 @@ def get_configuration(module, compare=False, format='xml', rollback='0'):
xattrs['rollback'] = str(rollback)
return send_request(module, Element('get-configuration', xattrs))
def commit_configuration(module, confirm=False, check=False, comment=None, confirm_timeout=None):
obj = Element('commit-configuration')
if confirm:
@ -141,6 +146,7 @@ def commit_configuration(module, confirm=False, check=False, comment=None, confi
subele.text = str(confirm_timeout)
return send_request(module, obj)
def command(module, command, format='text', rpc_only=False):
xattrs = {'format': format}
if rpc_only:
@ -148,8 +154,14 @@ def command(module, command, format='text', rpc_only=False):
xattrs['format'] = 'text'
return send_request(module, Element('command', xattrs, text=command))
lock_configuration = lambda x: send_request(x, Element('lock-configuration'))
unlock_configuration = lambda x: send_request(x, Element('unlock-configuration'))
def lock_configuration(x):
return send_request(x, Element('lock-configuration'))
def unlock_configuration(x):
return send_request(x, Element('unlock-configuration'))
@contextmanager
def locked_config(module):
@ -159,6 +171,7 @@ def locked_config(module):
finally:
unlock_configuration(module)
def get_diff(module):
reply = get_configuration(module, compare=True, format='text')
@ -166,6 +179,7 @@ def get_diff(module):
if output is not None:
return to_text(output.text, encoding='latin1').strip()
def load_config(module, candidate, warnings, action='merge', commit=False, format='xml',
comment=None, confirm=False, confirm_timeout=None):
@ -192,5 +206,6 @@ def load_config(module, candidate, warnings, action='merge', commit=False, forma
return diff
def get_param(module, key):
return module.params[key] or module.params['provider'].get(key)
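
Several hunks replace module-level lambda assignments with plain def statements, here for the junos lock_configuration/unlock_configuration helpers and again further down for the _obj_to_text/_obj_to_raw helpers. That is the standard fix for flake8's E731; a short sketch of the pattern, using a stand-in send_request():

# Hedged sketch of the E731 fix (lambda assignment -> def). send_request() is a
# stand-in here, not the real helper from the hunk above.
def send_request(module, element):
    return (module, element)


# before (flagged by E731):
#     lock_configuration = lambda x: send_request(x, 'lock-configuration')
# after -- identical behaviour, but the function gets a real __name__ for tracebacks:
def lock_configuration(x):
    return send_request(x, 'lock-configuration')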

View file

@ -153,7 +153,7 @@ def not_in_host_file(self, host):
if tokens[0].find(HASHED_KEY_MAGIC) == 0:
# this is a hashed known host entry
try:
(kn_salt,kn_host) = tokens[0][len(HASHED_KEY_MAGIC):].split("|",2)
(kn_salt, kn_host) = tokens[0][len(HASHED_KEY_MAGIC):].split("|", 2)
hash = hmac.new(kn_salt.decode('base64'), digestmod=sha1)
hash.update(host)
if hash.digest() == kn_host.decode('base64'):
@ -204,7 +204,7 @@ def add_host_key(module, fqdn, port=22, key_type="rsa", create_dir=False):
if rc != 0 or not out:
msg = 'failed to retrieve hostkey'
if not out:
msg += '. "%s" returned no matches.' % this_cmd
msg += '. "%s" returned no matches.' % this_cmd
else:
msg += ' using command "%s". [stdout]: %s' % (this_cmd, out)
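
The hashed known-hosts hunk above contains whitespace changes only; the check itself is unchanged. A hashed entry stores '|1|<base64 salt>|<base64 HMAC-SHA1 of the hostname>', and the check recomputes the HMAC with the stored salt. A self-contained Python 3 sketch of that check (the str.decode('base64') calls above are Python 2 only; host_matches_hashed_entry() is an invented name):

# Hedged sketch of the hashed known_hosts check shown above, using base64/hmac
# so it runs on Python 3; the function name is invented.
import base64
import hashlib
import hmac


def host_matches_hashed_entry(host, hashed_token):
    # hashed_token looks like '|1|<base64 salt>|<base64 HMAC-SHA1 digest>'
    _, _, salt_b64, digest_b64 = hashed_token.split('|')
    salt = base64.b64decode(salt_b64)
    expected = base64.b64decode(digest_b64)
    mac = hmac.new(salt, host.encode('utf-8'), hashlib.sha1)
    return hmac.compare_digest(mac.digest(), expected)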

View file

@ -38,12 +38,14 @@ except ImportError:
# httplib/http.client connection using unix domain socket
import socket
import ssl
try:
from httplib import HTTPConnection, HTTPSConnection
except ImportError:
# Python 3
from http.client import HTTPConnection, HTTPSConnection
class UnixHTTPConnection(HTTPConnection):
def __init__(self, path):
HTTPConnection.__init__(self, 'localhost')
@ -54,11 +56,13 @@ class UnixHTTPConnection(HTTPConnection):
sock.connect(self.path)
self.sock = sock
class LXDClientException(Exception):
def __init__(self, msg, **kwargs):
self.msg = msg
self.kwargs = kwargs
class LXDClient(object):
def __init__(self, url, key_file=None, cert_file=None, debug=False):
"""LXD Client.

View file

@ -35,6 +35,7 @@ try:
except ImportError:
mysqldb_found = False
def mysql_connect(module, login_user=None, login_password=None, config_file='', ssl_cert=None, ssl_key=None, ssl_ca=None, db=None, cursor_class=None,
connect_timeout=30):
config = {}

View file

@ -92,13 +92,20 @@ class ConfigLine(object):
assert isinstance(obj, ConfigLine), 'child must be of type `ConfigLine`'
self._children.append(obj)
def ignore_line(text, tokens=None):
for item in (tokens or DEFAULT_COMMENT_TOKENS):
if text.startswith(item):
return True
_obj_to_text = lambda x: [o.text for o in x]
_obj_to_raw = lambda x: [o.raw for o in x]
def _obj_to_text(x):
return [o.text for o in x]
def _obj_to_raw(x):
return [o.raw for o in x]
def _obj_to_block(objects, visited=None):
items = list()
@ -110,6 +117,7 @@ def _obj_to_block(objects, visited=None):
items.append(child)
return _obj_to_raw(items)
def dumps(objects, output='block', comments=False):
if output == 'block':
items = _obj_to_block(objects)
@ -130,6 +138,7 @@ def dumps(objects, output='block', comments=False):
return '\n'.join(items)
class NetworkConfig(object):
def __init__(self, indent=1, contents=None):
@ -328,7 +337,7 @@ class NetworkConfig(object):
offset = 0
obj = None
## global config command
# global config command
if not parents:
for line in lines:
item = ConfigLine(line)

View file

@ -24,17 +24,16 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import re
import time
import shlex
import time
from ansible.module_utils.basic import BOOLEANS_TRUE, BOOLEANS_FALSE
from ansible.module_utils.basic import get_exception
from ansible.module_utils.basic import BOOLEANS_TRUE, BOOLEANS_FALSE, get_exception
from ansible.module_utils.six import string_types, text_type
from ansible.module_utils.six.moves import zip
def to_list(val):
if isinstance(val, (list, tuple)):
return list(val)
@ -49,20 +48,23 @@ class FailedConditionsError(Exception):
super(FailedConditionsError, self).__init__(msg)
self.failed_conditions = failed_conditions
class FailedConditionalError(Exception):
def __init__(self, msg, failed_conditional):
super(FailedConditionalError, self).__init__(msg)
self.failed_conditional = failed_conditional
class AddCommandError(Exception):
def __init__(self, msg, command):
super(AddCommandError, self).__init__(msg)
self.command = command
class AddConditionError(Exception):
def __init__(self, msg, condition):
super(AddConditionError, self).__init__(msg)
self.condition=condition
self.condition = condition
class Cli(object):
@ -105,6 +107,7 @@ class Cli(object):
return responses
class Command(object):
def __init__(self, command, output=None, prompt=None, response=None,
@ -122,6 +125,7 @@ class Command(object):
def __str__(self):
return self.command_string
class CommandRunner(object):
def __init__(self, module):

View file

@ -26,14 +26,13 @@
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from contextlib import contextmanager
from xml.etree.ElementTree import Element, SubElement
from xml.etree.ElementTree import tostring, fromstring
from xml.etree.ElementTree import Element, SubElement, fromstring, tostring
from ansible.module_utils.connection import exec_command
NS_MAP = {'nc': "urn:ietf:params:xml:ns:netconf:base:1.0"}
def send_request(module, obj, check_rc=True):
request = tostring(obj)
rc, out, err = exec_command(module, request)
@ -58,6 +57,7 @@ def send_request(module, obj, check_rc=True):
return warnings
return fromstring(out)
def children(root, iterable):
for item in iterable:
try:
@ -65,33 +65,40 @@ def children(root, iterable):
except NameError:
ele = SubElement(root, item)
def lock(module, target='candidate'):
obj = Element('lock')
children(obj, ('target', target))
return send_request(module, obj)
def unlock(module, target='candidate'):
obj = Element('unlock')
children(obj, ('target', target))
return send_request(module, obj)
def commit(module):
return send_request(module, Element('commit'))
def discard_changes(module):
return send_request(module, Element('discard-changes'))
def validate(module):
obj = Element('validate')
children(obj, ('source', 'candidate'))
return send_request(module, obj)
def get_config(module, source='running', filter=None):
obj = Element('get-config')
children(obj, ('source', source))
children(obj, ('filter', filter))
return send_request(module, obj)
@contextmanager
def locked_config(module):
try:

View file

@ -52,6 +52,7 @@ NET_CONNECTION_ARGS = dict()
NET_CONNECTIONS = dict()
def _transitional_argument_spec():
argument_spec = {}
for key, value in iteritems(NET_TRANSPORT_ARGS):
@ -59,6 +60,7 @@ def _transitional_argument_spec():
argument_spec[key] = value
return argument_spec
def to_list(val):
if isinstance(val, (list, tuple)):
return list(val)
@ -75,12 +77,14 @@ class ModuleStub(object):
self.params[key] = value.get('default')
self.fail_json = fail_json
class NetworkError(Exception):
def __init__(self, msg, **kwargs):
super(NetworkError, self).__init__(msg)
self.kwargs = kwargs
class Config(object):
def __init__(self, connection):
@ -185,6 +189,7 @@ class NetworkModule(AnsibleModule):
exc = get_exception()
self.fail_json(msg=to_native(exc))
def register_transport(transport, default=False):
def register(cls):
NET_CONNECTIONS[transport] = cls
@ -193,6 +198,6 @@ def register_transport(transport, default=False):
return cls
return register
def add_argument(key, value):
NET_CONNECTION_ARGS[key] = value

View file

@ -24,9 +24,10 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from ansible.module_utils.six import iteritems
from ansible.module_utils.basic import AnsibleFallbackNotFound
from ansible.module_utils.six import iteritems
def to_list(val):
if isinstance(val, (list, tuple, set)):
@ -36,6 +37,7 @@ def to_list(val):
else:
return list()
class ComplexDict(object):
"""Transforms a dict to with an argument spec
@ -77,7 +79,7 @@ class ComplexDict(object):
if attr.get('read_from'):
spec = self._module.argument_spec.get(attr['read_from'])
if not spec:
raise ValueError('argument_spec %s does not exist' % attr['read_from'])
raise ValueError('argument_spec %s does not exist' % attr['read_from'])
for key, value in iteritems(spec):
if key not in attr:
attr[key] = value
@ -88,7 +90,6 @@ class ComplexDict(object):
self_has_key = True
attr['required'] = True
def _dict(self, value):
obj = {}
for name, attr in iteritems(self._attributes):
@ -131,8 +132,7 @@ class ComplexDict(object):
if 'choices' in attr:
if value[name] not in attr['choices']:
raise ValueError('%s must be one of %s, got %s' % \
(name, ', '.join(attr['choices']), value[name]))
raise ValueError('%s must be one of %s, got %s' % (name, ', '.join(attr['choices']), value[name]))
if value[name] is not None:
value_type = attr.get('type', 'str')
@ -141,6 +141,7 @@ class ComplexDict(object):
return value
class ComplexList(ComplexDict):
"""Extends ```ComplexDict``` to handle a list of dicts """
@ -148,4 +149,3 @@ class ComplexList(ComplexDict):
if not isinstance(values, (list, tuple)):
raise TypeError('value must be an ordered iterable')
return [(super(ComplexList, self).__call__(v)) for v in values]

View file

@ -30,6 +30,7 @@ import os
from ansible.module_utils.six import iteritems
def openstack_argument_spec():
# DEPRECATED: This argument spec is only used for the deprecated old
# OpenStack modules. It turns out that modern OpenStack auth is WAY
@ -37,17 +38,17 @@ def openstack_argument_spec():
# Consume standard OpenStack environment variables.
# This is mainly only useful for ad-hoc command line operation as
# in playbooks one would assume variables would be used appropriately
OS_AUTH_URL=os.environ.get('OS_AUTH_URL', 'http://127.0.0.1:35357/v2.0/')
OS_PASSWORD=os.environ.get('OS_PASSWORD', None)
OS_REGION_NAME=os.environ.get('OS_REGION_NAME', None)
OS_USERNAME=os.environ.get('OS_USERNAME', 'admin')
OS_TENANT_NAME=os.environ.get('OS_TENANT_NAME', OS_USERNAME)
OS_AUTH_URL = os.environ.get('OS_AUTH_URL', 'http://127.0.0.1:35357/v2.0/')
OS_PASSWORD = os.environ.get('OS_PASSWORD', None)
OS_REGION_NAME = os.environ.get('OS_REGION_NAME', None)
OS_USERNAME = os.environ.get('OS_USERNAME', 'admin')
OS_TENANT_NAME = os.environ.get('OS_TENANT_NAME', OS_USERNAME)
spec = dict(
login_username = dict(default=OS_USERNAME),
auth_url = dict(default=OS_AUTH_URL),
region_name = dict(default=OS_REGION_NAME),
availability_zone = dict(default=None),
login_username=dict(default=OS_USERNAME),
auth_url=dict(default=OS_AUTH_URL),
region_name=dict(default=OS_REGION_NAME),
availability_zone=dict(),
)
if OS_PASSWORD:
spec['login_password'] = dict(default=OS_PASSWORD)
@ -59,6 +60,7 @@ def openstack_argument_spec():
spec['login_tenant_name'] = dict(required=True)
return spec
def openstack_find_nova_addresses(addresses, ext_tag, key_name=None):
ret = []
@ -71,6 +73,7 @@ def openstack_find_nova_addresses(addresses, ext_tag, key_name=None):
ret.append(interface_spec['addr'])
return ret
def openstack_full_argument_spec(**kwargs):
spec = dict(
cloud=dict(default=None),

View file

@ -37,6 +37,7 @@ from ansible.module_utils.urls import fetch_url, url_argument_spec
add_argument('use_ssl', dict(default=True, type='bool'))
add_argument('validate_certs', dict(default=True, type='bool'))
def get_opsidl():
extschema = restparser.parseSchema(settings.get('ext_schema'))
ovsschema = settings.get('ovs_schema')
@ -129,7 +130,7 @@ class Rest(object):
def authorize(self, params, **kwargs):
raise NotImplementedError
### REST methods ###
# REST methods
def _url_builder(self, path):
if path[0] == '/':
@ -160,12 +161,12 @@ class Rest(object):
def delete(self, path, data=None, headers=None):
return self.request('DELETE', path, data, headers)
### Command methods ###
# Command methods
def run_commands(self, commands):
raise NotImplementedError
### Config methods ###
# Config methods
def configure(self, commands):
path = '/system/full-configuration'
@ -212,7 +213,7 @@ class Cli(CliBase):
NET_PASSWD_RE = re.compile(r"[\r\n]?password: $", re.I)
### Config methods ###
# Config methods
def configure(self, commands, **kwargs):
cmds = ['configure terminal']

View file

@ -1,5 +1,6 @@
_DEVICE_CONFIGS = {}
def get_config(module, flags=[]):
cmd = 'show running-config '
cmd += ' '.join(flags)

View file

@ -504,7 +504,6 @@ class BaseModule(object):
after[k] = update[k]
return after
def create(
self,
entity=None,
@ -579,9 +578,14 @@ class BaseModule(object):
# Wait for the entity to be created and to be in the defined state:
entity_service = self._service.service(entity.id)
state_condition = lambda entity: entity
def state_condition(entity):
return entity
if result_state:
state_condition = lambda entity: entity and entity.status == result_state
def state_condition(entity):
return entity and entity.status == result_state
wait(
service=entity_service,
condition=state_condition,

View file

@ -27,24 +27,21 @@
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# standard ansible imports
from ansible.module_utils.basic import get_exception
# standard PG imports
HAS_PSYCOPG2 = False
try:
import psycopg2
import psycopg2.extras
except ImportError:
pass
else:
HAS_PSYCOPG2 = True
except ImportError:
HAS_PSYCOPG2 = False
from ansible.module_utils.basic import get_exception
from ansible.module_utils.six import iteritems
class LibraryError(Exception):
pass
def ensure_libs(sslrootcert=None):
if not HAS_PSYCOPG2:
raise LibraryError('psycopg2 is not installed. we need psycopg2.')
@ -54,14 +51,14 @@ def ensure_libs(sslrootcert=None):
# no problems
return None
def postgres_common_argument_spec():
return dict(
login_user = dict(default='postgres'),
login_password = dict(default='', no_log=True),
login_host = dict(default=''),
login_unix_socket = dict(default=''),
port = dict(type='int', default=5432),
ssl_mode = dict(default='prefer', choices=['disable', 'allow', 'prefer', 'require', 'verify-ca', 'verify-full']),
ssl_rootcert = dict(default=None),
login_user=dict(default='postgres'),
login_password=dict(default='', no_log=True),
login_host=dict(default=''),
login_unix_socket=dict(default=''),
port=dict(type='int', default=5432),
ssl_mode=dict(default='prefer', choices=['disable', 'allow', 'prefer', 'require', 'verify-ca', 'verify-full']),
ssl_rootcert=dict(),
)
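
The argument-spec rewrites above apply PEP8 E251: no spaces around '=' when passing keyword arguments or default parameter values. A small self-contained sketch of the compliant form; the helper name and keys below are illustrative only and are not taken from this module:

def example_argument_spec():
    # E251-compliant: keyword arguments are written key=value with no surrounding
    # spaces, unlike the older  login_user = dict(default='postgres')  style.
    return dict(
        login_user=dict(default='postgres'),
        port=dict(type='int', default=5432),
        ssl_mode=dict(default='prefer'),
    )


print(example_argument_spec())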

View file

@ -25,10 +25,10 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import sys
def get_exception():
"""Get the current exception.

View file

@ -125,9 +125,9 @@ class Rhsm(RegistrationBase):
# Pass supplied **kwargs as parameters to subscription-manager. Ignore
# non-configuration parameters and replace '_' with '.'. For example,
# 'server_hostname' becomes '--system.hostname'.
for k,v in kwargs.items():
for k, v in kwargs.items():
if re.search(r'^(system|rhsm)_', k):
args.append('--%s=%s' % (k.replace('_','.'), v))
args.append('--%s=%s' % (k.replace('_', '.'), v))
self.module.run_command(args, check_rc=True)
@ -213,7 +213,7 @@ class RhsmPool(object):
def __init__(self, module, **kwargs):
self.module = module
for k,v in kwargs.items():
for k, v in kwargs.items():
setattr(self, k, v)
def __str__(self):
@ -255,7 +255,7 @@ class RhsmPools(object):
continue
# If a colon ':' is found, parse
elif ':' in line:
(key, value) = line.split(':',1)
(key, value) = line.split(':', 1)
key = key.strip().replace(" ", "") # To unify
value = value.strip()
if key in ['ProductName', 'SubscriptionName']:
@ -265,7 +265,7 @@ class RhsmPools(object):
# Associate value with most recently recorded product
products[-1].__setattr__(key, value)
# FIXME - log some warning?
#else:
# else:
# warnings.warn("Unhandled subscription key/value: %s/%s" % (key,value))
return products
@ -277,4 +277,3 @@ class RhsmPools(object):
for product in self.products:
if r.search(product._name):
yield product

View file

@ -25,20 +25,20 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import os
import shlex
import subprocess
import glob
import select
import os
import pickle
import platform
import select
import shlex
import subprocess
import traceback
from ansible.module_utils.six import PY2, b
from ansible.module_utils._text import to_bytes, to_text
def sysv_is_enabled(name):
'''
This function will check if the service name supplied
@ -48,6 +48,7 @@ def sysv_is_enabled(name):
'''
return bool(glob.glob('/etc/rc?.d/S??%s' % name))
def get_sysv_script(name):
'''
This function will return the expected path for an init script
@ -62,6 +63,7 @@ def get_sysv_script(name):
return result
def sysv_exists(name):
'''
This function will return True or False depending on
@ -71,6 +73,7 @@ def sysv_exists(name):
'''
return os.path.exists(get_sysv_script(name))
def fail_if_missing(module, found, service, msg=''):
'''
This function will return an error or exit gracefully depending on check mode status
@ -87,6 +90,7 @@ def fail_if_missing(module, found, service, msg=''):
else:
module.fail_json(msg='Could not find the requested service %s: %s' % (service, msg))
def daemonize(module, cmd):
'''
Execute a command while detaching as a deamon, returns rc, stdout, and stderr.
@ -100,10 +104,10 @@ def daemonize(module, cmd):
'''
# init some vars
chunk = 4096 #FIXME: pass in as arg?
chunk = 4096 # FIXME: pass in as arg?
errors = 'surrogate_or_strict'
#start it!
# start it!
try:
pipe = os.pipe()
pid = os.fork()
@ -162,7 +166,7 @@ def daemonize(module, cmd):
fds = [p.stdout, p.stderr]
# loop reading output till its done
output = { p.stdout: b(""), p.sterr: b("") }
output = {p.stdout: b(""), p.sterr: b("")}
while fds:
rfd, wfd, efd = select.select(fds, [], fds, 1)
if (rfd + wfd + efd) or p.poll():
@ -207,6 +211,7 @@ def daemonize(module, cmd):
# py2 and py3)
return pickle.loads(to_bytes(return_data, errors=errors))
def check_ps(module, pattern):
# Set ps flags

View file

@ -281,5 +281,8 @@ class CliBase(object):
exc = get_exception()
raise NetworkError(to_native(exc))
run_commands = lambda self, x: self.execute(to_list(x))
exec_command = lambda self, x: self.shell.send(self.to_command(x))
def run_commands(self, x):
return self.execute(to_list(x))
def exec_command(self, x):
return self.shell.send(self.to_command(x))
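
The two rewrites above are the usual fix for PEP8 E731: a lambda assigned to a name is replaced by an equivalent def, which gets a real __name__ and clearer tracebacks. A minimal, self-contained sketch of the same pattern; the Runner class and its methods are illustrative stand-ins, not code from this module:

class Runner(object):
    # Discouraged (E731): run_commands = lambda self, x: self.execute(to_list(x))
    # Preferred: an equivalent named method with the same behaviour.
    def run_commands(self, x):
        return self.execute(x if isinstance(x, list) else [x])

    def execute(self, commands):
        # stand-in for the real transport call
        return ['ran: %s' % c for c in commands]


print(Runner().run_commands('show version'))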

View file

@ -26,6 +26,7 @@
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def _get_quote_state(token, quote_char):
'''
the goal of this block is to determine if the quoted string
@ -36,7 +37,7 @@ def _get_quote_state(token, quote_char):
prev_char = None
for idx, cur_char in enumerate(token):
if idx > 0:
prev_char = token[idx-1]
prev_char = token[idx - 1]
if cur_char in '"\'' and prev_char != '\\':
if quote_char:
if cur_char == quote_char:
@ -45,13 +46,14 @@ def _get_quote_state(token, quote_char):
quote_char = cur_char
return quote_char
def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
'''
this function counts the number of opening/closing blocks for a
given opening/closing type and adjusts the current depth for that
block based on the difference
'''
num_open = token.count(open_token)
num_open = token.count(open_token)
num_close = token.count(close_token)
if num_open != num_close:
cur_depth += (num_open - num_close)
@ -59,6 +61,7 @@ def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
cur_depth = 0
return cur_depth
def split_args(args):
'''
Splits args on whitespace, but intelligently reassembles
@ -99,13 +102,13 @@ def split_args(args):
quote_char = None
inside_quotes = False
print_depth = 0 # used to count nested jinja2 {{ }} blocks
block_depth = 0 # used to count nested jinja2 {% %} blocks
comment_depth = 0 # used to count nested jinja2 {# #} blocks
print_depth = 0 # used to count nested jinja2 {{ }} blocks
block_depth = 0 # used to count nested jinja2 {% %} blocks
comment_depth = 0 # used to count nested jinja2 {# #} blocks
# now we loop over each split chunk, coalescing tokens if the white space
# split occurred within quotes or a jinja2 block of some kind
for itemidx,item in enumerate(items):
for itemidx, item in enumerate(items):
# we split on spaces and newlines separately, so that we
# can tell which character we split on for reassembly
@ -113,7 +116,7 @@ def split_args(args):
tokens = item.strip().split(' ')
line_continuation = False
for idx,token in enumerate(tokens):
for idx, token in enumerate(tokens):
# if we hit a line continuation character, but
# we're not inside quotes, ignore it and continue
@ -201,12 +204,13 @@ def split_args(args):
return params
def is_quoted(data):
return len(data) > 0 and (data[0] == '"' and data[-1] == '"' or data[0] == "'" and data[-1] == "'")
def unquote(data):
''' removes first and last quotes from a string, if the string starts and ends with the same quotes '''
if is_quoted(data):
return data[1:-1]
return data

View file

@ -105,20 +105,20 @@ def uldap():
def construct():
try:
secret_file = open('/etc/ldap.secret', 'r')
bind_dn = 'cn=admin,{}'.format(base_dn())
bind_dn = 'cn=admin,{}'.format(base_dn())
except IOError: # pragma: no cover
secret_file = open('/etc/machine.secret', 'r')
bind_dn = config_registry()["ldap/hostdn"]
pwd_line = secret_file.readline()
pwd = re.sub('\n', '', pwd_line)
bind_dn = config_registry()["ldap/hostdn"]
pwd_line = secret_file.readline()
pwd = re.sub('\n', '', pwd_line)
import univention.admin.uldap
return univention.admin.uldap.access(
host = config_registry()['ldap/master'],
base = base_dn(),
binddn = bind_dn,
bindpw = pwd,
start_tls = 1
host=config_registry()['ldap/master'],
base=base_dn(),
binddn=bind_dn,
bindpw=pwd,
start_tls=1,
)
return _singleton('uldap', construct)

View file

@ -206,17 +206,14 @@ except ImportError:
HAS_MATCH_HOSTNAME = False
if not HAS_MATCH_HOSTNAME:
###
### The following block of code is under the terms and conditions of the
### Python Software Foundation License
###
# The following block of code is under the terms and conditions of the
# Python Software Foundation License
"""The match_hostname() function from Python 3.4, essential when using SSL."""
class CertificateError(ValueError):
pass
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
@ -269,7 +266,6 @@ if not HAS_MATCH_HOSTNAME:
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
@ -299,20 +295,13 @@ if not HAS_MATCH_HOSTNAME:
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
raise CertificateError("hostname %r " "doesn't match either of %s" % (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")
raise CertificateError("no appropriate commonName or subjectAltName fields were found")
###
### End of Python Software Foundation Licensed code
###
# End of Python Software Foundation Licensed code
HAS_MATCH_HOSTNAME = True
@ -399,7 +388,7 @@ if hasattr(httplib, 'HTTPSConnection') and hasattr(urllib_request, 'HTTPSHandler
self.sock = self.context.wrap_socket(sock, server_hostname=server_hostname)
elif HAS_URLLIB3_SSL_WRAP_SOCKET:
self.sock = ssl_wrap_socket(sock, keyfile=self.key_file, cert_reqs=ssl.CERT_NONE, certfile=self.cert_file, ssl_version=PROTOCOL,
server_hostname=server_hostname)
server_hostname=server_hostname)
else:
self.sock = ssl.wrap_socket(sock, keyfile=self.key_file, certfile=self.cert_file, ssl_version=PROTOCOL)
@ -448,24 +437,24 @@ def generic_urlparse(parts):
if hasattr(parts, 'netloc'):
# urlparse is newer, just read the fields straight
# from the parts object
generic_parts['scheme'] = parts.scheme
generic_parts['netloc'] = parts.netloc
generic_parts['path'] = parts.path
generic_parts['params'] = parts.params
generic_parts['query'] = parts.query
generic_parts['scheme'] = parts.scheme
generic_parts['netloc'] = parts.netloc
generic_parts['path'] = parts.path
generic_parts['params'] = parts.params
generic_parts['query'] = parts.query
generic_parts['fragment'] = parts.fragment
generic_parts['username'] = parts.username
generic_parts['password'] = parts.password
generic_parts['hostname'] = parts.hostname
generic_parts['port'] = parts.port
generic_parts['port'] = parts.port
else:
# we have to use indexes, and then parse out
# the other parts not supported by indexing
generic_parts['scheme'] = parts[0]
generic_parts['netloc'] = parts[1]
generic_parts['path'] = parts[2]
generic_parts['params'] = parts[3]
generic_parts['query'] = parts[4]
generic_parts['scheme'] = parts[0]
generic_parts['netloc'] = parts[1]
generic_parts['path'] = parts[2]
generic_parts['params'] = parts[3]
generic_parts['query'] = parts[4]
generic_parts['fragment'] = parts[5]
# get the username, password, etc.
try:
@ -488,12 +477,12 @@ def generic_urlparse(parts):
generic_parts['username'] = username
generic_parts['password'] = password
generic_parts['hostname'] = hostname
generic_parts['port'] = port
generic_parts['port'] = port
except:
generic_parts['username'] = None
generic_parts['password'] = None
generic_parts['hostname'] = parts[1]
generic_parts['port'] = None
generic_parts['port'] = None
return generic_parts
@ -551,9 +540,8 @@ def RedirectHandlerFactory(follow_redirects=None, validate_certs=True):
if do_redirect:
# be conciliant with URIs containing a space
newurl = newurl.replace(' ', '%20')
newheaders = dict((k,v) for k,v in req.headers.items()
if k.lower() not in ("content-length", "content-type")
)
newheaders = dict((k, v) for k, v in req.headers.items()
if k.lower() not in ("content-length", "content-type"))
try:
# Python 2-3.3
origin_req_host = req.get_origin_req_host()
@ -561,9 +549,9 @@ def RedirectHandlerFactory(follow_redirects=None, validate_certs=True):
# Python 3.4+
origin_req_host = req.origin_req_host
return urllib_request.Request(newurl,
headers=newheaders,
origin_req_host=origin_req_host,
unverifiable=True)
headers=newheaders,
origin_req_host=origin_req_host,
unverifiable=True)
else:
raise urllib_error.HTTPError(req.get_full_url(), code, msg, hdrs, fp)
@ -660,7 +648,7 @@ class SSLValidationHandler(urllib_request.BaseHandler):
dir_contents = os.listdir(path)
for f in dir_contents:
full_path = os.path.join(path, f)
if os.path.isfile(full_path) and os.path.splitext(f)[1] in ('.crt','.pem'):
if os.path.isfile(full_path) and os.path.splitext(f)[1] in ('.crt', '.pem'):
try:
cert_file = open(full_path, 'rb')
cert = cert_file.read()
@ -738,7 +726,7 @@ class SSLValidationHandler(urllib_request.BaseHandler):
if proxy_parts.get('scheme') == 'http':
s.sendall(self.CONNECT_COMMAND % (self.hostname, self.port))
if proxy_parts.get('username'):
credentials = "%s:%s" % (proxy_parts.get('username',''), proxy_parts.get('password',''))
credentials = "%s:%s" % (proxy_parts.get('username', ''), proxy_parts.get('password', ''))
s.sendall(b('Proxy-Authorization: Basic %s\r\n') % base64.b64encode(to_bytes(credentials, errors='surrogate_or_strict')).strip())
s.sendall(b('\r\n'))
connect_result = b("")
@ -767,7 +755,7 @@ class SSLValidationHandler(urllib_request.BaseHandler):
ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL)
match_hostname(ssl_s.getpeercert(), self.hostname)
# close the ssl connection
#ssl_s.unwrap()
# ssl_s.unwrap()
s.close()
except (ssl.SSLError, CertificateError):
e = get_exception()
@ -923,7 +911,7 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True,
data = to_bytes(data, nonstring='passthru')
if method:
if method.upper() not in ('OPTIONS','GET','HEAD','POST','PUT','DELETE','TRACE','CONNECT','PATCH'):
if method.upper() not in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'TRACE', 'CONNECT', 'PATCH'):
raise ConnectionError('invalid HTTP request method; %s' % method.upper())
request = RequestWithMethod(url, method.upper(), data)
else:
@ -951,7 +939,7 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True,
request.add_header(header, headers[header])
urlopen_args = [request, None]
if sys.version_info >= (2,6,0):
if sys.version_info >= (2, 6, 0):
# urlopen in python prior to 2.6.0 did not
# have a timeout parameter
urlopen_args.append(timeout)

View file

@ -15,6 +15,7 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
try:
from pyvcloud.vcloudair import VCA
HAS_PYVCLOUD = True
@ -29,16 +30,18 @@ LOGIN_HOST = {'vca': 'vca.vmware.com', 'vchs': 'vchs.vmware.com'}
DEFAULT_SERVICE_TYPE = 'vca'
DEFAULT_VERSION = '5.7'
class VcaError(Exception):
def __init__(self, msg, **kwargs):
self.kwargs = kwargs
super(VcaError, self).__init__(msg)
def vca_argument_spec():
return dict(
username=dict(type='str', aliases=['user'], required=True),
password=dict(type='str', aliases=['pass','passwd'], required=True, no_log=True),
password=dict(type='str', aliases=['pass', 'passwd'], required=True, no_log=True),
org=dict(),
service_id=dict(),
instance_id=dict(),
@ -50,6 +53,7 @@ def vca_argument_spec():
verify_certs=dict(type='bool', default=True)
)
class VcaAnsibleModule(AnsibleModule):
def __init__(self, *args, **kwargs):
@ -193,7 +197,6 @@ class VcaAnsibleModule(AnsibleModule):
self.exit_json(**kwargs)
# -------------------------------------------------------------
# 9/18/2015 @privateip
# All of the functions below here were migrated from the original
@ -206,6 +209,7 @@ VCA_REQ_ARGS = ['instance_id', 'vdc_name']
VCHS_REQ_ARGS = ['service_id']
VCD_REQ_ARGS = []
def _validate_module(module):
if not HAS_PYVCLOUD:
module.fail_json(msg="python module pyvcloud is needed for this module")
@ -216,19 +220,19 @@ def _validate_module(module):
for arg in VCA_REQ_ARGS:
if module.params.get(arg) is None:
module.fail_json(msg="argument %s is mandatory when service type "
"is vca" % arg)
"is vca" % arg)
if service_type == 'vchs':
for arg in VCHS_REQ_ARGS:
if module.params.get(arg) is None:
module.fail_json(msg="argument %s is mandatory when service type "
"is vchs" % arg)
"is vchs" % arg)
if service_type == 'vcd':
for arg in VCD_REQ_ARGS:
if module.params.get(arg) is None:
module.fail_json(msg="argument %s is mandatory when service type "
"is vcd" % arg)
"is vcd" % arg)
def serialize_instances(instance_list):
@ -237,6 +241,7 @@ def serialize_instances(instance_list):
instances.append(dict(apiUrl=i['apiUrl'], instance_id=i['id']))
return instances
def _vca_login(vca, password, instance):
if not vca.login(password=password):
raise VcaError("Login Failed: Please check username or password",
@ -245,10 +250,11 @@ def _vca_login(vca, password, instance):
if not vca.login_to_instance_sso(instance=instance):
s_json = serialize_instances(vca.instances)
raise VcaError("Login to Instance failed: Seems like instance_id provided "
"is wrong .. Please check", valid_instances=s_json)
"is wrong .. Please check", valid_instances=s_json)
return vca
def _vchs_login(vca, password, service, org):
if not vca.login(password=password):
raise VcaError("Login Failed: Please check username or password",
@ -256,7 +262,7 @@ def _vchs_login(vca, password, service, org):
if not vca.login_to_org(service, org):
raise VcaError("Failed to login to org, Please check the orgname",
error=vca.response.content)
error=vca.response.content)
def _vcd_login(vca, password, org):
@ -272,6 +278,7 @@ def _vcd_login(vca, password, org):
if not vca.login(token=vca.token, org=org, org_url=vca.vcloud_session.org_url):
raise VcaError("Failed to login to org", error=vca.response.content)
def vca_login(module):
service_type = module.params.get('service_type')
username = module.params.get('username')
@ -323,8 +330,3 @@ def vca_login(module):
module.fail_json(msg=e.message, **e.kwargs)
return vca