mirror of
https://github.com/ansible-collections/community.general.git
synced 2025-10-23 12:33:59 -07:00
use f-strings in module utils (#10901)
Some checks are pending
EOL CI / EOL Sanity (Ⓐ2.17) (push) Waiting to run
EOL CI / EOL Units (Ⓐ2.17+py3.10) (push) Waiting to run
EOL CI / EOL Units (Ⓐ2.17+py3.12) (push) Waiting to run
EOL CI / EOL Units (Ⓐ2.17+py3.7) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+alpine319+py:azp/posix/1/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+alpine319+py:azp/posix/2/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+alpine319+py:azp/posix/3/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+fedora39+py:azp/posix/1/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+fedora39+py:azp/posix/2/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+fedora39+py:azp/posix/3/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+ubuntu2004+py:azp/posix/1/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+ubuntu2004+py:azp/posix/2/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+ubuntu2004+py:azp/posix/3/) (push) Waiting to run
nox / Run extra sanity tests (push) Waiting to run
Some checks are pending
EOL CI / EOL Sanity (Ⓐ2.17) (push) Waiting to run
EOL CI / EOL Units (Ⓐ2.17+py3.10) (push) Waiting to run
EOL CI / EOL Units (Ⓐ2.17+py3.12) (push) Waiting to run
EOL CI / EOL Units (Ⓐ2.17+py3.7) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+alpine319+py:azp/posix/1/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+alpine319+py:azp/posix/2/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+alpine319+py:azp/posix/3/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+fedora39+py:azp/posix/1/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+fedora39+py:azp/posix/2/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+fedora39+py:azp/posix/3/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+ubuntu2004+py:azp/posix/1/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+ubuntu2004+py:azp/posix/2/) (push) Waiting to run
EOL CI / EOL I (Ⓐ2.17+ubuntu2004+py:azp/posix/3/) (push) Waiting to run
nox / Run extra sanity tests (push) Waiting to run
* use f-strings in module utils * Apply suggestions from code review Co-authored-by: Felix Fontein <felix@fontein.de> * remove unused imports --------- Co-authored-by: Felix Fontein <felix@fontein.de>
This commit is contained in:
parent
74b6a0294a
commit
b85e263466
51 changed files with 270 additions and 382 deletions
|
@ -56,7 +56,7 @@ class FileLock:
|
||||||
Default is None, wait indefinitely until lock is released.
|
Default is None, wait indefinitely until lock is released.
|
||||||
:returns: True
|
:returns: True
|
||||||
'''
|
'''
|
||||||
lock_path = os.path.join(tmpdir, 'ansible-{0}.lock'.format(os.path.basename(path)))
|
lock_path = os.path.join(tmpdir, f'ansible-{os.path.basename(path)}.lock')
|
||||||
l_wait = 0.1
|
l_wait = 0.1
|
||||||
r_exception = IOError
|
r_exception = IOError
|
||||||
if sys.version_info[0] == 3:
|
if sys.version_info[0] == 3:
|
||||||
|
@ -82,7 +82,7 @@ class FileLock:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
self.lockfd.close()
|
self.lockfd.close()
|
||||||
raise LockTimeout('{0} sec'.format(lock_timeout))
|
raise LockTimeout(f'{lock_timeout} sec')
|
||||||
|
|
||||||
fcntl.flock(self.lockfd, fcntl.LOCK_EX)
|
fcntl.flock(self.lockfd, fcntl.LOCK_EX)
|
||||||
os.chmod(lock_path, stat.S_IWRITE | stat.S_IREAD)
|
os.chmod(lock_path, stat.S_IWRITE | stat.S_IREAD)
|
||||||
|
|
|
@ -68,7 +68,7 @@ class StormConfig(SSHConfig):
|
||||||
while (i < len(line)) and not line[i].isspace():
|
while (i < len(line)) and not line[i].isspace():
|
||||||
i += 1
|
i += 1
|
||||||
if i == len(line):
|
if i == len(line):
|
||||||
raise Exception('Unparsable line: %r' % line)
|
raise Exception(f'Unparsable line: {line!r}')
|
||||||
key = line[:i].lower()
|
key = line[:i].lower()
|
||||||
value = line[i:].lstrip()
|
value = line[i:].lstrip()
|
||||||
if key == 'host':
|
if key == 'host':
|
||||||
|
@ -185,7 +185,7 @@ class ConfigParser(object):
|
||||||
if isinstance(value, int):
|
if isinstance(value, int):
|
||||||
value = str(value)
|
value = str(value)
|
||||||
|
|
||||||
searchable_information += " " + value
|
searchable_information += f" {value}"
|
||||||
|
|
||||||
if search_string in searchable_information:
|
if search_string in searchable_information:
|
||||||
results.append(host_entry)
|
results.append(host_entry)
|
||||||
|
@ -218,21 +218,17 @@ class ConfigParser(object):
|
||||||
|
|
||||||
for host_item in self.config_data:
|
for host_item in self.config_data:
|
||||||
if host_item.get("type") in ['comment', 'empty_line']:
|
if host_item.get("type") in ['comment', 'empty_line']:
|
||||||
file_content += host_item.get("value") + "\n"
|
file_content += f"{host_item.get('value')}\n"
|
||||||
continue
|
continue
|
||||||
host_item_content = "Host {0}\n".format(host_item.get("host"))
|
host_item_content = f"Host {host_item.get('host')}\n"
|
||||||
for key, value in host_item.get("options").items():
|
for key, value in host_item.get("options").items():
|
||||||
if isinstance(value, list):
|
if isinstance(value, list):
|
||||||
sub_content = ""
|
sub_content = ""
|
||||||
for value_ in value:
|
for value_ in value:
|
||||||
sub_content += " {0} {1}\n".format(
|
sub_content += f" {key} {value_}\n"
|
||||||
key, value_
|
|
||||||
)
|
|
||||||
host_item_content += sub_content
|
host_item_content += sub_content
|
||||||
else:
|
else:
|
||||||
host_item_content += " {0} {1}\n".format(
|
host_item_content += f" {key} {value}\n"
|
||||||
key, value
|
|
||||||
)
|
|
||||||
file_content += host_item_content
|
file_content += host_item_content
|
||||||
|
|
||||||
return file_content
|
return file_content
|
||||||
|
|
|
@ -88,10 +88,10 @@ def connect_to_acs(acs_module, region, **params):
|
||||||
if not conn:
|
if not conn:
|
||||||
if region not in [acs_module_region.id for acs_module_region in acs_module.regions()]:
|
if region not in [acs_module_region.id for acs_module_region in acs_module.regions()]:
|
||||||
raise AnsibleACSError(
|
raise AnsibleACSError(
|
||||||
"Region %s does not seem to be available for acs module %s." % (region, acs_module.__name__))
|
f"Region {region} does not seem to be available for acs module {acs_module.__name__}.")
|
||||||
else:
|
else:
|
||||||
raise AnsibleACSError(
|
raise AnsibleACSError(
|
||||||
"Unknown problem connecting to region %s for acs module %s." % (region, acs_module.__name__))
|
f"Unknown problem connecting to region {region} for acs module {acs_module.__name__}.")
|
||||||
return conn
|
return conn
|
||||||
|
|
||||||
|
|
||||||
|
@ -125,7 +125,7 @@ def get_assume_role(params):
|
||||||
|
|
||||||
def get_profile(params):
|
def get_profile(params):
|
||||||
if not params['alicloud_access_key'] and not params['ecs_role_name'] and params['profile']:
|
if not params['alicloud_access_key'] and not params['ecs_role_name'] and params['profile']:
|
||||||
path = params['shared_credentials_file'] if params['shared_credentials_file'] else os.getenv('HOME') + '/.aliyun/config.json'
|
path = params['shared_credentials_file'] if params['shared_credentials_file'] else f"{os.getenv('HOME')}/.aliyun/config.json"
|
||||||
auth = {}
|
auth = {}
|
||||||
with open(path, 'r') as f:
|
with open(path, 'r') as f:
|
||||||
for pro in json.load(f)['profiles']:
|
for pro in json.load(f)['profiles']:
|
||||||
|
|
|
@ -26,7 +26,7 @@ __channel_map = {
|
||||||
|
|
||||||
def __map_channel(channel_name):
|
def __map_channel(channel_name):
|
||||||
if channel_name not in __channel_map:
|
if channel_name not in __channel_map:
|
||||||
raise ValueError("Unknown channel name '%s'" % channel_name)
|
raise ValueError(f"Unknown channel name '{channel_name}'")
|
||||||
return __channel_map[channel_name]
|
return __channel_map[channel_name]
|
||||||
|
|
||||||
|
|
||||||
|
@ -41,7 +41,7 @@ def sdkmanager_runner(module, **kwargs):
|
||||||
list=cmd_runner_fmt.as_fixed('--list'),
|
list=cmd_runner_fmt.as_fixed('--list'),
|
||||||
newer=cmd_runner_fmt.as_fixed("--newer"),
|
newer=cmd_runner_fmt.as_fixed("--newer"),
|
||||||
sdk_root=cmd_runner_fmt.as_opt_eq_val("--sdk_root"),
|
sdk_root=cmd_runner_fmt.as_opt_eq_val("--sdk_root"),
|
||||||
channel=cmd_runner_fmt.as_func(lambda x: ["{0}={1}".format("--channel", __map_channel(x))])
|
channel=cmd_runner_fmt.as_func(lambda x: [f"--channel={__map_channel(x)}"])
|
||||||
),
|
),
|
||||||
force_lang="C.UTF-8", # Without this, sdkmanager binary crashes
|
force_lang="C.UTF-8", # Without this, sdkmanager binary crashes
|
||||||
**kwargs
|
**kwargs
|
||||||
|
@ -126,7 +126,7 @@ class AndroidSdkManager(object):
|
||||||
unknown_package_regex = self._RE_UNKNOWN_PACKAGE.match(line)
|
unknown_package_regex = self._RE_UNKNOWN_PACKAGE.match(line)
|
||||||
if unknown_package_regex:
|
if unknown_package_regex:
|
||||||
package = unknown_package_regex.group('package')
|
package = unknown_package_regex.group('package')
|
||||||
raise SdkManagerException("Unknown package %s" % package)
|
raise SdkManagerException(f"Unknown package {package}")
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _parse_packages(stdout, header_regexp, row_regexp):
|
def _parse_packages(stdout, header_regexp, row_regexp):
|
||||||
|
|
|
@ -15,7 +15,7 @@ def normalize_subvolume_path(path):
|
||||||
In addition, if the path is prefixed with a leading <FS_TREE>, this value is removed.
|
In addition, if the path is prefixed with a leading <FS_TREE>, this value is removed.
|
||||||
"""
|
"""
|
||||||
fstree_stripped = re.sub(r'^<FS_TREE>', '', path)
|
fstree_stripped = re.sub(r'^<FS_TREE>', '', path)
|
||||||
result = re.sub(r'/+$', '', re.sub(r'/+', '/', '/' + fstree_stripped))
|
result = re.sub(r'/+$', '', re.sub(r'/+', '/', f"/{fstree_stripped}"))
|
||||||
return result if len(result) > 0 else '/'
|
return result if len(result) > 0 else '/'
|
||||||
|
|
||||||
|
|
||||||
|
@ -34,7 +34,7 @@ class BtrfsCommands(object):
|
||||||
self.__btrfs = self.__module.get_bin_path("btrfs", required=True)
|
self.__btrfs = self.__module.get_bin_path("btrfs", required=True)
|
||||||
|
|
||||||
def filesystem_show(self):
|
def filesystem_show(self):
|
||||||
command = "%s filesystem show -d" % (self.__btrfs)
|
command = f"{self.__btrfs} filesystem show -d"
|
||||||
result = self.__module.run_command(command, check_rc=True)
|
result = self.__module.run_command(command, check_rc=True)
|
||||||
stdout = [x.strip() for x in result[1].splitlines()]
|
stdout = [x.strip() for x in result[1].splitlines()]
|
||||||
filesystems = []
|
filesystems = []
|
||||||
|
@ -64,7 +64,7 @@ class BtrfsCommands(object):
|
||||||
return re.sub(r'^.*path\s', '', line)
|
return re.sub(r'^.*path\s', '', line)
|
||||||
|
|
||||||
def subvolumes_list(self, filesystem_path):
|
def subvolumes_list(self, filesystem_path):
|
||||||
command = "%s subvolume list -tap %s" % (self.__btrfs, filesystem_path)
|
command = f"{self.__btrfs} subvolume list -tap {filesystem_path}"
|
||||||
result = self.__module.run_command(command, check_rc=True)
|
result = self.__module.run_command(command, check_rc=True)
|
||||||
stdout = [x.split('\t') for x in result[1].splitlines()]
|
stdout = [x.split('\t') for x in result[1].splitlines()]
|
||||||
subvolumes = [{'id': 5, 'parent': None, 'path': '/'}]
|
subvolumes = [{'id': 5, 'parent': None, 'path': '/'}]
|
||||||
|
@ -143,7 +143,7 @@ class BtrfsInfoProvider(object):
|
||||||
return [m for m in mountpoints if (m['device'] in devices)]
|
return [m for m in mountpoints if (m['device'] in devices)]
|
||||||
|
|
||||||
def __find_mountpoints(self):
|
def __find_mountpoints(self):
|
||||||
command = "%s -t btrfs -nvP" % self.__findmnt_path
|
command = f"{self.__findmnt_path} -t btrfs -nvP"
|
||||||
result = self.__module.run_command(command)
|
result = self.__module.run_command(command)
|
||||||
mountpoints = []
|
mountpoints = []
|
||||||
if result[0] == 0:
|
if result[0] == 0:
|
||||||
|
@ -165,13 +165,13 @@ class BtrfsInfoProvider(object):
|
||||||
'subvolid': self.__extract_mount_subvolid(groups['options']),
|
'subvolid': self.__extract_mount_subvolid(groups['options']),
|
||||||
}
|
}
|
||||||
else:
|
else:
|
||||||
raise BtrfsModuleException("Failed to parse findmnt result for line: '%s'" % line)
|
raise BtrfsModuleException(f"Failed to parse findmnt result for line: '{line}'")
|
||||||
|
|
||||||
def __extract_mount_subvolid(self, mount_options):
|
def __extract_mount_subvolid(self, mount_options):
|
||||||
for option in mount_options.split(','):
|
for option in mount_options.split(','):
|
||||||
if option.startswith('subvolid='):
|
if option.startswith('subvolid='):
|
||||||
return int(option[len('subvolid='):])
|
return int(option[len('subvolid='):])
|
||||||
raise BtrfsModuleException("Failed to find subvolid for mountpoint in options '%s'" % mount_options)
|
raise BtrfsModuleException(f"Failed to find subvolid for mountpoint in options '{mount_options}'")
|
||||||
|
|
||||||
|
|
||||||
class BtrfsSubvolume(object):
|
class BtrfsSubvolume(object):
|
||||||
|
@ -222,7 +222,7 @@ class BtrfsSubvolume(object):
|
||||||
relative = absolute_child_path[len(path):]
|
relative = absolute_child_path[len(path):]
|
||||||
return re.sub(r'^/*', '', relative)
|
return re.sub(r'^/*', '', relative)
|
||||||
else:
|
else:
|
||||||
raise BtrfsModuleException("Path '%s' doesn't start with '%s'" % (absolute_child_path, path))
|
raise BtrfsModuleException(f"Path '{absolute_child_path}' doesn't start with '{path}'")
|
||||||
|
|
||||||
def get_parent_subvolume(self):
|
def get_parent_subvolume(self):
|
||||||
parent_id = self.parent
|
parent_id = self.parent
|
||||||
|
@ -373,7 +373,7 @@ class BtrfsFilesystem(object):
|
||||||
if nearest.path == subvolume_name:
|
if nearest.path == subvolume_name:
|
||||||
nearest = nearest.get_parent_subvolume()
|
nearest = nearest.get_parent_subvolume()
|
||||||
if nearest is None or nearest.get_mounted_path() is None:
|
if nearest is None or nearest.get_mounted_path() is None:
|
||||||
raise BtrfsModuleException("Failed to find a path '%s' through a mounted parent subvolume" % subvolume_name)
|
raise BtrfsModuleException(f"Failed to find a path '{subvolume_name}' through a mounted parent subvolume")
|
||||||
else:
|
else:
|
||||||
return nearest.get_mounted_path() + os.path.sep + nearest.get_child_relative_path(subvolume_name)
|
return nearest.get_mounted_path() + os.path.sep + nearest.get_child_relative_path(subvolume_name)
|
||||||
|
|
||||||
|
@ -431,12 +431,9 @@ class BtrfsFilesystemsProvider(object):
|
||||||
if len(matching) == 1:
|
if len(matching) == 1:
|
||||||
return matching[0]
|
return matching[0]
|
||||||
else:
|
else:
|
||||||
raise BtrfsModuleException("Found %d filesystems matching criteria uuid=%s label=%s device=%s" % (
|
raise BtrfsModuleException(
|
||||||
len(matching),
|
f"Found {len(matching)} filesystems matching criteria uuid={criteria['uuid']} label={criteria['label']} device={criteria['device']}"
|
||||||
criteria['uuid'],
|
)
|
||||||
criteria['label'],
|
|
||||||
criteria['device']
|
|
||||||
))
|
|
||||||
|
|
||||||
def __filesystem_matches_criteria(self, filesystem, criteria):
|
def __filesystem_matches_criteria(self, filesystem, criteria):
|
||||||
return ((criteria['uuid'] is None or filesystem.uuid == criteria['uuid']) and
|
return ((criteria['uuid'] is None or filesystem.uuid == criteria['uuid']) and
|
||||||
|
|
|
@ -132,7 +132,7 @@ class CloudRetry(object):
|
||||||
if isinstance(e, cls.base_class): # pylint: disable=isinstance-second-argument-not-valid-type
|
if isinstance(e, cls.base_class): # pylint: disable=isinstance-second-argument-not-valid-type
|
||||||
response_code = cls.status_code_from_exception(e)
|
response_code = cls.status_code_from_exception(e)
|
||||||
if cls.found(response_code, catch_extra_error_codes):
|
if cls.found(response_code, catch_extra_error_codes):
|
||||||
msg = "{0}: Retrying in {1} seconds...".format(str(e), delay)
|
msg = f"{e}: Retrying in {delay} seconds..."
|
||||||
syslog.syslog(syslog.LOG_INFO, msg)
|
syslog.syslog(syslog.LOG_INFO, msg)
|
||||||
time.sleep(delay)
|
time.sleep(delay)
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -30,18 +30,10 @@ class MissingArgumentFormat(CmdRunnerException):
|
||||||
self.args_formats = args_formats
|
self.args_formats = args_formats
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "MissingArgumentFormat({0!r}, {1!r}, {2!r})".format(
|
return f"MissingArgumentFormat({self.arg!r}, {self.args_order!r}, {self.args_formats!r})"
|
||||||
self.arg,
|
|
||||||
self.args_order,
|
|
||||||
self.args_formats,
|
|
||||||
)
|
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return "Cannot find format for parameter {0} {1} in: {2}".format(
|
return f"Cannot find format for parameter {self.arg} {self.args_order} in: {self.args_formats}"
|
||||||
self.arg,
|
|
||||||
self.args_order,
|
|
||||||
self.args_formats,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class MissingArgumentValue(CmdRunnerException):
|
class MissingArgumentValue(CmdRunnerException):
|
||||||
|
@ -50,16 +42,10 @@ class MissingArgumentValue(CmdRunnerException):
|
||||||
self.arg = arg
|
self.arg = arg
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "MissingArgumentValue({0!r}, {1!r})".format(
|
return f"MissingArgumentValue({self.args_order!r}, {self.arg!r})"
|
||||||
self.args_order,
|
|
||||||
self.arg,
|
|
||||||
)
|
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return "Cannot find value for parameter {0} in {1}".format(
|
return f"Cannot find value for parameter {self.arg} in {self.args_order}"
|
||||||
self.arg,
|
|
||||||
self.args_order,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class FormatError(CmdRunnerException):
|
class FormatError(CmdRunnerException):
|
||||||
|
@ -71,19 +57,10 @@ class FormatError(CmdRunnerException):
|
||||||
super(FormatError, self).__init__()
|
super(FormatError, self).__init__()
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "FormatError({0!r}, {1!r}, {2!r}, {3!r})".format(
|
return f"FormatError({self.name!r}, {self.value!r}, {self.args_formats!r}, {self.exc!r})"
|
||||||
self.name,
|
|
||||||
self.value,
|
|
||||||
self.args_formats,
|
|
||||||
self.exc,
|
|
||||||
)
|
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return "Failed to format parameter {0} with value {1}: {2}".format(
|
return f"Failed to format parameter {self.name} with value {self.value}: {self.exc}"
|
||||||
self.name,
|
|
||||||
self.value,
|
|
||||||
self.exc,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class CmdRunner(object):
|
class CmdRunner(object):
|
||||||
|
|
|
@ -27,11 +27,7 @@ class _ArgFormat(object):
|
||||||
return [str(x) for x in f(value)]
|
return [str(x) for x in f(value)]
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return "<ArgFormat: func={0}, ignore_none={1}, ignore_missing_value={2}>".format(
|
return f"<ArgFormat: func={self.func}, ignore_none={self.ignore_none}, ignore_missing_value={self.ignore_missing_value}>"
|
||||||
self.func,
|
|
||||||
self.ignore_none,
|
|
||||||
self.ignore_missing_value,
|
|
||||||
)
|
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return str(self)
|
return str(self)
|
||||||
|
@ -51,7 +47,7 @@ def as_bool_not(args):
|
||||||
|
|
||||||
|
|
||||||
def as_optval(arg, ignore_none=None):
|
def as_optval(arg, ignore_none=None):
|
||||||
return _ArgFormat(lambda value: ["{0}{1}".format(arg, value)], ignore_none=ignore_none)
|
return _ArgFormat(lambda value: [f"{arg}{value}"], ignore_none=ignore_none)
|
||||||
|
|
||||||
|
|
||||||
def as_opt_val(arg, ignore_none=None):
|
def as_opt_val(arg, ignore_none=None):
|
||||||
|
@ -59,16 +55,16 @@ def as_opt_val(arg, ignore_none=None):
|
||||||
|
|
||||||
|
|
||||||
def as_opt_eq_val(arg, ignore_none=None):
|
def as_opt_eq_val(arg, ignore_none=None):
|
||||||
return _ArgFormat(lambda value: ["{0}={1}".format(arg, value)], ignore_none=ignore_none)
|
return _ArgFormat(lambda value: [f"{arg}={value}"], ignore_none=ignore_none)
|
||||||
|
|
||||||
|
|
||||||
def as_list(ignore_none=None, min_len=0, max_len=None):
|
def as_list(ignore_none=None, min_len=0, max_len=None):
|
||||||
def func(value):
|
def func(value):
|
||||||
value = _ensure_list(value)
|
value = _ensure_list(value)
|
||||||
if len(value) < min_len:
|
if len(value) < min_len:
|
||||||
raise ValueError("Parameter must have at least {0} element(s)".format(min_len))
|
raise ValueError(f"Parameter must have at least {min_len} element(s)")
|
||||||
if max_len is not None and len(value) > max_len:
|
if max_len is not None and len(value) > max_len:
|
||||||
raise ValueError("Parameter must have at most {0} element(s)".format(max_len))
|
raise ValueError(f"Parameter must have at most {max_len} element(s)")
|
||||||
return value
|
return value
|
||||||
return _ArgFormat(func, ignore_none=ignore_none)
|
return _ArgFormat(func, ignore_none=ignore_none)
|
||||||
|
|
||||||
|
|
|
@ -16,11 +16,7 @@ from ansible.module_utils.urls import open_url
|
||||||
|
|
||||||
|
|
||||||
def get_consul_url(configuration):
|
def get_consul_url(configuration):
|
||||||
return "%s://%s:%s/v1" % (
|
return f"{configuration.scheme}://{configuration.host}:{configuration.port}/v1"
|
||||||
configuration.scheme,
|
|
||||||
configuration.host,
|
|
||||||
configuration.port,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def get_auth_headers(configuration):
|
def get_auth_headers(configuration):
|
||||||
|
@ -39,12 +35,12 @@ class RequestError(Exception):
|
||||||
if self.response_data is None:
|
if self.response_data is None:
|
||||||
# self.status is already the message (backwards compat)
|
# self.status is already the message (backwards compat)
|
||||||
return self.status
|
return self.status
|
||||||
return "HTTP %d: %s" % (self.status, self.response_data)
|
return f"HTTP {self.status}: {self.response_data}"
|
||||||
|
|
||||||
|
|
||||||
def handle_consul_response_error(response):
|
def handle_consul_response_error(response):
|
||||||
if 400 <= response.status_code < 600:
|
if 400 <= response.status_code < 600:
|
||||||
raise RequestError("%d %s" % (response.status_code, response.content))
|
raise RequestError(f"{response.status_code} {response.content}")
|
||||||
|
|
||||||
|
|
||||||
AUTH_ARGUMENTS_SPEC = dict(
|
AUTH_ARGUMENTS_SPEC = dict(
|
||||||
|
@ -82,7 +78,7 @@ def validate_check(check):
|
||||||
def validate_duration(duration):
|
def validate_duration(duration):
|
||||||
if duration:
|
if duration:
|
||||||
if not re.search(r"\d+(?:ns|us|ms|s|m|h)", duration):
|
if not re.search(r"\d+(?:ns|us|ms|s|m|h)", duration):
|
||||||
duration = "{0}s".format(duration)
|
duration = f"{duration}s"
|
||||||
return duration
|
return duration
|
||||||
|
|
||||||
|
|
||||||
|
@ -246,7 +242,7 @@ class _ConsulModule:
|
||||||
if operation == OPERATION_CREATE:
|
if operation == OPERATION_CREATE:
|
||||||
return self.api_endpoint
|
return self.api_endpoint
|
||||||
elif identifier:
|
elif identifier:
|
||||||
return "/".join([self.api_endpoint, identifier])
|
return f"{self.api_endpoint}/{identifier}"
|
||||||
raise RuntimeError("invalid arguments passed")
|
raise RuntimeError("invalid arguments passed")
|
||||||
|
|
||||||
def read_object(self):
|
def read_object(self):
|
||||||
|
@ -299,11 +295,7 @@ class _ConsulModule:
|
||||||
params = {k: v for k, v in params.items() if v is not None}
|
params = {k: v for k, v in params.items() if v is not None}
|
||||||
|
|
||||||
ca_path = module_params.get("ca_path")
|
ca_path = module_params.get("ca_path")
|
||||||
base_url = "%s://%s:%s/v1" % (
|
base_url = f"{module_params['scheme']}://{module_params['host']}:{module_params['port']}/v1"
|
||||||
module_params["scheme"],
|
|
||||||
module_params["host"],
|
|
||||||
module_params["port"],
|
|
||||||
)
|
|
||||||
url = "/".join([base_url] + list(url_parts))
|
url = "/".join([base_url] + list(url_parts))
|
||||||
|
|
||||||
headers = {}
|
headers = {}
|
||||||
|
@ -316,7 +308,7 @@ class _ConsulModule:
|
||||||
data = json.dumps(data)
|
data = json.dumps(data)
|
||||||
headers["Content-Type"] = "application/json"
|
headers["Content-Type"] = "application/json"
|
||||||
if params:
|
if params:
|
||||||
url = "%s?%s" % (url, urlencode(params))
|
url = f"{url}?{urlencode(params)}"
|
||||||
response = open_url(
|
response = open_url(
|
||||||
url,
|
url,
|
||||||
method=method,
|
method=method,
|
||||||
|
@ -336,8 +328,7 @@ class _ConsulModule:
|
||||||
response_data = e.fp.read()
|
response_data = e.fp.read()
|
||||||
else:
|
else:
|
||||||
self._module.fail_json(
|
self._module.fail_json(
|
||||||
msg="Could not connect to consul agent at %s:%s, error was %s"
|
msg=f"Could not connect to consul agent at {module_params['host']}:{module_params['port']}, error was {e}"
|
||||||
% (module_params["host"], module_params["port"], str(e))
|
|
||||||
)
|
)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
|
@ -37,7 +37,7 @@ def initialize_dialect(dialect, **kwargs):
|
||||||
csv.register_dialect("unix", unix_dialect)
|
csv.register_dialect("unix", unix_dialect)
|
||||||
|
|
||||||
if dialect not in csv.list_dialects():
|
if dialect not in csv.list_dialects():
|
||||||
raise DialectNotAvailableError("Dialect '%s' is not supported by your version of python." % dialect)
|
raise DialectNotAvailableError(f"Dialect '{dialect}' is not supported by your version of python.")
|
||||||
|
|
||||||
# Create a dictionary from only set options
|
# Create a dictionary from only set options
|
||||||
dialect_params = {k: v for k, v in kwargs.items() if v is not None}
|
dialect_params = {k: v for k, v in kwargs.items() if v is not None}
|
||||||
|
@ -45,7 +45,7 @@ def initialize_dialect(dialect, **kwargs):
|
||||||
try:
|
try:
|
||||||
csv.register_dialect('custom', dialect, **dialect_params)
|
csv.register_dialect('custom', dialect, **dialect_params)
|
||||||
except TypeError as e:
|
except TypeError as e:
|
||||||
raise CustomDialectFailureError("Unable to create custom dialect: %s" % to_native(e))
|
raise CustomDialectFailureError(f"Unable to create custom dialect: {e}")
|
||||||
dialect = 'custom'
|
dialect = 'custom'
|
||||||
|
|
||||||
return dialect
|
return dialect
|
||||||
|
|
|
@ -102,19 +102,19 @@ def _identifier_parse(identifier, quote_char):
|
||||||
dot = identifier.index('.')
|
dot = identifier.index('.')
|
||||||
except ValueError:
|
except ValueError:
|
||||||
identifier = identifier.replace(quote_char, quote_char * 2)
|
identifier = identifier.replace(quote_char, quote_char * 2)
|
||||||
identifier = ''.join((quote_char, identifier, quote_char))
|
identifier = f"{quote_char}{identifier}{quote_char}"
|
||||||
further_identifiers = [identifier]
|
further_identifiers = [identifier]
|
||||||
else:
|
else:
|
||||||
if dot == 0 or dot >= len(identifier) - 1:
|
if dot == 0 or dot >= len(identifier) - 1:
|
||||||
identifier = identifier.replace(quote_char, quote_char * 2)
|
identifier = identifier.replace(quote_char, quote_char * 2)
|
||||||
identifier = ''.join((quote_char, identifier, quote_char))
|
identifier = f"{quote_char}{identifier}{quote_char}"
|
||||||
further_identifiers = [identifier]
|
further_identifiers = [identifier]
|
||||||
else:
|
else:
|
||||||
first_identifier = identifier[:dot]
|
first_identifier = identifier[:dot]
|
||||||
next_identifier = identifier[dot + 1:]
|
next_identifier = identifier[dot + 1:]
|
||||||
further_identifiers = _identifier_parse(next_identifier, quote_char)
|
further_identifiers = _identifier_parse(next_identifier, quote_char)
|
||||||
first_identifier = first_identifier.replace(quote_char, quote_char * 2)
|
first_identifier = first_identifier.replace(quote_char, quote_char * 2)
|
||||||
first_identifier = ''.join((quote_char, first_identifier, quote_char))
|
first_identifier = f"{quote_char}{first_identifier}{quote_char}"
|
||||||
further_identifiers.insert(0, first_identifier)
|
further_identifiers.insert(0, first_identifier)
|
||||||
|
|
||||||
return further_identifiers
|
return further_identifiers
|
||||||
|
@ -123,14 +123,14 @@ def _identifier_parse(identifier, quote_char):
|
||||||
def pg_quote_identifier(identifier, id_type):
|
def pg_quote_identifier(identifier, id_type):
|
||||||
identifier_fragments = _identifier_parse(identifier, quote_char='"')
|
identifier_fragments = _identifier_parse(identifier, quote_char='"')
|
||||||
if len(identifier_fragments) > _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]:
|
if len(identifier_fragments) > _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]:
|
||||||
raise SQLParseError('PostgreSQL does not support %s with more than %i dots' % (id_type, _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]))
|
raise SQLParseError(f'PostgreSQL does not support {id_type} with more than {_PG_IDENTIFIER_TO_DOT_LEVEL[id_type]} dots')
|
||||||
return '.'.join(identifier_fragments)
|
return '.'.join(identifier_fragments)
|
||||||
|
|
||||||
|
|
||||||
def mysql_quote_identifier(identifier, id_type):
|
def mysql_quote_identifier(identifier, id_type):
|
||||||
identifier_fragments = _identifier_parse(identifier, quote_char='`')
|
identifier_fragments = _identifier_parse(identifier, quote_char='`')
|
||||||
if (len(identifier_fragments) - 1) > _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]:
|
if (len(identifier_fragments) - 1) > _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]:
|
||||||
raise SQLParseError('MySQL does not support %s with more than %i dots' % (id_type, _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]))
|
raise SQLParseError(f'MySQL does not support {id_type} with more than {_MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]} dots')
|
||||||
|
|
||||||
special_cased_fragments = []
|
special_cased_fragments = []
|
||||||
for fragment in identifier_fragments:
|
for fragment in identifier_fragments:
|
||||||
|
@ -185,5 +185,4 @@ def check_input(module, *args):
|
||||||
dangerous_elements.append(elem)
|
dangerous_elements.append(elem)
|
||||||
|
|
||||||
if dangerous_elements:
|
if dangerous_elements:
|
||||||
module.fail_json(msg="Passed input '%s' is "
|
module.fail_json(msg=f"Passed input '{', '.join(dangerous_elements)}' is potentially dangerous")
|
||||||
"potentially dangerous" % ', '.join(dangerous_elements))
|
|
||||||
|
|
|
@ -53,7 +53,7 @@ class _Dependency(object):
|
||||||
module.fail_json(msg=self.message, exception=self.trace)
|
module.fail_json(msg=self.message, exception=self.trace)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return "<dependency: {0} [{1}]>".format(self.name, self._states[self.state])
|
return f"<dependency: {self.name} [{self._states[self.state]}]>"
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
@contextmanager
|
||||||
|
|
|
@ -73,7 +73,7 @@ class DimensionDataModule(object):
|
||||||
|
|
||||||
# Region and location are common to all Dimension Data modules.
|
# Region and location are common to all Dimension Data modules.
|
||||||
region = self.module.params['region']
|
region = self.module.params['region']
|
||||||
self.region = 'dd-{0}'.format(region)
|
self.region = f'dd-{region}'
|
||||||
self.location = self.module.params['location']
|
self.location = self.module.params['location']
|
||||||
|
|
||||||
libcloud.security.VERIFY_SSL_CERT = self.module.params['validate_certs']
|
libcloud.security.VERIFY_SSL_CERT = self.module.params['validate_certs']
|
||||||
|
@ -140,7 +140,7 @@ class DimensionDataModule(object):
|
||||||
if not user_id or not key:
|
if not user_id or not key:
|
||||||
home = expanduser('~')
|
home = expanduser('~')
|
||||||
config = configparser.RawConfigParser()
|
config = configparser.RawConfigParser()
|
||||||
config.read("%s/.dimensiondata" % home)
|
config.read(f"{home}/.dimensiondata")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
user_id = config.get("dimensiondatacloud", "MCP_USER")
|
user_id = config.get("dimensiondatacloud", "MCP_USER")
|
||||||
|
@ -190,7 +190,7 @@ class DimensionDataModule(object):
|
||||||
if network_domain:
|
if network_domain:
|
||||||
return network_domain
|
return network_domain
|
||||||
|
|
||||||
raise UnknownNetworkError("Network '%s' could not be found" % locator)
|
raise UnknownNetworkError(f"Network '{locator}' could not be found")
|
||||||
|
|
||||||
def get_vlan(self, locator, location, network_domain):
|
def get_vlan(self, locator, location, network_domain):
|
||||||
"""
|
"""
|
||||||
|
@ -212,7 +212,7 @@ class DimensionDataModule(object):
|
||||||
if vlan:
|
if vlan:
|
||||||
return vlan
|
return vlan
|
||||||
|
|
||||||
raise UnknownVLANError("VLAN '%s' could not be found" % locator)
|
raise UnknownVLANError(f"VLAN '{locator}' could not be found")
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def argument_spec(**additional_argument_spec):
|
def argument_spec(**additional_argument_spec):
|
||||||
|
|
|
@ -6,7 +6,7 @@ from __future__ import annotations
|
||||||
|
|
||||||
import json
|
import json
|
||||||
|
|
||||||
from ansible.module_utils.common.text.converters import to_native, to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
from ansible.module_utils.urls import fetch_url
|
from ansible.module_utils.urls import fetch_url
|
||||||
|
|
||||||
|
|
||||||
|
@ -42,7 +42,7 @@ class GandiLiveDNSAPI(object):
|
||||||
error = errors[0]
|
error = errors[0]
|
||||||
name = error.get('name')
|
name = error.get('name')
|
||||||
if name:
|
if name:
|
||||||
s += '{0} :'.format(name)
|
s += f'{name} :'
|
||||||
description = error.get('description')
|
description = error.get('description')
|
||||||
if description:
|
if description:
|
||||||
s += description
|
s += description
|
||||||
|
@ -50,9 +50,9 @@ class GandiLiveDNSAPI(object):
|
||||||
|
|
||||||
def _gandi_api_call(self, api_call, method='GET', payload=None, error_on_404=True):
|
def _gandi_api_call(self, api_call, method='GET', payload=None, error_on_404=True):
|
||||||
authorization_header = (
|
authorization_header = (
|
||||||
'Bearer {0}'.format(self.personal_access_token)
|
f'Bearer {self.personal_access_token}'
|
||||||
if self.personal_access_token
|
if self.personal_access_token
|
||||||
else 'Apikey {0}'.format(self.api_key)
|
else f'Apikey {self.api_key}'
|
||||||
)
|
)
|
||||||
headers = {'Authorization': authorization_header,
|
headers = {'Authorization': authorization_header,
|
||||||
'Content-Type': 'application/json'}
|
'Content-Type': 'application/json'}
|
||||||
|
@ -61,7 +61,7 @@ class GandiLiveDNSAPI(object):
|
||||||
try:
|
try:
|
||||||
data = json.dumps(payload)
|
data = json.dumps(payload)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.module.fail_json(msg="Failed to encode payload as JSON: %s " % to_native(e))
|
self.module.fail_json(msg=f"Failed to encode payload as JSON: {e} ")
|
||||||
|
|
||||||
resp, info = fetch_url(self.module,
|
resp, info = fetch_url(self.module,
|
||||||
self.api_endpoint + api_call,
|
self.api_endpoint + api_call,
|
||||||
|
@ -73,7 +73,7 @@ class GandiLiveDNSAPI(object):
|
||||||
if info['status'] >= 400 and (info['status'] != 404 or error_on_404):
|
if info['status'] >= 400 and (info['status'] != 404 or error_on_404):
|
||||||
err_s = self.error_strings.get(info['status'], '')
|
err_s = self.error_strings.get(info['status'], '')
|
||||||
|
|
||||||
error_msg = "API Error {0}: {1}".format(err_s, self._build_error_message(self.module, info))
|
error_msg = f"API Error {err_s}: {self._build_error_message(self.module, info)}"
|
||||||
|
|
||||||
result = None
|
result = None
|
||||||
try:
|
try:
|
||||||
|
@ -85,7 +85,7 @@ class GandiLiveDNSAPI(object):
|
||||||
try:
|
try:
|
||||||
result = json.loads(to_text(content, errors='surrogate_or_strict'))
|
result = json.loads(to_text(content, errors='surrogate_or_strict'))
|
||||||
except (getattr(json, 'JSONDecodeError', ValueError)) as e:
|
except (getattr(json, 'JSONDecodeError', ValueError)) as e:
|
||||||
error_msg += "; Failed to parse API response with error {0}: {1}".format(to_native(e), content)
|
error_msg += f"; Failed to parse API response with error {e}: {content}"
|
||||||
|
|
||||||
if error_msg:
|
if error_msg:
|
||||||
self.module.fail_json(msg=error_msg)
|
self.module.fail_json(msg=error_msg)
|
||||||
|
@ -114,11 +114,11 @@ class GandiLiveDNSAPI(object):
|
||||||
return [self.build_result(r, domain) for r in results]
|
return [self.build_result(r, domain) for r in results]
|
||||||
|
|
||||||
def get_records(self, record, type, domain):
|
def get_records(self, record, type, domain):
|
||||||
url = '/domains/%s/records' % (domain)
|
url = f'/domains/{domain}/records'
|
||||||
if record:
|
if record:
|
||||||
url += '/%s' % (record)
|
url += f'/{record}'
|
||||||
if type:
|
if type:
|
||||||
url += '/%s' % (type)
|
url += f'/{type}'
|
||||||
|
|
||||||
records, status = self._gandi_api_call(url, error_on_404=False)
|
records, status = self._gandi_api_call(url, error_on_404=False)
|
||||||
|
|
||||||
|
@ -137,7 +137,7 @@ class GandiLiveDNSAPI(object):
|
||||||
return records
|
return records
|
||||||
|
|
||||||
def create_record(self, record, type, values, ttl, domain):
|
def create_record(self, record, type, values, ttl, domain):
|
||||||
url = '/domains/%s/records' % (domain)
|
url = f'/domains/{domain}/records'
|
||||||
new_record = {
|
new_record = {
|
||||||
'rrset_name': record,
|
'rrset_name': record,
|
||||||
'rrset_type': type,
|
'rrset_type': type,
|
||||||
|
@ -152,7 +152,7 @@ class GandiLiveDNSAPI(object):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def update_record(self, record, type, values, ttl, domain):
|
def update_record(self, record, type, values, ttl, domain):
|
||||||
url = '/domains/%s/records/%s/%s' % (domain, record, type)
|
url = f'/domains/{domain}/records/{record}/{type}'
|
||||||
new_record = {
|
new_record = {
|
||||||
'rrset_values': values,
|
'rrset_values': values,
|
||||||
'rrset_ttl': ttl,
|
'rrset_ttl': ttl,
|
||||||
|
@ -161,7 +161,7 @@ class GandiLiveDNSAPI(object):
|
||||||
return record
|
return record
|
||||||
|
|
||||||
def delete_record(self, record, type, domain):
|
def delete_record(self, record, type, domain):
|
||||||
url = '/domains/%s/records/%s/%s' % (domain, record, type)
|
url = f'/domains/{domain}/records/{record}/{type}'
|
||||||
|
|
||||||
self._gandi_api_call(url, method='DELETE')
|
self._gandi_api_call(url, method='DELETE')
|
||||||
|
|
||||||
|
|
|
@ -7,7 +7,6 @@
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from ansible.module_utils.basic import missing_required_lib
|
from ansible.module_utils.basic import missing_required_lib
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
|
||||||
|
|
||||||
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
||||||
|
|
||||||
|
@ -62,7 +61,7 @@ def find_project(gitlab_instance, identifier):
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
current_user = gitlab_instance.user
|
current_user = gitlab_instance.user
|
||||||
try:
|
try:
|
||||||
project = gitlab_instance.projects.get(current_user.username + '/' + identifier)
|
project = gitlab_instance.projects.get(f"{current_user.username}/{identifier}")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@ -86,11 +85,10 @@ def ensure_gitlab_package(module, min_version=None):
|
||||||
)
|
)
|
||||||
gitlab_version = gitlab.__version__
|
gitlab_version = gitlab.__version__
|
||||||
if min_version is not None and LooseVersion(gitlab_version) < LooseVersion(min_version):
|
if min_version is not None and LooseVersion(gitlab_version) < LooseVersion(min_version):
|
||||||
module.fail_json(
|
module.fail_json(msg=(
|
||||||
msg="This module requires python-gitlab Python module >= %s "
|
f"This module requires python-gitlab Python module >= {min_version} (installed version: "
|
||||||
"(installed version: %s). Please upgrade python-gitlab to version %s or above."
|
f"{gitlab_version}). Please upgrade python-gitlab to version {min_version} or above."
|
||||||
% (min_version, gitlab_version, min_version)
|
))
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def gitlab_authentication(module, min_version=None):
|
def gitlab_authentication(module, min_version=None):
|
||||||
|
@ -120,10 +118,12 @@ def gitlab_authentication(module, min_version=None):
|
||||||
oauth_token=gitlab_oauth_token, job_token=gitlab_job_token, api_version=4)
|
oauth_token=gitlab_oauth_token, job_token=gitlab_job_token, api_version=4)
|
||||||
gitlab_instance.auth()
|
gitlab_instance.auth()
|
||||||
except (gitlab.exceptions.GitlabAuthenticationError, gitlab.exceptions.GitlabGetError) as e:
|
except (gitlab.exceptions.GitlabAuthenticationError, gitlab.exceptions.GitlabGetError) as e:
|
||||||
module.fail_json(msg="Failed to connect to GitLab server: %s" % to_native(e))
|
module.fail_json(msg=f"Failed to connect to GitLab server: {e}")
|
||||||
except (gitlab.exceptions.GitlabHttpError) as e:
|
except (gitlab.exceptions.GitlabHttpError) as e:
|
||||||
module.fail_json(msg="Failed to connect to GitLab server: %s. \
|
module.fail_json(msg=(
|
||||||
GitLab remove Session API now that private tokens are removed from user API endpoints since version 10.2." % to_native(e))
|
f"Failed to connect to GitLab server: {e}. GitLab remove Session API now "
|
||||||
|
"that private tokens are removed from user API endpoints since version 10.2."
|
||||||
|
))
|
||||||
|
|
||||||
return gitlab_instance
|
return gitlab_instance
|
||||||
|
|
||||||
|
|
|
@ -12,32 +12,28 @@ import re
|
||||||
def _create_regex_group_complement(s):
|
def _create_regex_group_complement(s):
|
||||||
lines = (line.strip() for line in s.split("\n") if line.strip())
|
lines = (line.strip() for line in s.split("\n") if line.strip())
|
||||||
chars = filter(None, (line.split("#")[0].strip() for line in lines))
|
chars = filter(None, (line.split("#")[0].strip() for line in lines))
|
||||||
group = r"[^" + r"".join(chars) + r"]"
|
group = rf"[^{''.join(chars)}]"
|
||||||
return re.compile(group)
|
return re.compile(group)
|
||||||
|
|
||||||
|
|
||||||
class HomebrewValidate(object):
|
class HomebrewValidate(object):
|
||||||
# class regexes ------------------------------------------------ {{{
|
# class regexes ------------------------------------------------ {{{
|
||||||
VALID_PATH_CHARS = r"""
|
VALID_PATH_CHARS = rf"""
|
||||||
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
|
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
|
||||||
\s # spaces
|
\s # spaces
|
||||||
: # colons
|
: # colons
|
||||||
{sep} # the OS-specific path separator
|
{os.path.sep} # the OS-specific path separator
|
||||||
. # dots
|
. # dots
|
||||||
\- # dashes
|
\- # dashes
|
||||||
""".format(
|
"""
|
||||||
sep=os.path.sep
|
|
||||||
)
|
|
||||||
|
|
||||||
VALID_BREW_PATH_CHARS = r"""
|
VALID_BREW_PATH_CHARS = rf"""
|
||||||
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
|
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
|
||||||
\s # spaces
|
\s # spaces
|
||||||
{sep} # the OS-specific path separator
|
{os.path.sep} # the OS-specific path separator
|
||||||
. # dots
|
. # dots
|
||||||
\- # dashes
|
\- # dashes
|
||||||
""".format(
|
"""
|
||||||
sep=os.path.sep
|
|
||||||
)
|
|
||||||
|
|
||||||
VALID_PACKAGE_CHARS = r"""
|
VALID_PACKAGE_CHARS = r"""
|
||||||
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
|
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
|
||||||
|
@ -123,17 +119,17 @@ def parse_brew_path(module):
|
||||||
"""
|
"""
|
||||||
path = module.params["path"]
|
path = module.params["path"]
|
||||||
if not HomebrewValidate.valid_path(path):
|
if not HomebrewValidate.valid_path(path):
|
||||||
module.fail_json(msg="Invalid path: {0}".format(path))
|
module.fail_json(msg=f"Invalid path: {path}")
|
||||||
|
|
||||||
if isinstance(path, str):
|
if isinstance(path, str):
|
||||||
paths = path.split(":")
|
paths = path.split(":")
|
||||||
elif isinstance(path, list):
|
elif isinstance(path, list):
|
||||||
paths = path
|
paths = path
|
||||||
else:
|
else:
|
||||||
module.fail_json(msg="Invalid path: {0}".format(path))
|
module.fail_json(msg=f"Invalid path: {path}")
|
||||||
|
|
||||||
brew_path = module.get_bin_path("brew", required=True, opt_dirs=paths)
|
brew_path = module.get_bin_path("brew", required=True, opt_dirs=paths)
|
||||||
if not HomebrewValidate.valid_brew_path(brew_path):
|
if not HomebrewValidate.valid_brew_path(brew_path):
|
||||||
module.fail_json(msg="Invalid brew path: {0}".format(brew_path))
|
module.fail_json(msg=f"Invalid brew path: {brew_path}")
|
||||||
|
|
||||||
return brew_path
|
return brew_path
|
||||||
|
|
|
@ -30,7 +30,7 @@ class HwcModuleException(Exception):
|
||||||
self._message = message
|
self._message = message
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return "[HwcClientException] message=%s" % self._message
|
return f"[HwcClientException] message={self._message}"
|
||||||
|
|
||||||
|
|
||||||
class HwcClientException(Exception):
|
class HwcClientException(Exception):
|
||||||
|
@ -41,9 +41,8 @@ class HwcClientException(Exception):
|
||||||
self._message = message
|
self._message = message
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
msg = " code=%s," % str(self._code) if self._code != 0 else ""
|
msg = f" code={self._code!s}," if self._code != 0 else ""
|
||||||
return "[HwcClientException]%s message=%s" % (
|
return f"[HwcClientException]{msg} message={self._message}"
|
||||||
msg, self._message)
|
|
||||||
|
|
||||||
|
|
||||||
class HwcClientException404(HwcClientException):
|
class HwcClientException404(HwcClientException):
|
||||||
|
@ -51,7 +50,7 @@ class HwcClientException404(HwcClientException):
|
||||||
super(HwcClientException404, self).__init__(404, message)
|
super(HwcClientException404, self).__init__(404, message)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return "[HwcClientException404] message=%s" % self._message
|
return f"[HwcClientException404] message={self._message}"
|
||||||
|
|
||||||
|
|
||||||
def session_method_wrapper(f):
|
def session_method_wrapper(f):
|
||||||
|
@ -61,7 +60,7 @@ def session_method_wrapper(f):
|
||||||
r = f(self, url, *args, **kwargs)
|
r = f(self, url, *args, **kwargs)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
raise HwcClientException(
|
raise HwcClientException(
|
||||||
0, "Sending request failed, error=%s" % ex)
|
0, f"Sending request failed, error={ex}")
|
||||||
|
|
||||||
result = None
|
result = None
|
||||||
if r.content:
|
if r.content:
|
||||||
|
@ -69,7 +68,7 @@ def session_method_wrapper(f):
|
||||||
result = r.json()
|
result = r.json()
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
raise HwcClientException(
|
raise HwcClientException(
|
||||||
0, "Parsing response to json failed, error: %s" % ex)
|
0, f"Parsing response to json failed, error: {ex}")
|
||||||
|
|
||||||
code = r.status_code
|
code = r.status_code
|
||||||
if code not in [200, 201, 202, 203, 204, 205, 206, 207, 208, 226]:
|
if code not in [200, 201, 202, 203, 204, 205, 206, 207, 208, 226]:
|
||||||
|
@ -98,7 +97,7 @@ class _ServiceClient(object):
|
||||||
self._client = client
|
self._client = client
|
||||||
self._endpoint = endpoint
|
self._endpoint = endpoint
|
||||||
self._default_header = {
|
self._default_header = {
|
||||||
'User-Agent': "Huawei-Ansible-MM-%s" % product,
|
'User-Agent': f"Huawei-Ansible-MM-{product}",
|
||||||
'Accept': 'application/json',
|
'Accept': 'application/json',
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -186,7 +185,7 @@ class Config(object):
|
||||||
raise_exc=False)
|
raise_exc=False)
|
||||||
|
|
||||||
def _get_service_endpoint(self, client, service_type, region):
|
def _get_service_endpoint(self, client, service_type, region):
|
||||||
k = "%s.%s" % (service_type, region if region else "")
|
k = f"{service_type}.{region if region else ''}"
|
||||||
|
|
||||||
if k in self._endpoints:
|
if k in self._endpoints:
|
||||||
return self._endpoints.get(k)
|
return self._endpoints.get(k)
|
||||||
|
@ -197,11 +196,11 @@ class Config(object):
|
||||||
region_name=region, interface="public")
|
region_name=region, interface="public")
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
raise HwcClientException(
|
raise HwcClientException(
|
||||||
0, "Getting endpoint failed, error=%s" % ex)
|
0, f"Getting endpoint failed, error={ex}")
|
||||||
|
|
||||||
if url == "":
|
if url == "":
|
||||||
raise HwcClientException(
|
raise HwcClientException(
|
||||||
0, "Cannot find the endpoint for %s" % service_type)
|
0, f"Cannot find the endpoint for {service_type}")
|
||||||
|
|
||||||
if url[-1] != "/":
|
if url[-1] != "/":
|
||||||
url += "/"
|
url += "/"
|
||||||
|
@ -340,7 +339,7 @@ def wait_to_finish(target, pending, refresh, timeout, min_interval=1, delay=3):
|
||||||
|
|
||||||
if not_found_times > 10:
|
if not_found_times > 10:
|
||||||
raise HwcModuleException(
|
raise HwcModuleException(
|
||||||
"not found the object for %d times" % not_found_times)
|
f"not found the object for {not_found_times} times")
|
||||||
else:
|
else:
|
||||||
not_found_times = 0
|
not_found_times = 0
|
||||||
|
|
||||||
|
@ -349,7 +348,7 @@ def wait_to_finish(target, pending, refresh, timeout, min_interval=1, delay=3):
|
||||||
|
|
||||||
if pending and status not in pending:
|
if pending and status not in pending:
|
||||||
raise HwcModuleException(
|
raise HwcModuleException(
|
||||||
"unexpected status(%s) occurred" % status)
|
f"unexpected status({status}) occurred")
|
||||||
|
|
||||||
if not is_last_time:
|
if not is_last_time:
|
||||||
wait *= 2
|
wait *= 2
|
||||||
|
@ -360,7 +359,7 @@ def wait_to_finish(target, pending, refresh, timeout, min_interval=1, delay=3):
|
||||||
|
|
||||||
time.sleep(wait)
|
time.sleep(wait)
|
||||||
|
|
||||||
raise HwcModuleException("async wait timeout after %d seconds" % timeout)
|
raise HwcModuleException(f"async wait timeout after {timeout} seconds")
|
||||||
|
|
||||||
|
|
||||||
def navigate_value(data, index, array_index=None):
|
def navigate_value(data, index, array_index=None):
|
||||||
|
@ -379,7 +378,7 @@ def navigate_value(data, index, array_index=None):
|
||||||
i = index[n]
|
i = index[n]
|
||||||
if i not in d:
|
if i not in d:
|
||||||
raise HwcModuleException(
|
raise HwcModuleException(
|
||||||
"navigate value failed: key(%s) is not exist in dict" % i)
|
f"navigate value failed: key({i}) is not exist in dict")
|
||||||
d = d[i]
|
d = d[i]
|
||||||
|
|
||||||
if not array_index:
|
if not array_index:
|
||||||
|
|
|
@ -67,7 +67,7 @@ def keycloak_clientsecret_module_resolve_params(module, kc):
|
||||||
|
|
||||||
if client is None:
|
if client is None:
|
||||||
module.fail_json(
|
module.fail_json(
|
||||||
msg='Client does not exist {client_id}'.format(client_id=client_id)
|
msg=f'Client does not exist {client_id}'
|
||||||
)
|
)
|
||||||
|
|
||||||
id = client['id']
|
id = client['id']
|
||||||
|
|
|
@ -20,8 +20,7 @@ class iLORedfishUtils(RedfishUtils):
|
||||||
properties = ['Description', 'Id', 'Name', 'UserName']
|
properties = ['Description', 'Id', 'Name', 'UserName']
|
||||||
|
|
||||||
# Changed self.sessions_uri to Hardcoded string.
|
# Changed self.sessions_uri to Hardcoded string.
|
||||||
response = self.get_request(
|
response = self.get_request(f"{self.root_uri}{self.service_root}SessionService/Sessions/")
|
||||||
self.root_uri + self.service_root + "SessionService/Sessions/")
|
|
||||||
if not response['ret']:
|
if not response['ret']:
|
||||||
return response
|
return response
|
||||||
result['ret'] = True
|
result['ret'] = True
|
||||||
|
@ -83,7 +82,7 @@ class iLORedfishUtils(RedfishUtils):
|
||||||
if not res_dhv6['ret']:
|
if not res_dhv6['ret']:
|
||||||
return res_dhv6
|
return res_dhv6
|
||||||
|
|
||||||
datetime_uri = self.manager_uri + "DateTime"
|
datetime_uri = f"{self.manager_uri}DateTime"
|
||||||
|
|
||||||
listofips = mgr_attributes['mgr_attr_value'].split(" ")
|
listofips = mgr_attributes['mgr_attr_value'].split(" ")
|
||||||
if len(listofips) > 2:
|
if len(listofips) > 2:
|
||||||
|
@ -102,12 +101,12 @@ class iLORedfishUtils(RedfishUtils):
|
||||||
if not response1['ret']:
|
if not response1['ret']:
|
||||||
return response1
|
return response1
|
||||||
|
|
||||||
return {'ret': True, 'changed': True, 'msg': "Modified %s" % mgr_attributes['mgr_attr_name']}
|
return {'ret': True, 'changed': True, 'msg': f"Modified {mgr_attributes['mgr_attr_name']}"}
|
||||||
|
|
||||||
def set_time_zone(self, attr):
|
def set_time_zone(self, attr):
|
||||||
key = attr['mgr_attr_name']
|
key = attr['mgr_attr_name']
|
||||||
|
|
||||||
uri = self.manager_uri + "DateTime/"
|
uri = f"{self.manager_uri}DateTime/"
|
||||||
response = self.get_request(self.root_uri + uri)
|
response = self.get_request(self.root_uri + uri)
|
||||||
if not response['ret']:
|
if not response['ret']:
|
||||||
return response
|
return response
|
||||||
|
@ -115,7 +114,7 @@ class iLORedfishUtils(RedfishUtils):
|
||||||
data = response["data"]
|
data = response["data"]
|
||||||
|
|
||||||
if key not in data:
|
if key not in data:
|
||||||
return {'ret': False, 'changed': False, 'msg': "Key %s not found" % key}
|
return {'ret': False, 'changed': False, 'msg': f"Key {key} not found"}
|
||||||
|
|
||||||
timezones = data["TimeZoneList"]
|
timezones = data["TimeZoneList"]
|
||||||
index = ""
|
index = ""
|
||||||
|
@ -129,7 +128,7 @@ class iLORedfishUtils(RedfishUtils):
|
||||||
if not response['ret']:
|
if not response['ret']:
|
||||||
return response
|
return response
|
||||||
|
|
||||||
return {'ret': True, 'changed': True, 'msg': "Modified %s" % attr['mgr_attr_name']}
|
return {'ret': True, 'changed': True, 'msg': f"Modified {attr['mgr_attr_name']}"}
|
||||||
|
|
||||||
def set_dns_server(self, attr):
|
def set_dns_server(self, attr):
|
||||||
key = attr['mgr_attr_name']
|
key = attr['mgr_attr_name']
|
||||||
|
@ -161,7 +160,7 @@ class iLORedfishUtils(RedfishUtils):
|
||||||
if not response['ret']:
|
if not response['ret']:
|
||||||
return response
|
return response
|
||||||
|
|
||||||
return {'ret': True, 'changed': True, 'msg': "Modified %s" % attr['mgr_attr_name']}
|
return {'ret': True, 'changed': True, 'msg': f"Modified {attr['mgr_attr_name']}"}
|
||||||
|
|
||||||
def set_domain_name(self, attr):
|
def set_domain_name(self, attr):
|
||||||
key = attr['mgr_attr_name']
|
key = attr['mgr_attr_name']
|
||||||
|
@ -206,7 +205,7 @@ class iLORedfishUtils(RedfishUtils):
|
||||||
response = self.patch_request(self.root_uri + ethuri, payload)
|
response = self.patch_request(self.root_uri + ethuri, payload)
|
||||||
if not response['ret']:
|
if not response['ret']:
|
||||||
return response
|
return response
|
||||||
return {'ret': True, 'changed': True, 'msg': "Modified %s" % attr['mgr_attr_name']}
|
return {'ret': True, 'changed': True, 'msg': f"Modified {attr['mgr_attr_name']}"}
|
||||||
|
|
||||||
def set_wins_registration(self, mgrattr):
|
def set_wins_registration(self, mgrattr):
|
||||||
Key = mgrattr['mgr_attr_name']
|
Key = mgrattr['mgr_attr_name']
|
||||||
|
@ -227,7 +226,7 @@ class iLORedfishUtils(RedfishUtils):
|
||||||
response = self.patch_request(self.root_uri + ethuri, payload)
|
response = self.patch_request(self.root_uri + ethuri, payload)
|
||||||
if not response['ret']:
|
if not response['ret']:
|
||||||
return response
|
return response
|
||||||
return {'ret': True, 'changed': True, 'msg': "Modified %s" % mgrattr['mgr_attr_name']}
|
return {'ret': True, 'changed': True, 'msg': f"Modified {mgrattr['mgr_attr_name']}"}
|
||||||
|
|
||||||
def get_server_poststate(self):
|
def get_server_poststate(self):
|
||||||
# Get server details
|
# Get server details
|
||||||
|
@ -302,5 +301,5 @@ class iLORedfishUtils(RedfishUtils):
|
||||||
return {
|
return {
|
||||||
"ret": False,
|
"ret": False,
|
||||||
"changed": False,
|
"changed": False,
|
||||||
"msg": "Server Reboot has failed, server state: {state} ".format(state=state)
|
"msg": f"Server Reboot has failed, server state: {state} "
|
||||||
}
|
}
|
||||||
|
|
|
@ -51,16 +51,16 @@ class IPAClient(object):
|
||||||
self.use_gssapi = False
|
self.use_gssapi = False
|
||||||
|
|
||||||
def get_base_url(self):
|
def get_base_url(self):
|
||||||
return '%s://%s/ipa' % (self.protocol, self.host)
|
return f'{self.protocol}://{self.host}/ipa'
|
||||||
|
|
||||||
def get_json_url(self):
|
def get_json_url(self):
|
||||||
return '%s/session/json' % self.get_base_url()
|
return f'{self.get_base_url()}/session/json'
|
||||||
|
|
||||||
def login(self, username, password):
|
def login(self, username, password):
|
||||||
if 'KRB5CCNAME' in os.environ and HAS_GSSAPI:
|
if 'KRB5CCNAME' in os.environ and HAS_GSSAPI:
|
||||||
self.use_gssapi = True
|
self.use_gssapi = True
|
||||||
elif 'KRB5_CLIENT_KTNAME' in os.environ and HAS_GSSAPI:
|
elif 'KRB5_CLIENT_KTNAME' in os.environ and HAS_GSSAPI:
|
||||||
ccache = "MEMORY:" + str(uuid.uuid4())
|
ccache = f"MEMORY:{uuid.uuid4()!s}"
|
||||||
os.environ['KRB5CCNAME'] = ccache
|
os.environ['KRB5CCNAME'] = ccache
|
||||||
self.use_gssapi = True
|
self.use_gssapi = True
|
||||||
else:
|
else:
|
||||||
|
@ -71,8 +71,8 @@ class IPAClient(object):
|
||||||
'GSSAPI. To use GSSAPI, please set the '
|
'GSSAPI. To use GSSAPI, please set the '
|
||||||
'KRB5_CLIENT_KTNAME or KRB5CCNAME (or both) '
|
'KRB5_CLIENT_KTNAME or KRB5CCNAME (or both) '
|
||||||
' environment variables.')
|
' environment variables.')
|
||||||
url = '%s/session/login_password' % self.get_base_url()
|
url = f'{self.get_base_url()}/session/login_password'
|
||||||
data = 'user=%s&password=%s' % (quote(username, safe=''), quote(password, safe=''))
|
data = f"user={quote(username, safe='')}&password={quote(password, safe='')}"
|
||||||
headers = {'referer': self.get_base_url(),
|
headers = {'referer': self.get_base_url(),
|
||||||
'Content-Type': 'application/x-www-form-urlencoded',
|
'Content-Type': 'application/x-www-form-urlencoded',
|
||||||
'Accept': 'text/plain'}
|
'Accept': 'text/plain'}
|
||||||
|
@ -97,7 +97,7 @@ class IPAClient(object):
|
||||||
err_string = e.get('message')
|
err_string = e.get('message')
|
||||||
else:
|
else:
|
||||||
err_string = e
|
err_string = e
|
||||||
self.module.fail_json(msg='%s: %s' % (msg, err_string))
|
self.module.fail_json(msg=f'{msg}: {err_string}')
|
||||||
|
|
||||||
def get_ipa_version(self):
|
def get_ipa_version(self):
|
||||||
response = self.ping()['summary']
|
response = self.ping()['summary']
|
||||||
|
@ -114,7 +114,7 @@ class IPAClient(object):
|
||||||
def _post_json(self, method, name, item=None):
|
def _post_json(self, method, name, item=None):
|
||||||
if item is None:
|
if item is None:
|
||||||
item = {}
|
item = {}
|
||||||
url = '%s/session/json' % self.get_base_url()
|
url = f'{self.get_base_url()}/session/json'
|
||||||
data = dict(method=method)
|
data = dict(method=method)
|
||||||
|
|
||||||
# TODO: We should probably handle this a little better.
|
# TODO: We should probably handle this a little better.
|
||||||
|
@ -132,13 +132,13 @@ class IPAClient(object):
|
||||||
if status_code not in [200, 201, 204]:
|
if status_code not in [200, 201, 204]:
|
||||||
self._fail(method, info['msg'])
|
self._fail(method, info['msg'])
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self._fail('post %s' % method, to_native(e))
|
self._fail(f'post {method}', to_native(e))
|
||||||
|
|
||||||
charset = resp.headers.get_content_charset('latin-1')
|
charset = resp.headers.get_content_charset('latin-1')
|
||||||
resp = json.loads(to_text(resp.read(), encoding=charset))
|
resp = json.loads(to_text(resp.read(), encoding=charset))
|
||||||
err = resp.get('error')
|
err = resp.get('error')
|
||||||
if err is not None:
|
if err is not None:
|
||||||
self._fail('response %s' % method, err)
|
self._fail(f'response {method}', err)
|
||||||
|
|
||||||
if 'result' in resp:
|
if 'result' in resp:
|
||||||
result = resp.get('result')
|
result = resp.get('result')
|
||||||
|
|
|
@ -146,28 +146,28 @@ def add_host_key(module, fqdn, port=22, key_type="rsa", create_dir=False):
|
||||||
try:
|
try:
|
||||||
os.makedirs(user_ssh_dir, int('700', 8))
|
os.makedirs(user_ssh_dir, int('700', 8))
|
||||||
except Exception:
|
except Exception:
|
||||||
module.fail_json(msg="failed to create host key directory: %s" % user_ssh_dir)
|
module.fail_json(msg=f"failed to create host key directory: {user_ssh_dir}")
|
||||||
else:
|
else:
|
||||||
module.fail_json(msg="%s does not exist" % user_ssh_dir)
|
module.fail_json(msg=f"{user_ssh_dir} does not exist")
|
||||||
elif not os.path.isdir(user_ssh_dir):
|
elif not os.path.isdir(user_ssh_dir):
|
||||||
module.fail_json(msg="%s is not a directory" % user_ssh_dir)
|
module.fail_json(msg=f"{user_ssh_dir} is not a directory")
|
||||||
|
|
||||||
if port:
|
if port:
|
||||||
this_cmd = "%s -t %s -p %s %s" % (keyscan_cmd, key_type, port, fqdn)
|
this_cmd = f"{keyscan_cmd} -t {key_type} -p {port} {fqdn}"
|
||||||
else:
|
else:
|
||||||
this_cmd = "%s -t %s %s" % (keyscan_cmd, key_type, fqdn)
|
this_cmd = f"{keyscan_cmd} -t {key_type} {fqdn}"
|
||||||
|
|
||||||
rc, out, err = module.run_command(this_cmd)
|
rc, out, err = module.run_command(this_cmd)
|
||||||
# ssh-keyscan gives a 0 exit code and prints nothing on timeout
|
# ssh-keyscan gives a 0 exit code and prints nothing on timeout
|
||||||
if rc != 0 or not out:
|
if rc != 0 or not out:
|
||||||
msg = 'failed to retrieve hostkey'
|
msg = 'failed to retrieve hostkey'
|
||||||
if not out:
|
if not out:
|
||||||
msg += '. "%s" returned no matches.' % this_cmd
|
msg += f'. "{this_cmd}" returned no matches.'
|
||||||
else:
|
else:
|
||||||
msg += ' using command "%s". [stdout]: %s' % (this_cmd, out)
|
msg += f' using command "{this_cmd}". [stdout]: {out}'
|
||||||
|
|
||||||
if err:
|
if err:
|
||||||
msg += ' [stderr]: %s' % err
|
msg += f' [stderr]: {err}'
|
||||||
|
|
||||||
module.fail_json(msg=msg)
|
module.fail_json(msg=msg)
|
||||||
|
|
||||||
|
|
|
@ -91,7 +91,7 @@ class LdapGeneric(object):
|
||||||
if len(explode_dn) > 1:
|
if len(explode_dn) > 1:
|
||||||
try:
|
try:
|
||||||
escaped_value = ldap.filter.escape_filter_chars(explode_dn[0])
|
escaped_value = ldap.filter.escape_filter_chars(explode_dn[0])
|
||||||
filterstr = "(%s)" % escaped_value
|
filterstr = f"({escaped_value})"
|
||||||
dns = self.connection.search_s(','.join(explode_dn[1:]),
|
dns = self.connection.search_s(','.join(explode_dn[1:]),
|
||||||
ldap.SCOPE_ONELEVEL, filterstr)
|
ldap.SCOPE_ONELEVEL, filterstr)
|
||||||
if len(dns) == 1:
|
if len(dns) == 1:
|
||||||
|
|
|
@ -18,4 +18,4 @@ def get_user_agent(module):
|
||||||
from ansible.module_utils.ansible_release import __version__ as ansible_version
|
from ansible.module_utils.ansible_release import __version__ as ansible_version
|
||||||
except ImportError:
|
except ImportError:
|
||||||
ansible_version = 'unknown'
|
ansible_version = 'unknown'
|
||||||
return 'Ansible-%s/%s' % (module, ansible_version)
|
return f'Ansible-{module}/{ansible_version}'
|
||||||
|
|
|
@ -78,7 +78,7 @@ class LXDClient(object):
|
||||||
def do(self, method, url, body_json=None, ok_error_codes=None, timeout=None, wait_for_container=None):
|
def do(self, method, url, body_json=None, ok_error_codes=None, timeout=None, wait_for_container=None):
|
||||||
resp_json = self._send_request(method, url, body_json=body_json, ok_error_codes=ok_error_codes, timeout=timeout)
|
resp_json = self._send_request(method, url, body_json=body_json, ok_error_codes=ok_error_codes, timeout=timeout)
|
||||||
if resp_json['type'] == 'async':
|
if resp_json['type'] == 'async':
|
||||||
url = '{0}/wait'.format(resp_json['operation'])
|
url = f"{resp_json['operation']}/wait"
|
||||||
resp_json = self._send_request('GET', url)
|
resp_json = self._send_request('GET', url)
|
||||||
if wait_for_container:
|
if wait_for_container:
|
||||||
while resp_json['metadata']['status'] == 'Running':
|
while resp_json['metadata']['status'] == 'Running':
|
||||||
|
|
|
@ -93,12 +93,12 @@ class ManageIQ(object):
|
||||||
ca_bundle_path = params['ca_cert']
|
ca_bundle_path = params['ca_cert']
|
||||||
|
|
||||||
self._module = module
|
self._module = module
|
||||||
self._api_url = url + '/api'
|
self._api_url = f"{url}/api"
|
||||||
self._auth = dict(user=username, password=password, token=token)
|
self._auth = dict(user=username, password=password, token=token)
|
||||||
try:
|
try:
|
||||||
self._client = ManageIQClient(self._api_url, self._auth, verify_ssl=verify_ssl, ca_bundle_path=ca_bundle_path)
|
self._client = ManageIQClient(self._api_url, self._auth, verify_ssl=verify_ssl, ca_bundle_path=ca_bundle_path)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.module.fail_json(msg="failed to open connection (%s): %s" % (url, str(e)))
|
self.module.fail_json(msg=f"failed to open connection ({url}): {e}")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def module(self):
|
def module(self):
|
||||||
|
@ -138,7 +138,7 @@ class ManageIQ(object):
|
||||||
except ValueError:
|
except ValueError:
|
||||||
return None
|
return None
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.module.fail_json(msg="failed to find resource {error}".format(error=e))
|
self.module.fail_json(msg=f"failed to find resource {e}")
|
||||||
return vars(entity)
|
return vars(entity)
|
||||||
|
|
||||||
def find_collection_resource_or_fail(self, collection_name, **params):
|
def find_collection_resource_or_fail(self, collection_name, **params):
|
||||||
|
@ -151,8 +151,7 @@ class ManageIQ(object):
|
||||||
if resource:
|
if resource:
|
||||||
return resource
|
return resource
|
||||||
else:
|
else:
|
||||||
msg = "{collection_name} where {params} does not exist in manageiq".format(
|
msg = f"{collection_name} where {params!s} does not exist in manageiq"
|
||||||
collection_name=collection_name, params=str(params))
|
|
||||||
self.module.fail_json(msg=msg)
|
self.module.fail_json(msg=msg)
|
||||||
|
|
||||||
def policies(self, resource_id, resource_type, resource_name):
|
def policies(self, resource_id, resource_type, resource_name):
|
||||||
|
@ -174,8 +173,7 @@ class ManageIQ(object):
|
||||||
if resource:
|
if resource:
|
||||||
return resource["id"]
|
return resource["id"]
|
||||||
else:
|
else:
|
||||||
msg = "{resource_name} {resource_type} does not exist in manageiq".format(
|
msg = f"{resource_name} {resource_type} does not exist in manageiq"
|
||||||
resource_name=resource_name, resource_type=resource_type)
|
|
||||||
self.module.fail_json(msg=msg)
|
self.module.fail_json(msg=msg)
|
||||||
|
|
||||||
|
|
||||||
|
@ -193,10 +191,7 @@ class ManageIQPolicies(object):
|
||||||
|
|
||||||
self.resource_type = resource_type
|
self.resource_type = resource_type
|
||||||
self.resource_id = resource_id
|
self.resource_id = resource_id
|
||||||
self.resource_url = '{api_url}/{resource_type}/{resource_id}'.format(
|
self.resource_url = f'{self.api_url}/{resource_type}/{resource_id}'
|
||||||
api_url=self.api_url,
|
|
||||||
resource_type=resource_type,
|
|
||||||
resource_id=resource_id)
|
|
||||||
|
|
||||||
def query_profile_href(self, profile):
|
def query_profile_href(self, profile):
|
||||||
""" Add or Update the policy_profile href field
|
""" Add or Update the policy_profile href field
|
||||||
|
@ -215,9 +210,7 @@ class ManageIQPolicies(object):
|
||||||
try:
|
try:
|
||||||
response = self.client.get(url.format(resource_url=self.resource_url))
|
response = self.client.get(url.format(resource_url=self.resource_url))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
msg = "Failed to query {resource_type} policies: {error}".format(
|
msg = f"Failed to query {self.resource_type} policies: {e}"
|
||||||
resource_type=self.resource_type,
|
|
||||||
error=e)
|
|
||||||
self.module.fail_json(msg=msg)
|
self.module.fail_json(msg=msg)
|
||||||
|
|
||||||
resources = response.get('resources', [])
|
resources = response.get('resources', [])
|
||||||
|
@ -235,9 +228,7 @@ class ManageIQPolicies(object):
|
||||||
try:
|
try:
|
||||||
response = self.client.get(url.format(api_url=self.api_url, profile_id=profile_id))
|
response = self.client.get(url.format(api_url=self.api_url, profile_id=profile_id))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
msg = "Failed to query {resource_type} policies: {error}".format(
|
msg = f"Failed to query {self.resource_type} policies: {e}"
|
||||||
resource_type=self.resource_type,
|
|
||||||
error=e)
|
|
||||||
self.module.fail_json(msg=msg)
|
self.module.fail_json(msg=msg)
|
||||||
|
|
||||||
resources = response.get('policies', [])
|
resources = response.get('policies', [])
|
||||||
|
@ -316,34 +307,26 @@ class ManageIQPolicies(object):
|
||||||
if not profiles_to_post:
|
if not profiles_to_post:
|
||||||
return dict(
|
return dict(
|
||||||
changed=False,
|
changed=False,
|
||||||
msg="Profiles {profiles} already {action}ed, nothing to do".format(
|
msg=f"Profiles {profiles} already {action}ed, nothing to do")
|
||||||
action=action,
|
|
||||||
profiles=profiles))
|
|
||||||
|
|
||||||
# try to assign or unassign profiles to resource
|
# try to assign or unassign profiles to resource
|
||||||
url = '{resource_url}/policy_profiles'.format(resource_url=self.resource_url)
|
url = f'{self.resource_url}/policy_profiles'
|
||||||
try:
|
try:
|
||||||
response = self.client.post(url, action=action, resources=profiles_to_post)
|
response = self.client.post(url, action=action, resources=profiles_to_post)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
msg = "Failed to {action} profile: {error}".format(
|
msg = f"Failed to {action} profile: {e}"
|
||||||
action=action,
|
|
||||||
error=e)
|
|
||||||
self.module.fail_json(msg=msg)
|
self.module.fail_json(msg=msg)
|
||||||
|
|
||||||
# check all entities in result to be successful
|
# check all entities in result to be successful
|
||||||
for result in response['results']:
|
for result in response['results']:
|
||||||
if not result['success']:
|
if not result['success']:
|
||||||
msg = "Failed to {action}: {message}".format(
|
msg = f"Failed to {action}: {result['message']}"
|
||||||
action=action,
|
|
||||||
message=result['message'])
|
|
||||||
self.module.fail_json(msg=msg)
|
self.module.fail_json(msg=msg)
|
||||||
|
|
||||||
# successfully changed all needed profiles
|
# successfully changed all needed profiles
|
||||||
return dict(
|
return dict(
|
||||||
changed=True,
|
changed=True,
|
||||||
msg="Successfully {action}ed profiles: {profiles}".format(
|
msg=f"Successfully {action}ed profiles: {profiles}")
|
||||||
action=action,
|
|
||||||
profiles=profiles))
|
|
||||||
|
|
||||||
|
|
||||||
class ManageIQTags(object):
|
class ManageIQTags(object):
|
||||||
|
@ -360,17 +343,12 @@ class ManageIQTags(object):
|
||||||
|
|
||||||
self.resource_type = resource_type
|
self.resource_type = resource_type
|
||||||
self.resource_id = resource_id
|
self.resource_id = resource_id
|
||||||
self.resource_url = '{api_url}/{resource_type}/{resource_id}'.format(
|
self.resource_url = f'{self.api_url}/{resource_type}/{resource_id}'
|
||||||
api_url=self.api_url,
|
|
||||||
resource_type=resource_type,
|
|
||||||
resource_id=resource_id)
|
|
||||||
|
|
||||||
def full_tag_name(self, tag):
|
def full_tag_name(self, tag):
|
||||||
""" Returns the full tag name in manageiq
|
""" Returns the full tag name in manageiq
|
||||||
"""
|
"""
|
||||||
return '/managed/{tag_category}/{tag_name}'.format(
|
return f"/managed/{tag['category']}/{tag['name']}"
|
||||||
tag_category=tag['category'],
|
|
||||||
tag_name=tag['name'])
|
|
||||||
|
|
||||||
def clean_tag_object(self, tag):
|
def clean_tag_object(self, tag):
|
||||||
""" Clean a tag object to have human readable form of:
|
""" Clean a tag object to have human readable form of:
|
||||||
|
@ -397,9 +375,7 @@ class ManageIQTags(object):
|
||||||
try:
|
try:
|
||||||
response = self.client.get(url.format(resource_url=self.resource_url))
|
response = self.client.get(url.format(resource_url=self.resource_url))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
msg = "Failed to query {resource_type} tags: {error}".format(
|
msg = f"Failed to query {self.resource_type} tags: {e}"
|
||||||
resource_type=self.resource_type,
|
|
||||||
error=e)
|
|
||||||
self.module.fail_json(msg=msg)
|
self.module.fail_json(msg=msg)
|
||||||
|
|
||||||
resources = response.get('resources', [])
|
resources = response.get('resources', [])
|
||||||
|
@ -442,27 +418,23 @@ class ManageIQTags(object):
|
||||||
if not tags_to_post:
|
if not tags_to_post:
|
||||||
return dict(
|
return dict(
|
||||||
changed=False,
|
changed=False,
|
||||||
msg="Tags already {action}ed, nothing to do".format(action=action))
|
msg=f"Tags already {action}ed, nothing to do")
|
||||||
|
|
||||||
# try to assign or unassign tags to resource
|
# try to assign or unassign tags to resource
|
||||||
url = '{resource_url}/tags'.format(resource_url=self.resource_url)
|
url = f'{self.resource_url}/tags'
|
||||||
try:
|
try:
|
||||||
response = self.client.post(url, action=action, resources=tags)
|
response = self.client.post(url, action=action, resources=tags)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
msg = "Failed to {action} tag: {error}".format(
|
msg = f"Failed to {action} tag: {e}"
|
||||||
action=action,
|
|
||||||
error=e)
|
|
||||||
self.module.fail_json(msg=msg)
|
self.module.fail_json(msg=msg)
|
||||||
|
|
||||||
# check all entities in result to be successful
|
# check all entities in result to be successful
|
||||||
for result in response['results']:
|
for result in response['results']:
|
||||||
if not result['success']:
|
if not result['success']:
|
||||||
msg = "Failed to {action}: {message}".format(
|
msg = f"Failed to {action}: {result['message']}"
|
||||||
action=action,
|
|
||||||
message=result['message'])
|
|
||||||
self.module.fail_json(msg=msg)
|
self.module.fail_json(msg=msg)
|
||||||
|
|
||||||
# successfully changed all needed tags
|
# successfully changed all needed tags
|
||||||
return dict(
|
return dict(
|
||||||
changed=True,
|
changed=True,
|
||||||
msg="Successfully {action}ed tags".format(action=action))
|
msg=f"Successfully {action}ed tags")
|
||||||
|
|
|
@ -55,7 +55,7 @@ def memset_api_call(api_key, api_method, payload=None):
|
||||||
data = urlencode(payload)
|
data = urlencode(payload)
|
||||||
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
|
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
|
||||||
api_uri_base = 'https://api.memset.com/v1/json/'
|
api_uri_base = 'https://api.memset.com/v1/json/'
|
||||||
api_uri = '{0}{1}/' . format(api_uri_base, api_method)
|
api_uri = f'{api_uri_base}{api_method}/'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
resp = open_url(api_uri, data=data, headers=headers, method="POST", force_basic_auth=True, url_username=api_key)
|
resp = open_url(api_uri, data=data, headers=headers, method="POST", force_basic_auth=True, url_username=api_key)
|
||||||
|
@ -72,13 +72,13 @@ def memset_api_call(api_key, api_method, payload=None):
|
||||||
response.status_code = errorcode
|
response.status_code = errorcode
|
||||||
|
|
||||||
if response.status_code is not None:
|
if response.status_code is not None:
|
||||||
msg = "Memset API returned a {0} response ({1}, {2})." . format(response.status_code, response.json()['error_type'], response.json()['error'])
|
msg = f"Memset API returned a {response.status_code} response ({response.json()['error_type']}, {response.json()['error']})."
|
||||||
else:
|
else:
|
||||||
msg = "Memset API returned an error ({0}, {1})." . format(response.json()['error_type'], response.json()['error'])
|
msg = f"Memset API returned an error ({response.json()['error_type']}, {response.json()['error']})."
|
||||||
except urllib_error.URLError as e:
|
except urllib_error.URLError as e:
|
||||||
has_failed = True
|
has_failed = True
|
||||||
msg = "An URLError occurred ({0})." . format(type(e))
|
msg = f"An URLError occurred ({type(e)})."
|
||||||
response.stderr = "{0}" . format(e)
|
response.stderr = f"{e}"
|
||||||
|
|
||||||
if msg is None:
|
if msg is None:
|
||||||
msg = response.json()
|
msg = response.json()
|
||||||
|
|
|
@ -40,7 +40,7 @@ class ModuleHelperBase(object):
|
||||||
def __getattr__(self, attr):
|
def __getattr__(self, attr):
|
||||||
if attr in self._delegated_to_module:
|
if attr in self._delegated_to_module:
|
||||||
return getattr(self.module, attr)
|
return getattr(self.module, attr)
|
||||||
raise AttributeError("ModuleHelperBase has no attribute '%s'" % (attr, ))
|
raise AttributeError(f"ModuleHelperBase has no attribute '{attr}'")
|
||||||
|
|
||||||
def __init_module__(self):
|
def __init_module__(self):
|
||||||
pass
|
pass
|
||||||
|
|
|
@ -38,7 +38,7 @@ def module_fails_on_exception(func):
|
||||||
@wraps(func)
|
@wraps(func)
|
||||||
def wrapper(self, *args, **kwargs):
|
def wrapper(self, *args, **kwargs):
|
||||||
def fix_key(k):
|
def fix_key(k):
|
||||||
return k if k not in conflict_list else "_" + k
|
return k if k not in conflict_list else f"_{k}"
|
||||||
|
|
||||||
def fix_var_conflicts(output):
|
def fix_var_conflicts(output):
|
||||||
result = {fix_key(k): v for k, v in output.items()}
|
result = {fix_key(k): v for k, v in output.items()}
|
||||||
|
@ -56,7 +56,7 @@ def module_fails_on_exception(func):
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
# patchy solution to resolve conflict with output variables
|
# patchy solution to resolve conflict with output variables
|
||||||
output = fix_var_conflicts(self.output)
|
output = fix_var_conflicts(self.output)
|
||||||
msg = "Module failed with exception: {0}".format(str(e).strip())
|
msg = f"Module failed with exception: {str(e).strip()}"
|
||||||
self.module.fail_json(msg=msg, exception=traceback.format_exc(),
|
self.module.fail_json(msg=msg, exception=traceback.format_exc(),
|
||||||
output=self.output, vars=self.vars.output(), **output)
|
output=self.output, vars=self.vars.output(), **output)
|
||||||
return wrapper
|
return wrapper
|
||||||
|
|
|
@ -10,7 +10,7 @@ from ansible.module_utils.common.text.converters import to_native
|
||||||
|
|
||||||
class ModuleHelperException(Exception):
|
class ModuleHelperException(Exception):
|
||||||
def __init__(self, msg, update_output=None, *args, **kwargs):
|
def __init__(self, msg, update_output=None, *args, **kwargs):
|
||||||
self.msg = to_native(msg or "Module failed with exception: {0}".format(self))
|
self.msg = to_native(msg or f"Module failed with exception: {self}")
|
||||||
if update_output is None:
|
if update_output is None:
|
||||||
update_output = {}
|
update_output = {}
|
||||||
self.update_output = update_output
|
self.update_output = update_output
|
||||||
|
|
|
@ -15,7 +15,7 @@ class DeprecateAttrsMixin(object):
|
||||||
if target is None:
|
if target is None:
|
||||||
target = self
|
target = self
|
||||||
if not hasattr(target, attr):
|
if not hasattr(target, attr):
|
||||||
raise ValueError("Target {0} has no attribute {1}".format(target, attr))
|
raise ValueError(f"Target {target} has no attribute {attr}")
|
||||||
if module is None:
|
if module is None:
|
||||||
if isinstance(target, AnsibleModule):
|
if isinstance(target, AnsibleModule):
|
||||||
module = target
|
module = target
|
||||||
|
@ -57,4 +57,4 @@ class DeprecateAttrsMixin(object):
|
||||||
# override attribute
|
# override attribute
|
||||||
prop = property(_getter)
|
prop = property(_getter)
|
||||||
setattr(target, attr, prop)
|
setattr(target, attr, prop)
|
||||||
setattr(target, "_{0}_setter".format(attr), prop.setter(_setter))
|
setattr(target, f"_{attr}_setter", prop.setter(_setter))
|
||||||
|
|
|
@ -15,7 +15,7 @@ class StateMixin(object):
|
||||||
return self.default_state if state is None else state
|
return self.default_state if state is None else state
|
||||||
|
|
||||||
def _method(self, state):
|
def _method(self, state):
|
||||||
return "{0}_{1}".format(self.state_param, state)
|
return f"{self.state_param}_{state}"
|
||||||
|
|
||||||
def __run__(self):
|
def __run__(self):
|
||||||
state = self._state()
|
state = self._state()
|
||||||
|
@ -35,4 +35,4 @@ class StateMixin(object):
|
||||||
return func()
|
return func()
|
||||||
|
|
||||||
def __state_fallback__(self):
|
def __state_fallback__(self):
|
||||||
raise ValueError("Cannot find method: {0}".format(self._method(self._state())))
|
raise ValueError(f"Cannot find method: {self._method(self._state())}")
|
||||||
|
|
|
@ -62,7 +62,7 @@ def _delete_pritunl_organization(
|
||||||
api_token=api_token,
|
api_token=api_token,
|
||||||
api_secret=api_secret,
|
api_secret=api_secret,
|
||||||
method="DELETE",
|
method="DELETE",
|
||||||
path="/organization/%s" % (organization_id),
|
path=f"/organization/{organization_id}",
|
||||||
validate_certs=validate_certs,
|
validate_certs=validate_certs,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -90,7 +90,7 @@ def _get_pritunl_users(
|
||||||
api_secret=api_secret,
|
api_secret=api_secret,
|
||||||
base_url=base_url,
|
base_url=base_url,
|
||||||
method="GET",
|
method="GET",
|
||||||
path="/user/%s" % organization_id,
|
path=f"/user/{organization_id}",
|
||||||
validate_certs=validate_certs,
|
validate_certs=validate_certs,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -103,7 +103,7 @@ def _delete_pritunl_user(
|
||||||
api_secret=api_secret,
|
api_secret=api_secret,
|
||||||
base_url=base_url,
|
base_url=base_url,
|
||||||
method="DELETE",
|
method="DELETE",
|
||||||
path="/user/%s/%s" % (organization_id, user_id),
|
path=f"/user/{organization_id}/{user_id}",
|
||||||
validate_certs=validate_certs,
|
validate_certs=validate_certs,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -116,7 +116,7 @@ def _post_pritunl_user(
|
||||||
api_secret=api_secret,
|
api_secret=api_secret,
|
||||||
base_url=base_url,
|
base_url=base_url,
|
||||||
method="POST",
|
method="POST",
|
||||||
path="/user/%s" % organization_id,
|
path=f"/user/{organization_id}",
|
||||||
headers={"Content-Type": "application/json"},
|
headers={"Content-Type": "application/json"},
|
||||||
data=json.dumps(user_data),
|
data=json.dumps(user_data),
|
||||||
validate_certs=validate_certs,
|
validate_certs=validate_certs,
|
||||||
|
@ -137,7 +137,7 @@ def _put_pritunl_user(
|
||||||
api_secret=api_secret,
|
api_secret=api_secret,
|
||||||
base_url=base_url,
|
base_url=base_url,
|
||||||
method="PUT",
|
method="PUT",
|
||||||
path="/user/%s/%s" % (organization_id, user_id),
|
path=f"/user/{organization_id}/{user_id}",
|
||||||
headers={"Content-Type": "application/json"},
|
headers={"Content-Type": "application/json"},
|
||||||
data=json.dumps(user_data),
|
data=json.dumps(user_data),
|
||||||
validate_certs=validate_certs,
|
validate_certs=validate_certs,
|
||||||
|
@ -221,7 +221,7 @@ def post_pritunl_organization(
|
||||||
|
|
||||||
if response.getcode() != 200:
|
if response.getcode() != 200:
|
||||||
raise PritunlException(
|
raise PritunlException(
|
||||||
"Could not add organization %s to Pritunl" % (organization_name)
|
f"Could not add organization {organization_name} to Pritunl"
|
||||||
)
|
)
|
||||||
# The user PUT request returns the updated user object
|
# The user PUT request returns the updated user object
|
||||||
return json.loads(response.read())
|
return json.loads(response.read())
|
||||||
|
@ -249,8 +249,7 @@ def post_pritunl_user(
|
||||||
|
|
||||||
if response.getcode() != 200:
|
if response.getcode() != 200:
|
||||||
raise PritunlException(
|
raise PritunlException(
|
||||||
"Could not remove user %s from organization %s from Pritunl"
|
f"Could not remove user {user_id} from organization {organization_id} from Pritunl"
|
||||||
% (user_id, organization_id)
|
|
||||||
)
|
)
|
||||||
# user POST request returns an array of a single item,
|
# user POST request returns an array of a single item,
|
||||||
# so return this item instead of the list
|
# so return this item instead of the list
|
||||||
|
@ -268,8 +267,7 @@ def post_pritunl_user(
|
||||||
|
|
||||||
if response.getcode() != 200:
|
if response.getcode() != 200:
|
||||||
raise PritunlException(
|
raise PritunlException(
|
||||||
"Could not update user %s from organization %s from Pritunl"
|
f"Could not update user {user_id} from organization {organization_id} from Pritunl"
|
||||||
% (user_id, organization_id)
|
|
||||||
)
|
)
|
||||||
# The user PUT request returns the updated user object
|
# The user PUT request returns the updated user object
|
||||||
return json.loads(response.read())
|
return json.loads(response.read())
|
||||||
|
@ -288,7 +286,7 @@ def delete_pritunl_organization(
|
||||||
|
|
||||||
if response.getcode() != 200:
|
if response.getcode() != 200:
|
||||||
raise PritunlException(
|
raise PritunlException(
|
||||||
"Could not remove organization %s from Pritunl" % (organization_id)
|
f"Could not remove organization {organization_id} from Pritunl"
|
||||||
)
|
)
|
||||||
|
|
||||||
return json.loads(response.read())
|
return json.loads(response.read())
|
||||||
|
@ -308,8 +306,7 @@ def delete_pritunl_user(
|
||||||
|
|
||||||
if response.getcode() != 200:
|
if response.getcode() != 200:
|
||||||
raise PritunlException(
|
raise PritunlException(
|
||||||
"Could not remove user %s from organization %s from Pritunl"
|
f"Could not remove user {user_id} from organization {organization_id} from Pritunl"
|
||||||
% (user_id, organization_id)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
return json.loads(response.read())
|
return json.loads(response.read())
|
||||||
|
@ -332,9 +329,7 @@ def pritunl_auth_request(
|
||||||
auth_timestamp = str(int(time.time()))
|
auth_timestamp = str(int(time.time()))
|
||||||
auth_nonce = uuid.uuid4().hex
|
auth_nonce = uuid.uuid4().hex
|
||||||
|
|
||||||
auth_string = "&".join(
|
auth_string = f"{api_token}&{auth_timestamp}&{auth_nonce}&{method.upper()}&{path}"
|
||||||
[api_token, auth_timestamp, auth_nonce, method.upper(), path]
|
|
||||||
)
|
|
||||||
|
|
||||||
auth_signature = base64.b64encode(
|
auth_signature = base64.b64encode(
|
||||||
hmac.new(
|
hmac.new(
|
||||||
|
@ -353,7 +348,7 @@ def pritunl_auth_request(
|
||||||
auth_headers.update(headers)
|
auth_headers.update(headers)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
uri = "%s%s" % (base_url, path)
|
uri = f"{base_url}{path}"
|
||||||
|
|
||||||
return open_url(
|
return open_url(
|
||||||
uri,
|
uri,
|
||||||
|
|
|
@ -13,7 +13,6 @@ from urllib.parse import urlparse
|
||||||
|
|
||||||
from ansible.module_utils.urls import open_url
|
from ansible.module_utils.urls import open_url
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.module_utils.common.text.converters import to_text
|
|
||||||
|
|
||||||
|
|
||||||
GET_HEADERS = {'accept': 'application/json'}
|
GET_HEADERS = {'accept': 'application/json'}
|
||||||
|
@ -57,16 +56,14 @@ class OcapiUtils(object):
|
||||||
headers = {k.lower(): v for (k, v) in resp.info().items()}
|
headers = {k.lower(): v for (k, v) in resp.info().items()}
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
return {'ret': False,
|
return {'ret': False,
|
||||||
'msg': "HTTP Error %s on GET request to '%s'"
|
'msg': f"HTTP Error {e.code} on GET request to '{uri}'",
|
||||||
% (e.code, uri),
|
|
||||||
'status': e.code}
|
'status': e.code}
|
||||||
except URLError as e:
|
except URLError as e:
|
||||||
return {'ret': False, 'msg': "URL Error on GET request to '%s': '%s'"
|
return {'ret': False, 'msg': f"URL Error on GET request to '{uri}': '{e.reason}'"}
|
||||||
% (uri, e.reason)}
|
|
||||||
# Almost all errors should be caught above, but just in case
|
# Almost all errors should be caught above, but just in case
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return {'ret': False,
|
return {'ret': False,
|
||||||
'msg': "Failed GET request to '%s': '%s'" % (uri, to_text(e))}
|
'msg': f"Failed GET request to '{uri}': '{e}'"}
|
||||||
return {'ret': True, 'data': data, 'headers': headers}
|
return {'ret': True, 'data': data, 'headers': headers}
|
||||||
|
|
||||||
def delete_request(self, uri, etag=None):
|
def delete_request(self, uri, etag=None):
|
||||||
|
@ -87,16 +84,14 @@ class OcapiUtils(object):
|
||||||
headers = {k.lower(): v for (k, v) in resp.info().items()}
|
headers = {k.lower(): v for (k, v) in resp.info().items()}
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
return {'ret': False,
|
return {'ret': False,
|
||||||
'msg': "HTTP Error %s on DELETE request to '%s'"
|
'msg': f"HTTP Error {e.code} on DELETE request to '{uri}'",
|
||||||
% (e.code, uri),
|
|
||||||
'status': e.code}
|
'status': e.code}
|
||||||
except URLError as e:
|
except URLError as e:
|
||||||
return {'ret': False, 'msg': "URL Error on DELETE request to '%s': '%s'"
|
return {'ret': False, 'msg': f"URL Error on DELETE request to '{uri}': '{e.reason}'"}
|
||||||
% (uri, e.reason)}
|
|
||||||
# Almost all errors should be caught above, but just in case
|
# Almost all errors should be caught above, but just in case
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return {'ret': False,
|
return {'ret': False,
|
||||||
'msg': "Failed DELETE request to '%s': '%s'" % (uri, to_text(e))}
|
'msg': f"Failed DELETE request to '{uri}': '{e}'"}
|
||||||
return {'ret': True, 'data': data, 'headers': headers}
|
return {'ret': True, 'data': data, 'headers': headers}
|
||||||
|
|
||||||
def put_request(self, uri, payload, etag=None):
|
def put_request(self, uri, payload, etag=None):
|
||||||
|
@ -114,16 +109,14 @@ class OcapiUtils(object):
|
||||||
headers = {k.lower(): v for (k, v) in resp.info().items()}
|
headers = {k.lower(): v for (k, v) in resp.info().items()}
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
return {'ret': False,
|
return {'ret': False,
|
||||||
'msg': "HTTP Error %s on PUT request to '%s'"
|
'msg': f"HTTP Error {e.code} on PUT request to '{uri}'",
|
||||||
% (e.code, uri),
|
|
||||||
'status': e.code}
|
'status': e.code}
|
||||||
except URLError as e:
|
except URLError as e:
|
||||||
return {'ret': False, 'msg': "URL Error on PUT request to '%s': '%s'"
|
return {'ret': False, 'msg': f"URL Error on PUT request to '{uri}': '{e.reason}'"}
|
||||||
% (uri, e.reason)}
|
|
||||||
# Almost all errors should be caught above, but just in case
|
# Almost all errors should be caught above, but just in case
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return {'ret': False,
|
return {'ret': False,
|
||||||
'msg': "Failed PUT request to '%s': '%s'" % (uri, to_text(e))}
|
'msg': f"Failed PUT request to '{uri}': '{e}'"}
|
||||||
return {'ret': True, 'headers': headers, 'resp': resp}
|
return {'ret': True, 'headers': headers, 'resp': resp}
|
||||||
|
|
||||||
def post_request(self, uri, payload, content_type="application/json", timeout=None):
|
def post_request(self, uri, payload, content_type="application/json", timeout=None):
|
||||||
|
@ -145,16 +138,14 @@ class OcapiUtils(object):
|
||||||
headers = {k.lower(): v for (k, v) in resp.info().items()}
|
headers = {k.lower(): v for (k, v) in resp.info().items()}
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
return {'ret': False,
|
return {'ret': False,
|
||||||
'msg': "HTTP Error %s on POST request to '%s'"
|
'msg': f"HTTP Error {e.code} on POST request to '{uri}'",
|
||||||
% (e.code, uri),
|
|
||||||
'status': e.code}
|
'status': e.code}
|
||||||
except URLError as e:
|
except URLError as e:
|
||||||
return {'ret': False, 'msg': "URL Error on POST request to '%s': '%s'"
|
return {'ret': False, 'msg': f"URL Error on POST request to '{uri}': '{e.reason}'"}
|
||||||
% (uri, e.reason)}
|
|
||||||
# Almost all errors should be caught above, but just in case
|
# Almost all errors should be caught above, but just in case
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return {'ret': False,
|
return {'ret': False,
|
||||||
'msg': "Failed POST request to '%s': '%s'" % (uri, to_text(e))}
|
'msg': f"Failed POST request to '{uri}': '{e}'"}
|
||||||
return {'ret': True, 'headers': headers, 'resp': resp}
|
return {'ret': True, 'headers': headers, 'resp': resp}
|
||||||
|
|
||||||
def get_uri_with_slot_number_query_param(self, uri):
|
def get_uri_with_slot_number_query_param(self, uri):
|
||||||
|
@ -166,7 +157,7 @@ class OcapiUtils(object):
|
||||||
"""
|
"""
|
||||||
if self.proxy_slot_number is not None:
|
if self.proxy_slot_number is not None:
|
||||||
parsed_url = urlparse(uri)
|
parsed_url = urlparse(uri)
|
||||||
return parsed_url._replace(query="slotnumber=" + str(self.proxy_slot_number)).geturl()
|
return parsed_url._replace(query=f"slotnumber={self.proxy_slot_number}").geturl()
|
||||||
else:
|
else:
|
||||||
return uri
|
return uri
|
||||||
|
|
||||||
|
@ -201,7 +192,7 @@ class OcapiUtils(object):
|
||||||
elif command.startswith("PowerMode"):
|
elif command.startswith("PowerMode"):
|
||||||
return self.manage_power_mode(command)
|
return self.manage_power_mode(command)
|
||||||
else:
|
else:
|
||||||
return {'ret': False, 'msg': 'Invalid command: ' + command}
|
return {'ret': False, 'msg': f"Invalid command: {command}"}
|
||||||
|
|
||||||
return {'ret': True}
|
return {'ret': True}
|
||||||
|
|
||||||
|
@ -240,7 +231,7 @@ class OcapiUtils(object):
|
||||||
return response
|
return response
|
||||||
data = response['data']
|
data = response['data']
|
||||||
if key not in data:
|
if key not in data:
|
||||||
return {'ret': False, 'msg': "Key %s not found" % key}
|
return {'ret': False, 'msg': f"Key {key} not found"}
|
||||||
if 'ID' not in data[key]:
|
if 'ID' not in data[key]:
|
||||||
return {'ret': False, 'msg': 'IndicatorLED for resource has no ID.'}
|
return {'ret': False, 'msg': 'IndicatorLED for resource has no ID.'}
|
||||||
|
|
||||||
|
@ -283,7 +274,7 @@ class OcapiUtils(object):
|
||||||
return response
|
return response
|
||||||
data = response['data']
|
data = response['data']
|
||||||
if key not in data:
|
if key not in data:
|
||||||
return {'ret': False, 'msg': "Key %s not found" % key}
|
return {'ret': False, 'msg': f"Key {key} not found"}
|
||||||
if 'ID' not in data[key]:
|
if 'ID' not in data[key]:
|
||||||
return {'ret': False, 'msg': 'PowerState for resource has no ID.'}
|
return {'ret': False, 'msg': 'PowerState for resource has no ID.'}
|
||||||
|
|
||||||
|
@ -305,7 +296,7 @@ class OcapiUtils(object):
|
||||||
if response['ret'] is False:
|
if response['ret'] is False:
|
||||||
return response
|
return response
|
||||||
else:
|
else:
|
||||||
return {'ret': False, 'msg': 'Invalid command: ' + command}
|
return {'ret': False, 'msg': f"Invalid command: {command}"}
|
||||||
|
|
||||||
return {'ret': True}
|
return {'ret': True}
|
||||||
|
|
||||||
|
@ -322,14 +313,14 @@ class OcapiUtils(object):
|
||||||
this method sends the file as binary.
|
this method sends the file as binary.
|
||||||
"""
|
"""
|
||||||
boundary = str(uuid.uuid4()) # Generate a random boundary
|
boundary = str(uuid.uuid4()) # Generate a random boundary
|
||||||
body = "--" + boundary + '\r\n'
|
body = f"--{boundary}\r\n"
|
||||||
body += 'Content-Disposition: form-data; name="FirmwareFile"; filename="%s"\r\n' % to_native(os.path.basename(filename))
|
body += f'Content-Disposition: form-data; name="FirmwareFile"; filename="{to_native(os.path.basename(filename))}"\r\n'
|
||||||
body += 'Content-Type: application/octet-stream\r\n\r\n'
|
body += 'Content-Type: application/octet-stream\r\n\r\n'
|
||||||
body_bytes = bytearray(body, 'utf-8')
|
body_bytes = bytearray(body, 'utf-8')
|
||||||
with open(filename, 'rb') as f:
|
with open(filename, 'rb') as f:
|
||||||
body_bytes += f.read()
|
body_bytes += f.read()
|
||||||
body_bytes += bytearray("\r\n--%s--" % boundary, 'utf-8')
|
body_bytes += bytearray(f"\r\n--{boundary}--", 'utf-8')
|
||||||
return ("multipart/form-data; boundary=%s" % boundary,
|
return (f"multipart/form-data; boundary={boundary}",
|
||||||
body_bytes)
|
body_bytes)
|
||||||
|
|
||||||
def upload_firmware_image(self, update_image_path):
|
def upload_firmware_image(self, update_image_path):
|
||||||
|
@ -339,7 +330,7 @@ class OcapiUtils(object):
|
||||||
"""
|
"""
|
||||||
if not (os.path.exists(update_image_path) and os.path.isfile(update_image_path)):
|
if not (os.path.exists(update_image_path) and os.path.isfile(update_image_path)):
|
||||||
return {'ret': False, 'msg': 'File does not exist.'}
|
return {'ret': False, 'msg': 'File does not exist.'}
|
||||||
url = self.root_uri + "OperatingSystem"
|
url = f"{self.root_uri}OperatingSystem"
|
||||||
url = self.get_uri_with_slot_number_query_param(url)
|
url = self.get_uri_with_slot_number_query_param(url)
|
||||||
content_type, b_form_data = self.prepare_multipart_firmware_upload(update_image_path)
|
content_type, b_form_data = self.prepare_multipart_firmware_upload(update_image_path)
|
||||||
|
|
||||||
|
|
|
@ -209,7 +209,7 @@ def wait_for_resource_creation_completion(oneandone_conn,
|
||||||
(resource_type != OneAndOneResources.server and resource_state.lower() == 'active')):
|
(resource_type != OneAndOneResources.server and resource_state.lower() == 'active')):
|
||||||
return
|
return
|
||||||
elif resource_state.lower() == 'failed':
|
elif resource_state.lower() == 'failed':
|
||||||
raise Exception('%s creation failed for %s' % (resource_type, resource_id))
|
raise Exception(f'{resource_type} creation failed for {resource_id}')
|
||||||
elif resource_state.lower() in ('active',
|
elif resource_state.lower() in ('active',
|
||||||
'enabled',
|
'enabled',
|
||||||
'deploying',
|
'deploying',
|
||||||
|
@ -217,10 +217,10 @@ def wait_for_resource_creation_completion(oneandone_conn,
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
raise Exception(
|
raise Exception(
|
||||||
'Unknown %s state %s' % (resource_type, resource_state))
|
f'Unknown {resource_type} state {resource_state}')
|
||||||
|
|
||||||
raise Exception(
|
raise Exception(
|
||||||
'Timed out waiting for %s completion for %s' % (resource_type, resource_id))
|
f'Timed out waiting for {resource_type} completion for {resource_id}')
|
||||||
|
|
||||||
|
|
||||||
def wait_for_resource_deletion_completion(oneandone_conn,
|
def wait_for_resource_deletion_completion(oneandone_conn,
|
||||||
|
@ -246,7 +246,7 @@ def wait_for_resource_deletion_completion(oneandone_conn,
|
||||||
_type = 'PRIVATENETWORK'
|
_type = 'PRIVATENETWORK'
|
||||||
else:
|
else:
|
||||||
raise Exception(
|
raise Exception(
|
||||||
'Unsupported wait_for delete operation for %s resource' % resource_type)
|
f'Unsupported wait_for delete operation for {resource_type} resource')
|
||||||
|
|
||||||
for log in logs:
|
for log in logs:
|
||||||
if (log['resource']['id'] == resource_id and
|
if (log['resource']['id'] == resource_id and
|
||||||
|
@ -255,4 +255,4 @@ def wait_for_resource_deletion_completion(oneandone_conn,
|
||||||
log['status']['state'] == 'OK'):
|
log['status']['state'] == 'OK'):
|
||||||
return
|
return
|
||||||
raise Exception(
|
raise Exception(
|
||||||
'Timed out waiting for %s deletion for %s' % (resource_type, resource_id))
|
f'Timed out waiting for {resource_type} deletion for {resource_id}')
|
||||||
|
|
|
@ -395,11 +395,11 @@ class OneViewModuleBase(object, metaclass=abc.ABCMeta):
|
||||||
resource1 = first_resource
|
resource1 = first_resource
|
||||||
resource2 = second_resource
|
resource2 = second_resource
|
||||||
|
|
||||||
debug_resources = "resource1 = {0}, resource2 = {1}".format(resource1, resource2)
|
debug_resources = f"resource1 = {resource1}, resource2 = {resource2}"
|
||||||
|
|
||||||
# The first resource is True / Not Null and the second resource is False / Null
|
# The first resource is True / Not Null and the second resource is False / Null
|
||||||
if resource1 and not resource2:
|
if resource1 and not resource2:
|
||||||
self.module.log("resource1 and not resource2. " + debug_resources)
|
self.module.log(f"resource1 and not resource2. {debug_resources}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# Checks all keys in first dict against the second dict
|
# Checks all keys in first dict against the second dict
|
||||||
|
@ -449,15 +449,15 @@ class OneViewModuleBase(object, metaclass=abc.ABCMeta):
|
||||||
resource1 = first_resource
|
resource1 = first_resource
|
||||||
resource2 = second_resource
|
resource2 = second_resource
|
||||||
|
|
||||||
debug_resources = "resource1 = {0}, resource2 = {1}".format(resource1, resource2)
|
debug_resources = f"resource1 = {resource1}, resource2 = {resource2}"
|
||||||
|
|
||||||
# The second list is null / empty / False
|
# The second list is null / empty / False
|
||||||
if not resource2:
|
if not resource2:
|
||||||
self.module.log("resource 2 is null. " + debug_resources)
|
self.module.log(f"resource 2 is null. {debug_resources}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if len(resource1) != len(resource2):
|
if len(resource1) != len(resource2):
|
||||||
self.module.log("resources have different length. " + debug_resources)
|
self.module.log(f"resources have different length. {debug_resources}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
resource1 = sorted(resource1, key=_str_sorted)
|
resource1 = sorted(resource1, key=_str_sorted)
|
||||||
|
@ -467,15 +467,15 @@ class OneViewModuleBase(object, metaclass=abc.ABCMeta):
|
||||||
if isinstance(val, Mapping):
|
if isinstance(val, Mapping):
|
||||||
# change comparison function to compare dictionaries
|
# change comparison function to compare dictionaries
|
||||||
if not self.compare(val, resource2[i]):
|
if not self.compare(val, resource2[i]):
|
||||||
self.module.log("resources are different. " + debug_resources)
|
self.module.log(f"resources are different. {debug_resources}")
|
||||||
return False
|
return False
|
||||||
elif isinstance(val, list):
|
elif isinstance(val, list):
|
||||||
# recursive call
|
# recursive call
|
||||||
if not self.compare_list(val, resource2[i]):
|
if not self.compare_list(val, resource2[i]):
|
||||||
self.module.log("lists are different. " + debug_resources)
|
self.module.log(f"lists are different. {debug_resources}")
|
||||||
return False
|
return False
|
||||||
elif _standardize_value(val) != _standardize_value(resource2[i]):
|
elif _standardize_value(val) != _standardize_value(resource2[i]):
|
||||||
self.module.log("values are different. " + debug_resources)
|
self.module.log(f"values are different. {debug_resources}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# no differences found
|
# no differences found
|
||||||
|
|
|
@ -46,16 +46,16 @@ def render(to_render):
|
||||||
if value is None:
|
if value is None:
|
||||||
continue
|
continue
|
||||||
if isinstance(value, dict):
|
if isinstance(value, dict):
|
||||||
yield '{0:}=[{1:}]'.format(key, ','.join(recurse(value)))
|
yield f"{key}=[{','.join(recurse(value))}]"
|
||||||
continue
|
continue
|
||||||
if isinstance(value, list):
|
if isinstance(value, list):
|
||||||
for item in value:
|
for item in value:
|
||||||
yield '{0:}=[{1:}]'.format(key, ','.join(recurse(item)))
|
yield f"{key}=[{','.join(recurse(item))}]"
|
||||||
continue
|
continue
|
||||||
if isinstance(value, str):
|
if isinstance(value, str):
|
||||||
yield '{0:}="{1:}"'.format(key, value.replace('\\', '\\\\').replace('"', '\\"'))
|
yield '{0:}="{1:}"'.format(key, value.replace('\\', '\\\\').replace('"', '\\"'))
|
||||||
continue
|
continue
|
||||||
yield '{0:}="{1:}"'.format(key, value)
|
yield f'{key}="{value}"'
|
||||||
return '\n'.join(recurse(to_render))
|
return '\n'.join(recurse(to_render))
|
||||||
|
|
||||||
|
|
||||||
|
@ -124,7 +124,7 @@ class OpenNebulaModule:
|
||||||
else:
|
else:
|
||||||
self.fail("Either api_password or the environment variable ONE_PASSWORD must be provided")
|
self.fail("Either api_password or the environment variable ONE_PASSWORD must be provided")
|
||||||
|
|
||||||
session = "%s:%s" % (username, password)
|
session = f"{username}:{password}"
|
||||||
|
|
||||||
if not self.module.params.get("validate_certs") and "PYTHONHTTPSVERIFY" not in environ:
|
if not self.module.params.get("validate_certs") and "PYTHONHTTPSVERIFY" not in environ:
|
||||||
return OneServer(url, session=session, context=no_ssl_validation_context)
|
return OneServer(url, session=session, context=no_ssl_validation_context)
|
||||||
|
@ -312,11 +312,11 @@ class OpenNebulaModule:
|
||||||
current_state = state()
|
current_state = state()
|
||||||
|
|
||||||
if current_state in invalid_states:
|
if current_state in invalid_states:
|
||||||
self.fail('invalid %s state %s' % (element_name, state_name(current_state)))
|
self.fail(f'invalid {element_name} state {state_name(current_state)}')
|
||||||
|
|
||||||
if transition_states:
|
if transition_states:
|
||||||
if current_state not in transition_states:
|
if current_state not in transition_states:
|
||||||
self.fail('invalid %s transition state %s' % (element_name, state_name(current_state)))
|
self.fail(f'invalid {element_name} transition state {state_name(current_state)}')
|
||||||
|
|
||||||
if current_state in target_states:
|
if current_state in target_states:
|
||||||
return True
|
return True
|
||||||
|
@ -334,7 +334,7 @@ class OpenNebulaModule:
|
||||||
try:
|
try:
|
||||||
self.run(self.one, self.module, self.result)
|
self.run(self.one, self.module, self.result)
|
||||||
except OneException as e:
|
except OneException as e:
|
||||||
self.fail(msg="OpenNebula Exception: %s" % e)
|
self.fail(msg=f"OpenNebula Exception: {e}")
|
||||||
|
|
||||||
def run(self, one, module, result):
|
def run(self, one, module, result):
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -47,7 +47,7 @@ class PackageRequirement:
|
||||||
return req.name, req
|
return req.name, req
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise ValueError("Invalid package specification for '{0}': {1}".format(name, e)) from e
|
raise ValueError(f"Invalid package specification for '{name}': {e}") from e
|
||||||
|
|
||||||
def matches_version(self, version):
|
def matches_version(self, version):
|
||||||
"""
|
"""
|
||||||
|
@ -68,4 +68,4 @@ class PackageRequirement:
|
||||||
return ver in self.requirement.specifier
|
return ver in self.requirement.specifier
|
||||||
|
|
||||||
except InvalidVersion as e:
|
except InvalidVersion as e:
|
||||||
raise ValueError("Invalid version '{0}': {1}".format(version, e)) from e
|
raise ValueError(f"Invalid version '{version}': {e}") from e
|
||||||
|
|
|
@ -25,7 +25,7 @@ class PythonRunner(CmdRunner):
|
||||||
path_prefix.append(os.path.join(venv, "bin"))
|
path_prefix.append(os.path.join(venv, "bin"))
|
||||||
if environ_update is None:
|
if environ_update is None:
|
||||||
environ_update = {}
|
environ_update = {}
|
||||||
environ_update["PATH"] = "%s:%s" % (":".join(path_prefix), os.environ["PATH"])
|
environ_update["PATH"] = f"{':'.join(path_prefix)}:{os.environ['PATH']}"
|
||||||
environ_update["VIRTUAL_ENV"] = venv
|
environ_update["VIRTUAL_ENV"] = venv
|
||||||
|
|
||||||
python_cmd = [self.python] + _ensure_list(command)
|
python_cmd = [self.python] + _ensure_list(command)
|
||||||
|
|
|
@ -100,5 +100,5 @@ class RedisAnsible(object):
|
||||||
try:
|
try:
|
||||||
return Redis(**redis_auth_params(self.module))
|
return Redis(**redis_auth_params(self.module))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.module.fail_json(msg='{0}'.format(str(e)))
|
self.module.fail_json(msg=f'{e}')
|
||||||
return None
|
return None
|
||||||
|
|
|
@ -53,11 +53,7 @@ def api_request(module, endpoint, data=None, method="GET", content_type="applica
|
||||||
|
|
||||||
response, info = fetch_url(
|
response, info = fetch_url(
|
||||||
module=module,
|
module=module,
|
||||||
url="%s/api/%s/%s" % (
|
url=f"{module.params['url']}/api/{module.params['api_version']}/{endpoint}",
|
||||||
module.params["url"],
|
|
||||||
module.params["api_version"],
|
|
||||||
endpoint
|
|
||||||
),
|
|
||||||
data=json.dumps(data),
|
data=json.dumps(data),
|
||||||
method=method,
|
method=method,
|
||||||
headers={
|
headers={
|
||||||
|
|
|
@ -105,35 +105,31 @@ def prohibited_output_profile(string):
|
||||||
for c in string:
|
for c in string:
|
||||||
# RFC4013 2.3. Prohibited Output:
|
# RFC4013 2.3. Prohibited Output:
|
||||||
if in_table_c12(c):
|
if in_table_c12(c):
|
||||||
raise ValueError('%s: prohibited non-ASCII space characters '
|
raise ValueError(f'{RFC}: prohibited non-ASCII space characters that cannot be replaced (C.1.2).')
|
||||||
'that cannot be replaced (C.1.2).' % RFC)
|
|
||||||
if in_table_c21_c22(c):
|
if in_table_c21_c22(c):
|
||||||
raise ValueError('%s: prohibited control characters (C.2.1).' % RFC)
|
raise ValueError(f'{RFC}: prohibited control characters (C.2.1).')
|
||||||
if in_table_c3(c):
|
if in_table_c3(c):
|
||||||
raise ValueError('%s: prohibited private Use characters (C.3).' % RFC)
|
raise ValueError(f'{RFC}: prohibited private Use characters (C.3).')
|
||||||
if in_table_c4(c):
|
if in_table_c4(c):
|
||||||
raise ValueError('%s: prohibited non-character code points (C.4).' % RFC)
|
raise ValueError(f'{RFC}: prohibited non-character code points (C.4).')
|
||||||
if in_table_c5(c):
|
if in_table_c5(c):
|
||||||
raise ValueError('%s: prohibited surrogate code points (C.5).' % RFC)
|
raise ValueError(f'{RFC}: prohibited surrogate code points (C.5).')
|
||||||
if in_table_c6(c):
|
if in_table_c6(c):
|
||||||
raise ValueError('%s: prohibited inappropriate for plain text '
|
raise ValueError(f'{RFC}: prohibited inappropriate for plain text characters (C.6).')
|
||||||
'characters (C.6).' % RFC)
|
|
||||||
if in_table_c7(c):
|
if in_table_c7(c):
|
||||||
raise ValueError('%s: prohibited inappropriate for canonical '
|
raise ValueError(f'{RFC}: prohibited inappropriate for canonical representation characters (C.7).')
|
||||||
'representation characters (C.7).' % RFC)
|
|
||||||
if in_table_c8(c):
|
if in_table_c8(c):
|
||||||
raise ValueError('%s: prohibited change display properties / '
|
raise ValueError(f'{RFC}: prohibited change display properties / deprecated characters (C.8).')
|
||||||
'deprecated characters (C.8).' % RFC)
|
|
||||||
if in_table_c9(c):
|
if in_table_c9(c):
|
||||||
raise ValueError('%s: prohibited tagging characters (C.9).' % RFC)
|
raise ValueError(f'{RFC}: prohibited tagging characters (C.9).')
|
||||||
|
|
||||||
# RFC4013, 2.4. Bidirectional Characters:
|
# RFC4013, 2.4. Bidirectional Characters:
|
||||||
if is_prohibited_bidi_ch(c):
|
if is_prohibited_bidi_ch(c):
|
||||||
raise ValueError('%s: prohibited bidi characters (%s).' % (RFC, bidi_table))
|
raise ValueError(f'{RFC}: prohibited bidi characters ({bidi_table}).')
|
||||||
|
|
||||||
# RFC4013, 2.5. Unassigned Code Points:
|
# RFC4013, 2.5. Unassigned Code Points:
|
||||||
if in_table_a1(c):
|
if in_table_a1(c):
|
||||||
raise ValueError('%s: prohibited unassigned code points (A.1).' % RFC)
|
raise ValueError(f'{RFC}: prohibited unassigned code points (A.1).')
|
||||||
|
|
||||||
|
|
||||||
def saslprep(string):
|
def saslprep(string):
|
||||||
|
@ -155,7 +151,7 @@ def saslprep(string):
|
||||||
# comprised of characters from the Unicode [Unicode] character set."
|
# comprised of characters from the Unicode [Unicode] character set."
|
||||||
# Validate the string is a Unicode string
|
# Validate the string is a Unicode string
|
||||||
if not is_unicode_str(string):
|
if not is_unicode_str(string):
|
||||||
raise TypeError('input must be of type str, not %s' % type(string))
|
raise TypeError(f'input must be of type str, not {type(string)}')
|
||||||
|
|
||||||
# RFC4013: 2.1. Mapping.
|
# RFC4013: 2.1. Mapping.
|
||||||
string = mapping_profile(string)
|
string = mapping_profile(string)
|
||||||
|
|
|
@ -35,7 +35,7 @@ def snap_runner(module, **kwargs):
|
||||||
_set=cmd_runner_fmt.as_fixed("set"),
|
_set=cmd_runner_fmt.as_fixed("set"),
|
||||||
get=cmd_runner_fmt.as_fixed(["get", "-d"]),
|
get=cmd_runner_fmt.as_fixed(["get", "-d"]),
|
||||||
classic=cmd_runner_fmt.as_bool("--classic"),
|
classic=cmd_runner_fmt.as_bool("--classic"),
|
||||||
channel=cmd_runner_fmt.as_func(lambda v: [] if v == 'stable' else ['--channel', '{0}'.format(v)]),
|
channel=cmd_runner_fmt.as_func(lambda v: [] if v == 'stable' else ['--channel', f'{v}']),
|
||||||
options=cmd_runner_fmt.as_list(),
|
options=cmd_runner_fmt.as_list(),
|
||||||
info=cmd_runner_fmt.as_fixed("info"),
|
info=cmd_runner_fmt.as_fixed("info"),
|
||||||
dangerous=cmd_runner_fmt.as_bool("--dangerous"),
|
dangerous=cmd_runner_fmt.as_bool("--dangerous"),
|
||||||
|
|
|
@ -54,14 +54,14 @@ class BitbucketHelper:
|
||||||
if info['status'] == 200:
|
if info['status'] == 200:
|
||||||
self.access_token = content['access_token']
|
self.access_token = content['access_token']
|
||||||
else:
|
else:
|
||||||
self.module.fail_json(msg='Failed to retrieve access token: {0}'.format(info))
|
self.module.fail_json(msg=f'Failed to retrieve access token: {info}')
|
||||||
|
|
||||||
def request(self, api_url, method, data=None, headers=None):
|
def request(self, api_url, method, data=None, headers=None):
|
||||||
headers = headers or {}
|
headers = headers or {}
|
||||||
|
|
||||||
if self.access_token:
|
if self.access_token:
|
||||||
headers.update({
|
headers.update({
|
||||||
'Authorization': 'Bearer {0}'.format(self.access_token),
|
'Authorization': f'Bearer {self.access_token}',
|
||||||
})
|
})
|
||||||
elif self.module.params['user'] and self.module.params['password']:
|
elif self.module.params['user'] and self.module.params['password']:
|
||||||
headers.update({
|
headers.update({
|
||||||
|
|
|
@ -13,7 +13,7 @@ import os
|
||||||
|
|
||||||
def determine_config_file(user, config_file):
|
def determine_config_file(user, config_file):
|
||||||
if user:
|
if user:
|
||||||
config_file = os.path.join(os.path.expanduser('~%s' % user), '.ssh', 'config')
|
config_file = os.path.join(os.path.expanduser(f'~{user}'), '.ssh', 'config')
|
||||||
elif config_file is None:
|
elif config_file is None:
|
||||||
config_file = '/etc/ssh/ssh_config'
|
config_file = '/etc/ssh/ssh_config'
|
||||||
return config_file
|
return config_file
|
||||||
|
|
|
@ -20,7 +20,7 @@ def convert_to_binary_multiple(size_with_unit):
|
||||||
if float(size) < 0:
|
if float(size) < 0:
|
||||||
return -1
|
return -1
|
||||||
if not valid_unit:
|
if not valid_unit:
|
||||||
raise ValueError("%s does not have a valid unit. The unit must be one of %s" % (size_with_unit, valid_units))
|
raise ValueError(f"{size_with_unit} does not have a valid unit. The unit must be one of {valid_units}")
|
||||||
|
|
||||||
size = size_with_unit.replace(" ", "").split('iB')[0]
|
size = size_with_unit.replace(" ", "").split('iB')[0]
|
||||||
size_kib = basic.human_to_bytes(size)
|
size_kib = basic.human_to_bytes(size)
|
||||||
|
|
|
@ -89,7 +89,7 @@ def uldap():
|
||||||
def construct():
|
def construct():
|
||||||
try:
|
try:
|
||||||
secret_file = open('/etc/ldap.secret', 'r')
|
secret_file = open('/etc/ldap.secret', 'r')
|
||||||
bind_dn = 'cn=admin,{0}'.format(base_dn())
|
bind_dn = f'cn=admin,{base_dn()}'
|
||||||
except IOError: # pragma: no cover
|
except IOError: # pragma: no cover
|
||||||
secret_file = open('/etc/machine.secret', 'r')
|
secret_file = open('/etc/machine.secret', 'r')
|
||||||
bind_dn = config_registry()["ldap/hostdn"]
|
bind_dn = config_registry()["ldap/hostdn"]
|
||||||
|
@ -186,7 +186,7 @@ def module_by_name(module_name_):
|
||||||
univention.admin.modules.init(uldap(), position_base_dn(), module)
|
univention.admin.modules.init(uldap(), position_base_dn(), module)
|
||||||
return module
|
return module
|
||||||
|
|
||||||
return _singleton('module/%s' % module_name_, construct)
|
return _singleton(f'module/{module_name_}', construct)
|
||||||
|
|
||||||
|
|
||||||
def get_umc_admin_objects():
|
def get_umc_admin_objects():
|
||||||
|
|
|
@ -71,8 +71,9 @@ class UTM:
|
||||||
"""
|
"""
|
||||||
self.info_only = info_only
|
self.info_only = info_only
|
||||||
self.module = module
|
self.module = module
|
||||||
self.request_url = module.params.get('utm_protocol') + "://" + module.params.get('utm_host') + ":" + to_native(
|
self.request_url = (
|
||||||
module.params.get('utm_port')) + "/api/objects/" + endpoint + "/"
|
f"{module.params.get('utm_protocol')}://{module.params.get('utm_host')}:{module.params.get('utm_port')}/api/objects/{endpoint}/"
|
||||||
|
)
|
||||||
|
|
||||||
"""
|
"""
|
||||||
The change_relevant_keys will be checked for changes to determine whether the object needs to be updated
|
The change_relevant_keys will be checked for changes to determine whether the object needs to be updated
|
||||||
|
@ -82,9 +83,8 @@ class UTM:
|
||||||
self.module.params['url_password'] = module.params.get('utm_token')
|
self.module.params['url_password'] = module.params.get('utm_token')
|
||||||
if all(elem in self.change_relevant_keys for elem in module.params.keys()):
|
if all(elem in self.change_relevant_keys for elem in module.params.keys()):
|
||||||
raise UTMModuleConfigurationError(
|
raise UTMModuleConfigurationError(
|
||||||
"The keys " + to_native(
|
f"The keys {self.change_relevant_keys} to check are not in the modules keys:\n{list(module.params.keys())}"
|
||||||
self.change_relevant_keys) + " to check are not in the modules keys:\n" + to_native(
|
)
|
||||||
list(module.params.keys())))
|
|
||||||
|
|
||||||
def execute(self):
|
def execute(self):
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -98,8 +98,9 @@ class _Variable(object):
|
||||||
return
|
return
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return "<Variable: value={0!r}, initial={1!r}, diff={2}, output={3}, change={4}, verbosity={5}>".format(
|
return (
|
||||||
self.value, self.initial_value, self.diff, self.output, self.change, self.verbosity
|
f"<Variable: value={self.value!r}, initial={self.initial_value!r}, diff={self.diff}, "
|
||||||
|
f"output={self.output}, change={self.change}, verbosity={self.verbosity}>"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@ -163,7 +164,7 @@ class VarDict(object):
|
||||||
ValueError: Raised if trying to set a variable with a reserved name.
|
ValueError: Raised if trying to set a variable with a reserved name.
|
||||||
"""
|
"""
|
||||||
if name in self.reserved_names:
|
if name in self.reserved_names:
|
||||||
raise ValueError("Name {0} is reserved".format(name))
|
raise ValueError(f"Name {name} is reserved")
|
||||||
if name in self.__vars__:
|
if name in self.__vars__:
|
||||||
var = self._var(name)
|
var = self._var(name)
|
||||||
var.set_meta(**kwargs)
|
var.set_meta(**kwargs)
|
||||||
|
|
|
@ -12,7 +12,6 @@ try:
|
||||||
except ImportError:
|
except ImportError:
|
||||||
HAS_VEXATAPI = False
|
HAS_VEXATAPI = False
|
||||||
|
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
|
||||||
from ansible.module_utils.basic import env_fallback
|
from ansible.module_utils.basic import env_fallback
|
||||||
|
|
||||||
VXOS_VERSION = None
|
VXOS_VERSION = None
|
||||||
|
@ -58,7 +57,7 @@ def get_array(module):
|
||||||
else:
|
else:
|
||||||
module.fail_json(msg='Test connection to array failed.')
|
module.fail_json(msg='Test connection to array failed.')
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
module.fail_json(msg='Vexata API access failed: {0}'.format(to_native(e)))
|
module.fail_json(msg=f'Vexata API access failed: {e}')
|
||||||
|
|
||||||
|
|
||||||
def argument_spec():
|
def argument_spec():
|
||||||
|
|
|
@ -64,7 +64,7 @@ class WdcRedfishUtils(RedfishUtils):
|
||||||
A URI is considered good if we can GET uri/redfish/v1.
|
A URI is considered good if we can GET uri/redfish/v1.
|
||||||
"""
|
"""
|
||||||
for root_uri in root_uris:
|
for root_uri in root_uris:
|
||||||
uri = root_uri + "/redfish/v1"
|
uri = f"{root_uri}/redfish/v1"
|
||||||
response = self.get_request(uri)
|
response = self.get_request(uri)
|
||||||
if response['ret']:
|
if response['ret']:
|
||||||
self.root_uri = root_uri
|
self.root_uri = root_uri
|
||||||
|
@ -86,7 +86,7 @@ class WdcRedfishUtils(RedfishUtils):
|
||||||
:return: True/False if the enclosure is multi-tenant or not and return enclosure generation;
|
:return: True/False if the enclosure is multi-tenant or not and return enclosure generation;
|
||||||
None if unable to determine.
|
None if unable to determine.
|
||||||
"""
|
"""
|
||||||
response = self.get_request(self.root_uri + self.service_root + "Chassis/Enclosure")
|
response = self.get_request(f"{self.root_uri}{self.service_root}Chassis/Enclosure")
|
||||||
if response['ret'] is False:
|
if response['ret'] is False:
|
||||||
return None
|
return None
|
||||||
pattern = r".*-[A,B]"
|
pattern = r".*-[A,B]"
|
||||||
|
@ -114,7 +114,7 @@ class WdcRedfishUtils(RedfishUtils):
|
||||||
|
|
||||||
# Simple update status URI is not provided via GET /redfish/v1/UpdateService
|
# Simple update status URI is not provided via GET /redfish/v1/UpdateService
|
||||||
# So we have to hard code it.
|
# So we have to hard code it.
|
||||||
self.simple_update_status_uri = "{0}/Status".format(self.simple_update_uri)
|
self.simple_update_status_uri = f"{self.simple_update_uri}/Status"
|
||||||
|
|
||||||
# FWActivate URI
|
# FWActivate URI
|
||||||
if 'Oem' not in data['Actions']:
|
if 'Oem' not in data['Actions']:
|
||||||
|
@ -267,9 +267,7 @@ class WdcRedfishUtils(RedfishUtils):
|
||||||
parsed_url = urlparse(update_opts["update_image_uri"])
|
parsed_url = urlparse(update_opts["update_image_uri"])
|
||||||
if update_creds:
|
if update_creds:
|
||||||
original_netloc = parsed_url.netloc
|
original_netloc = parsed_url.netloc
|
||||||
parsed_url = parsed_url._replace(netloc="{0}:{1}@{2}".format(update_creds.get("username"),
|
parsed_url = parsed_url._replace(netloc=f"{update_creds.get('username')}:{update_creds.get('password')}@{original_netloc}")
|
||||||
update_creds.get("password"),
|
|
||||||
original_netloc))
|
|
||||||
update_opts["update_image_uri"] = urlunparse(parsed_url)
|
update_opts["update_image_uri"] = urlunparse(parsed_url)
|
||||||
del update_opts["update_creds"]
|
del update_opts["update_creds"]
|
||||||
|
|
||||||
|
@ -294,9 +292,7 @@ class WdcRedfishUtils(RedfishUtils):
|
||||||
]:
|
]:
|
||||||
return {
|
return {
|
||||||
'ret': False,
|
'ret': False,
|
||||||
'msg': 'Target is not ready for FW update. Current status: {0} ({1})'.format(
|
'msg': f'Target is not ready for FW update. Current status: {status_code} ({status_description})'}
|
||||||
status_code, status_description
|
|
||||||
)}
|
|
||||||
|
|
||||||
# Check the FW version in the bundle file, and compare it to what is already on the IOMs
|
# Check the FW version in the bundle file, and compare it to what is already on the IOMs
|
||||||
|
|
||||||
|
@ -314,20 +310,14 @@ class WdcRedfishUtils(RedfishUtils):
|
||||||
if is_enclosure_multi_tenant != is_bundle_multi_tenant:
|
if is_enclosure_multi_tenant != is_bundle_multi_tenant:
|
||||||
return {
|
return {
|
||||||
'ret': False,
|
'ret': False,
|
||||||
'msg': 'Enclosure multi-tenant is {0} but bundle multi-tenant is {1}'.format(
|
'msg': f'Enclosure multi-tenant is {is_enclosure_multi_tenant} but bundle multi-tenant is {is_bundle_multi_tenant}'
|
||||||
is_enclosure_multi_tenant,
|
|
||||||
is_bundle_multi_tenant,
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# Verify that the bundle is compliant with the target enclosure
|
# Verify that the bundle is compliant with the target enclosure
|
||||||
if enclosure_gen != bundle_gen:
|
if enclosure_gen != bundle_gen:
|
||||||
return {
|
return {
|
||||||
'ret': False,
|
'ret': False,
|
||||||
'msg': 'Enclosure generation is {0} but bundle is of {1}'.format(
|
'msg': f'Enclosure generation is {enclosure_gen} but bundle is of {bundle_gen}'
|
||||||
enclosure_gen,
|
|
||||||
bundle_gen,
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# Version number installed on IOMs
|
# Version number installed on IOMs
|
||||||
|
@ -355,7 +345,7 @@ class WdcRedfishUtils(RedfishUtils):
|
||||||
return {
|
return {
|
||||||
'ret': True,
|
'ret': True,
|
||||||
'changed': False,
|
'changed': False,
|
||||||
'msg': 'Version {0} already installed'.format(bundle_firmware_version)
|
'msg': f'Version {bundle_firmware_version} already installed'
|
||||||
}
|
}
|
||||||
|
|
||||||
# Version numbers don't match the bundle -- proceed with update (unless we are in check mode)
|
# Version numbers don't match the bundle -- proceed with update (unless we are in check mode)
|
||||||
|
@ -425,9 +415,7 @@ class WdcRedfishUtils(RedfishUtils):
|
||||||
if status_code != self.UPDATE_STATUS_CODE_FW_UPDATE_COMPLETED_WAITING_FOR_ACTIVATION:
|
if status_code != self.UPDATE_STATUS_CODE_FW_UPDATE_COMPLETED_WAITING_FOR_ACTIVATION:
|
||||||
return {
|
return {
|
||||||
'ret': False,
|
'ret': False,
|
||||||
'msg': 'Target is not ready for FW activation after update. Current status: {0} ({1})'.format(
|
'msg': f'Target is not ready for FW activation after update. Current status: {status_code} ({status_description})'}
|
||||||
status_code, status_description
|
|
||||||
)}
|
|
||||||
|
|
||||||
self.firmware_activate(update_opts)
|
self.firmware_activate(update_opts)
|
||||||
return {'ret': True, 'changed': True,
|
return {'ret': True, 'changed': True,
|
||||||
|
@ -447,7 +435,7 @@ class WdcRedfishUtils(RedfishUtils):
|
||||||
# The other will return an error with message "IOM Module A/B cannot be read"
|
# The other will return an error with message "IOM Module A/B cannot be read"
|
||||||
which_iom_is_this = None
|
which_iom_is_this = None
|
||||||
for iom_letter in ['A', 'B']:
|
for iom_letter in ['A', 'B']:
|
||||||
iom_uri = "Chassis/IOModule{0}FRU".format(iom_letter)
|
iom_uri = f"Chassis/IOModule{iom_letter}FRU"
|
||||||
response = self.get_request(self.root_uri + self.service_root + iom_uri)
|
response = self.get_request(self.root_uri + self.service_root + iom_uri)
|
||||||
if response['ret'] is False:
|
if response['ret'] is False:
|
||||||
continue
|
continue
|
||||||
|
@ -505,7 +493,7 @@ class WdcRedfishUtils(RedfishUtils):
|
||||||
result['ret'] = True
|
result['ret'] = True
|
||||||
data = response['data']
|
data = response['data']
|
||||||
if key not in data:
|
if key not in data:
|
||||||
return {'ret': False, 'msg': "Key %s not found" % key}
|
return {'ret': False, 'msg': f"Key {key} not found"}
|
||||||
current_led_status = data[key]
|
current_led_status = data[key]
|
||||||
if current_led_status == current_led_status_map[command]:
|
if current_led_status == current_led_status_map[command]:
|
||||||
return {'ret': True, 'changed': False}
|
return {'ret': True, 'changed': False}
|
||||||
|
|
|
@ -14,7 +14,7 @@ def _values_fmt(values, value_types):
|
||||||
for value, value_type in zip(values, value_types):
|
for value, value_type in zip(values, value_types):
|
||||||
if value_type == 'bool':
|
if value_type == 'bool':
|
||||||
value = 'true' if boolean(value) else 'false'
|
value = 'true' if boolean(value) else 'false'
|
||||||
result.extend(['--type', '{0}'.format(value_type), '--set', '{0}'.format(value)])
|
result.extend(['--type', f'{value_type}', '--set', f'{value}'])
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue