Relocating extras into lib/ansible/modules/ after merge

James Cammarata 2016-12-08 00:36:57 -05:00 committed by Matt Clay
commit 011ea55a8f
596 changed files with 0 additions and 266 deletions

@ -0,0 +1,84 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: dpkg_selections
short_description: Manage dpkg package selections
description:
- Change dpkg package selection state via --get-selections and --set-selections.
version_added: "2.0"
author: Brian Brazil <brian.brazil@boxever.com>
options:
name:
description:
- Name of the package
required: true
selection:
description:
- The selection state to set the package to.
choices: [ 'install', 'hold', 'deinstall', 'purge' ]
required: true
notes:
- This module won't cause any packages to be installed/removed/purged, use the C(apt) module for that.
'''
EXAMPLES = '''
# Prevent python from being upgraded.
- dpkg_selections:
name: python
selection: hold
'''
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
            selection = dict(choices=['install', 'hold', 'deinstall', 'purge'], required=True)
),
supports_check_mode=True,
)
dpkg = module.get_bin_path('dpkg', True)
name = module.params['name']
selection = module.params['selection']
# Get current settings.
rc, out, err = module.run_command([dpkg, '--get-selections', name], check_rc=True)
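    # `dpkg --get-selections <package>` prints "<package><tab><state>" for packages
    # dpkg knows about and nothing at all for unknown ones, so an empty stdout
    # means the package is not present in the dpkg database.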
if not out:
current = 'not present'
else:
current = out.split()[1]
changed = current != selection
if module.check_mode or not changed:
module.exit_json(changed=changed, before=current, after=selection)
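    # `dpkg --set-selections` reads "<package> <state>" pairs from stdin, which
    # is why the selection is fed in via `data` instead of a command argument.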
module.run_command([dpkg, '--set-selections'], data="%s %s" % (name, selection), check_rc=True)
module.exit_json(changed=changed, before=current, after=selection)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

@ -0,0 +1,226 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
"""
Ansible module to manage elasticsearch plugins
(c) 2015, Mathew Davies <thepixeldeveloper@googlemail.com>
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: elasticsearch_plugin
short_description: Manage Elasticsearch plugins
description:
- Manages Elasticsearch plugins.
version_added: "2.0"
author: Mathew Davies (@ThePixelDeveloper)
options:
name:
description:
            - Name of the plugin to install. In ES 2.x, the name can be a URL or file location
required: True
state:
description:
- Desired state of a plugin.
required: False
choices: ["present", "absent"]
default: present
url:
description:
- Set exact URL to download the plugin from (Only works for ES 1.x)
required: False
default: None
timeout:
description:
- "Timeout setting: 30s, 1m, 1h..."
required: False
default: 1m
plugin_bin:
description:
- Location of the plugin binary
required: False
default: /usr/share/elasticsearch/bin/plugin
plugin_dir:
description:
- Your configured plugin directory specified in Elasticsearch
required: False
default: /usr/share/elasticsearch/plugins/
proxy_host:
description:
- Proxy host to use during plugin installation
required: False
default: None
version_added: "2.1"
proxy_port:
description:
- Proxy port to use during plugin installation
required: False
default: None
version_added: "2.1"
version:
description:
- Version of the plugin to be installed.
              If the plugin exists with a previous version, it will NOT be updated
required: False
default: None
'''
EXAMPLES = '''
# Install Elasticsearch head plugin
- elasticsearch_plugin:
state: present
name: mobz/elasticsearch-head
# Install specific version of a plugin
- elasticsearch_plugin:
state: present
name: com.github.kzwang/elasticsearch-image
version: '1.2.0'
# Uninstall Elasticsearch head plugin
- elasticsearch_plugin:
state: absent
name: mobz/elasticsearch-head
'''
PACKAGE_STATE_MAP = dict(
present="install",
absent="remove"
)
def parse_plugin_repo(string):
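    # e.g. "mobz/elasticsearch-head" -> "head", "royrusso/elasticsearch-HQ" -> "HQ",
    # "analysis-icu" -> "analysis-icu"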
elements = string.split("/")
# We first consider the simplest form: pluginname
repo = elements[0]
# We consider the form: username/pluginname
if len(elements) > 1:
repo = elements[1]
# remove elasticsearch- prefix
# remove es- prefix
    for prefix in ("elasticsearch-", "es-"):
        if repo.startswith(prefix):
            return repo[len(prefix):]
return repo
def is_plugin_present(plugin_dir, working_dir):
return os.path.isdir(os.path.join(working_dir, plugin_dir))
def parse_error(string):
reason = "reason: "
try:
return string[string.index(reason) + len(reason):].strip()
except ValueError:
return string
def install_plugin(module, plugin_bin, plugin_name, version, url, proxy_host, proxy_port, timeout):
    # Append the requested version before the command is assembled so that it
    # is actually passed on to the plugin binary.
    if version:
        plugin_name = plugin_name + '/' + version
    cmd_args = [plugin_bin, PACKAGE_STATE_MAP["present"], plugin_name]
if proxy_host and proxy_port:
cmd_args.append("-DproxyHost=%s -DproxyPort=%s" % (proxy_host, proxy_port))
if url:
cmd_args.append("--url %s" % url)
if timeout:
cmd_args.append("--timeout %s" % timeout)
cmd = " ".join(cmd_args)
if module.check_mode:
rc, out, err = 0, "check mode", ""
else:
rc, out, err = module.run_command(cmd)
if rc != 0:
reason = parse_error(out)
module.fail_json(msg=reason)
return True, cmd, out, err
def remove_plugin(module, plugin_bin, plugin_name):
cmd_args = [plugin_bin, PACKAGE_STATE_MAP["absent"], parse_plugin_repo(plugin_name)]
cmd = " ".join(cmd_args)
if module.check_mode:
rc, out, err = 0, "check mode", ""
else:
rc, out, err = module.run_command(cmd)
if rc != 0:
reason = parse_error(out)
module.fail_json(msg=reason)
return True, cmd, out, err
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True),
state=dict(default="present", choices=PACKAGE_STATE_MAP.keys()),
url=dict(default=None),
timeout=dict(default="1m"),
plugin_bin=dict(default="/usr/share/elasticsearch/bin/plugin", type="path"),
plugin_dir=dict(default="/usr/share/elasticsearch/plugins/", type="path"),
proxy_host=dict(default=None),
proxy_port=dict(default=None),
version=dict(default=None)
),
supports_check_mode=True
)
name = module.params["name"]
state = module.params["state"]
url = module.params["url"]
timeout = module.params["timeout"]
plugin_bin = module.params["plugin_bin"]
plugin_dir = module.params["plugin_dir"]
proxy_host = module.params["proxy_host"]
proxy_port = module.params["proxy_port"]
version = module.params["version"]
present = is_plugin_present(parse_plugin_repo(name), plugin_dir)
# skip if the state is correct
if (present and state == "present") or (state == "absent" and not present):
module.exit_json(changed=False, name=name, state=state)
if state == "present":
changed, cmd, out, err = install_plugin(module, plugin_bin, name, version, url, proxy_host, proxy_port, timeout)
elif state == "absent":
changed, cmd, out, err = remove_plugin(module, plugin_bin, name)
module.exit_json(changed=changed, cmd=cmd, name=name, state=state, url=url, timeout=timeout, stdout=out, stderr=err)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

@ -0,0 +1,248 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage Kibana plugins
(c) 2016, Thierno IB. BARRY @barryib
Sponsored by Polyconseil http://polyconseil.fr.
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
import os
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: kibana_plugin
short_description: Manage Kibana plugins
description:
- Manages Kibana plugins.
version_added: "2.2"
author: Thierno IB. BARRY (@barryib)
options:
name:
description:
- Name of the plugin to install
required: True
state:
description:
- Desired state of a plugin.
required: False
choices: ["present", "absent"]
default: present
url:
description:
- Set exact URL to download the plugin from.
For local file, prefix its absolute path with file://
required: False
default: None
timeout:
description:
- "Timeout setting: 30s, 1m, 1h..."
required: False
default: 1m
plugin_bin:
description:
- Location of the plugin binary
required: False
default: /opt/kibana/bin/kibana
plugin_dir:
description:
- Your configured plugin directory specified in Kibana
required: False
default: /opt/kibana/installedPlugins/
version:
description:
- Version of the plugin to be installed.
              If the plugin exists with a previous version, it will NOT be updated unless C(force) is set to yes
required: False
default: None
force:
description:
- Delete and re-install the plugin. Can be useful for plugins update
required: False
choices: ["yes", "no"]
default: no
'''
EXAMPLES = '''
# Install Elasticsearch Marvel plugin
- kibana_plugin:
    state: present
    name: elasticsearch/marvel
# Install specific version of a plugin
- kibana_plugin:
state: present
name: elasticsearch/marvel
version: '2.3.3'
# Uninstall Elasticsearch Marvel plugin
- kibana_plugin:
state: absent
name: elasticsearch/marvel
'''
RETURN = '''
cmd:
    description: the launched command during plugin management (install / remove)
returned: success
type: string
name:
description: the plugin name to install or remove
returned: success
type: string
url:
    description: the URL from which the plugin is installed
returned: success
type: string
timeout:
    description: the timeout for plugin download
returned: success
type: string
stdout:
description: the command stdout
returned: success
type: string
stderr:
description: the command stderr
returned: success
type: string
state:
description: the state for the managed plugin
returned: success
type: string
'''
PACKAGE_STATE_MAP = dict(
present="--install",
absent="--remove"
)
def parse_plugin_repo(string):
elements = string.split("/")
# We first consider the simplest form: pluginname
repo = elements[0]
# We consider the form: username/pluginname
if len(elements) > 1:
repo = elements[1]
# remove elasticsearch- prefix
# remove es- prefix
    for prefix in ("elasticsearch-", "es-"):
        if repo.startswith(prefix):
            return repo[len(prefix):]
return repo
def is_plugin_present(plugin_dir, working_dir):
return os.path.isdir(os.path.join(working_dir, plugin_dir))
def parse_error(string):
reason = "reason: "
try:
return string[string.index(reason) + len(reason):].strip()
except ValueError:
return string
def install_plugin(module, plugin_bin, plugin_name, url, timeout):
cmd_args = [plugin_bin, "plugin", PACKAGE_STATE_MAP["present"], plugin_name]
if url:
cmd_args.append("--url %s" % url)
if timeout:
cmd_args.append("--timeout %s" % timeout)
cmd = " ".join(cmd_args)
if module.check_mode:
return True, cmd, "check mode", ""
rc, out, err = module.run_command(cmd)
if rc != 0:
reason = parse_error(out)
module.fail_json(msg=reason)
return True, cmd, out, err
def remove_plugin(module, plugin_bin, plugin_name):
cmd_args = [plugin_bin, "plugin", PACKAGE_STATE_MAP["absent"], plugin_name]
cmd = " ".join(cmd_args)
if module.check_mode:
return True, cmd, "check mode", ""
rc, out, err = module.run_command(cmd)
if rc != 0:
reason = parse_error(out)
module.fail_json(msg=reason)
return True, cmd, out, err
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True),
state=dict(default="present", choices=PACKAGE_STATE_MAP.keys()),
url=dict(default=None),
timeout=dict(default="1m"),
plugin_bin=dict(default="/opt/kibana/bin/kibana", type="path"),
plugin_dir=dict(default="/opt/kibana/installedPlugins/", type="path"),
version=dict(default=None),
force=dict(default="no", type="bool")
),
supports_check_mode=True,
)
name = module.params["name"]
state = module.params["state"]
url = module.params["url"]
timeout = module.params["timeout"]
plugin_bin = module.params["plugin_bin"]
plugin_dir = module.params["plugin_dir"]
version = module.params["version"]
force = module.params["force"]
present = is_plugin_present(parse_plugin_repo(name), plugin_dir)
# skip if the state is correct
if (present and state == "present" and not force) or (state == "absent" and not present and not force):
module.exit_json(changed=False, name=name, state=state)
if (version):
name = name + '/' + version
if state == "present":
if force:
remove_plugin(module, plugin_bin, name)
changed, cmd, out, err = install_plugin(module, plugin_bin, name, url, timeout)
elif state == "absent":
changed, cmd, out, err = remove_plugin(module, plugin_bin, name)
module.exit_json(changed=changed, cmd=cmd, name=name, state=state, url=url, timeout=timeout, stdout=out, stderr=err)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

@ -0,0 +1,244 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Michael Warkentin <mwarkentin@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: bower
short_description: Manage bower packages with bower
description:
- Manage bower packages with bower
version_added: 1.9
author: "Michael Warkentin (@mwarkentin)"
options:
name:
description:
- The name of a bower package to install
required: false
offline:
description:
- Install packages from local cache, if the packages were installed before
required: false
default: no
choices: [ "yes", "no" ]
production:
description:
- Install with --production flag
required: false
default: no
choices: [ "yes", "no" ]
version_added: "2.0"
path:
description:
- The base path where to install the bower packages
required: true
relative_execpath:
description:
- Relative path to bower executable from install path
default: null
required: false
version_added: "2.1"
state:
description:
- The state of the bower package
required: false
default: present
choices: [ "present", "absent", "latest" ]
version:
description:
- The version to be installed
required: false
'''
EXAMPLES = '''
- name: Install "bootstrap" bower package.
bower:
name: bootstrap
- name: Install "bootstrap" bower package on version 3.1.1.
bower:
name: bootstrap
version: '3.1.1'
- name: Remove the "bootstrap" bower package.
bower:
name: bootstrap
state: absent
- name: Install packages based on bower.json.
bower:
path: /app/location
- name: Update packages based on bower.json to their latest version.
bower:
path: /app/location
state: latest
# install bower locally and run from there
- npm:
path: /app/location
name: bower
global: no
- bower:
path: /app/location
relative_execpath: node_modules/.bin
'''
import json
import os
class Bower(object):
def __init__(self, module, **kwargs):
self.module = module
self.name = kwargs['name']
self.offline = kwargs['offline']
self.production = kwargs['production']
self.path = kwargs['path']
self.relative_execpath = kwargs['relative_execpath']
self.version = kwargs['version']
if kwargs['version']:
self.name_version = self.name + '#' + self.version
else:
self.name_version = self.name
def _exec(self, args, run_in_check_mode=False, check_rc=True):
if not self.module.check_mode or (self.module.check_mode and run_in_check_mode):
cmd = []
if self.relative_execpath:
cmd.append(os.path.join(self.path, self.relative_execpath, "bower"))
if not os.path.isfile(cmd[-1]):
self.module.fail_json(msg="bower not found at relative path %s" % self.relative_execpath)
else:
cmd.append("bower")
cmd.extend(args)
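            # Run bower non-interactively and allow it to run as root so it
            # behaves when invoked unattended by Ansible.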
cmd.extend(['--config.interactive=false', '--allow-root'])
if self.name:
cmd.append(self.name_version)
if self.offline:
cmd.append('--offline')
if self.production:
cmd.append('--production')
# If path is specified, cd into that path and run the command.
cwd = None
if self.path:
if not os.path.exists(self.path):
os.makedirs(self.path)
if not os.path.isdir(self.path):
self.module.fail_json(msg="path %s is not a directory" % self.path)
cwd = self.path
rc, out, err = self.module.run_command(cmd, check_rc=check_rc, cwd=cwd)
return out
return ''
def list(self):
cmd = ['list', '--json']
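        # `bower list --json` reports the dependency tree; each entry under
        # "dependencies" may carry "missing", "incompatible", "pkgMeta.version"
        # and "update.latest", which the loop below uses to classify packages.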
installed = list()
missing = list()
outdated = list()
data = json.loads(self._exec(cmd, True, False))
if 'dependencies' in data:
for dep in data['dependencies']:
dep_data = data['dependencies'][dep]
if dep_data.get('missing', False):
missing.append(dep)
elif ('version' in dep_data['pkgMeta'] and
'update' in dep_data and
dep_data['pkgMeta']['version'] != dep_data['update']['latest']):
outdated.append(dep)
elif dep_data.get('incompatible', False):
outdated.append(dep)
else:
installed.append(dep)
# Named dependency not installed
else:
missing.append(self.name)
return installed, missing, outdated
def install(self):
return self._exec(['install'])
def update(self):
return self._exec(['update'])
def uninstall(self):
return self._exec(['uninstall'])
def main():
arg_spec = dict(
name=dict(default=None),
offline=dict(default='no', type='bool'),
production=dict(default='no', type='bool'),
path=dict(required=True, type='path'),
relative_execpath=dict(default=None, required=False, type='path'),
state=dict(default='present', choices=['present', 'absent', 'latest', ]),
version=dict(default=None),
)
    module = AnsibleModule(
        argument_spec=arg_spec,
        supports_check_mode=True
    )
name = module.params['name']
offline = module.params['offline']
production = module.params['production']
path = os.path.expanduser(module.params['path'])
relative_execpath = module.params['relative_execpath']
state = module.params['state']
version = module.params['version']
if state == 'absent' and not name:
module.fail_json(msg='uninstalling a package is only available for named packages')
bower = Bower(module, name=name, offline=offline, production=production, path=path, relative_execpath=relative_execpath, version=version)
changed = False
if state == 'present':
installed, missing, outdated = bower.list()
if len(missing):
changed = True
bower.install()
elif state == 'latest':
installed, missing, outdated = bower.list()
if len(missing) or len(outdated):
changed = True
bower.update()
else: # Absent
installed, missing, outdated = bower.list()
if name in installed:
changed = True
bower.uninstall()
module.exit_json(changed=changed)
# Import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

@ -0,0 +1,225 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Tim Hoiberg <tim.hoiberg@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION='''
---
module: bundler
short_description: Manage Ruby Gem dependencies with Bundler
description:
- Manage installation and Gem version dependencies for Ruby using the Bundler gem
version_added: "2.0.0"
options:
executable:
description:
- The path to the bundler executable
required: false
default: null
state:
description:
- The desired state of the Gem bundle. C(latest) updates gems to the most recent, acceptable version
required: false
choices: [present, latest]
default: present
chdir:
description:
      - The directory to execute the bundler commands from. This directory
needs to contain a valid Gemfile or .bundle/ directory
required: false
default: temporary working directory
exclude_groups:
description:
- A list of Gemfile groups to exclude during operations. This only
applies when state is C(present). Bundler considers this
a 'remembered' property for the Gemfile and will automatically exclude
groups in future operations even if C(exclude_groups) is not set
required: false
default: null
clean:
description:
- Only applies if state is C(present). If set removes any gems on the
target host that are not in the gemfile
required: false
choices: [yes, no]
default: "no"
gemfile:
description:
- Only applies if state is C(present). The path to the gemfile to use to install gems.
required: false
default: Gemfile in current directory
local:
description:
- If set only installs gems from the cache on the target host
required: false
choices: [yes, no]
default: "no"
deployment_mode:
description:
- Only applies if state is C(present). If set it will only install gems
that are in the default or production groups. Requires a Gemfile.lock
file to have been created prior
required: false
choices: [yes, no]
default: "no"
user_install:
description:
- Only applies if state is C(present). Installs gems in the local user's cache or for all users
required: false
choices: [yes, no]
default: "yes"
gem_path:
description:
- Only applies if state is C(present). Specifies the directory to
install the gems into. If C(chdir) is set then this path is relative to
C(chdir)
required: false
default: RubyGems gem paths
binstub_directory:
description:
- Only applies if state is C(present). Specifies the directory to
install any gem bins files to. When executed the bin files will run
within the context of the Gemfile and fail if any required gem
dependencies are not installed. If C(chdir) is set then this path is
relative to C(chdir)
required: false
default: null
extra_args:
description:
- A space separated string of additional commands that can be applied to
the Bundler command. Refer to the Bundler documentation for more
information
required: false
default: null
author: "Tim Hoiberg (@thoiberg)"
'''
EXAMPLES='''
# Installs gems from a Gemfile in the current directory
- bundler:
state: present
executable: ~/.rvm/gems/2.1.5/bin/bundle
# Excludes the production group from installing
- bundler:
state: present
exclude_groups: production
# Only install gems from the default and production groups
- bundler:
state: present
deployment_mode: yes
# Installs gems using a Gemfile in another directory
- bundler:
state: present
gemfile: ../rails_project/Gemfile
# Updates Gemfile in another directory
- bundler:
state: latest
chdir: ~/rails_project
'''
def get_bundler_executable(module):
if module.params.get('executable'):
return module.params.get('executable').split(' ')
else:
return [ module.get_bin_path('bundle', True) ]
def main():
module = AnsibleModule(
argument_spec=dict(
executable=dict(default=None, required=False),
state=dict(default='present', required=False, choices=['present', 'latest']),
chdir=dict(default=None, required=False, type='path'),
exclude_groups=dict(default=None, required=False, type='list'),
clean=dict(default=False, required=False, type='bool'),
gemfile=dict(default=None, required=False, type='path'),
local=dict(default=False, required=False, type='bool'),
deployment_mode=dict(default=False, required=False, type='bool'),
user_install=dict(default=True, required=False, type='bool'),
gem_path=dict(default=None, required=False, type='path'),
binstub_directory=dict(default=None, required=False, type='path'),
extra_args=dict(default=None, required=False),
),
supports_check_mode=True
)
executable = module.params.get('executable')
state = module.params.get('state')
chdir = module.params.get('chdir')
exclude_groups = module.params.get('exclude_groups')
clean = module.params.get('clean')
gemfile = module.params.get('gemfile')
local = module.params.get('local')
deployment_mode = module.params.get('deployment_mode')
user_install = module.params.get('user_install')
gem_path = module.params.get('gem_path')
binstub_directory = module.params.get('binstub_directory')
extra_args = module.params.get('extra_args')
cmd = get_bundler_executable(module)
if module.check_mode:
cmd.append('check')
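        # `bundle check` exits non-zero when Gemfile dependencies are not
        # satisfied, so a non-zero rc is reported as "would change".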
rc, out, err = module.run_command(cmd, cwd=chdir, check_rc=False)
module.exit_json(changed=rc != 0, state=state, stdout=out, stderr=err)
if state == 'present':
cmd.append('install')
if exclude_groups:
cmd.extend(['--without', ':'.join(exclude_groups)])
if clean:
cmd.append('--clean')
if gemfile:
cmd.extend(['--gemfile', gemfile])
if local:
cmd.append('--local')
if deployment_mode:
cmd.append('--deployment')
if not user_install:
cmd.append('--system')
if gem_path:
cmd.extend(['--path', gem_path])
if binstub_directory:
cmd.extend(['--binstubs', binstub_directory])
else:
cmd.append('update')
if local:
cmd.append('--local')
if extra_args:
cmd.extend(extra_args.split(' '))
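    # At this point cmd looks something like
    # ['/usr/local/bin/bundle', 'install', '--without', 'development:test', '--deployment'].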
rc, out, err = module.run_command(cmd, cwd=chdir, check_rc=True)
module.exit_json(changed='Installing' in out, state=state, stdout=out, stderr=err)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

@ -0,0 +1,239 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Dimitrios Tydeas Mengidis <tydeas.dr@gmail.com>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: composer
author:
- "Dimitrios Tydeas Mengidis (@dmtrs)"
- "René Moser (@resmo)"
short_description: Dependency Manager for PHP
version_added: "1.6"
description:
- Composer is a tool for dependency management in PHP. It allows you to declare the dependent libraries your project needs and it will install them in your project for you
options:
command:
version_added: "1.8"
description:
- Composer command like "install", "update" and so on
required: false
default: install
arguments:
version_added: "2.0"
description:
- Composer arguments like required package, version and so on
required: false
default: null
working_dir:
description:
- Directory of your project ( see --working-dir )
required: true
default: null
aliases: [ "working-dir" ]
prefer_source:
description:
- Forces installation from package sources when possible ( see --prefer-source )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "prefer-source" ]
prefer_dist:
description:
- Forces installation from package dist even for dev versions ( see --prefer-dist )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "prefer-dist" ]
no_dev:
description:
- Disables installation of require-dev packages ( see --no-dev )
required: false
default: "yes"
choices: [ "yes", "no" ]
aliases: [ "no-dev" ]
no_scripts:
description:
- Skips the execution of all scripts defined in composer.json ( see --no-scripts )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "no-scripts" ]
no_plugins:
description:
- Disables all plugins ( see --no-plugins )
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "no-plugins" ]
optimize_autoloader:
description:
- Optimize autoloader during autoloader dump ( see --optimize-autoloader ). Convert PSR-0/4 autoloading to classmap to get a faster autoloader. This is recommended especially for production, but can take a bit of time to run so it is currently not done by default.
required: false
default: "yes"
choices: [ "yes", "no" ]
aliases: [ "optimize-autoloader" ]
ignore_platform_reqs:
version_added: "2.0"
description:
- Ignore php, hhvm, lib-* and ext-* requirements and force the installation even if the local machine does not fulfill these.
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "ignore-platform-reqs" ]
requirements:
- php
- composer installed in bin path (recommended /usr/local/bin)
notes:
- Default options that are always appended in each execution are --no-ansi, --no-interaction and --no-progress if available.
- We received reports about issues on macOS if composer was installed by Homebrew. Please use the official install method to avoid it.
'''
EXAMPLES = '''
# Downloads and installs all the libs and dependencies outlined in the /path/to/project/composer.lock
- composer:
command: install
working_dir: /path/to/project
- composer:
command: require
arguments: my/package
working_dir: /path/to/project
# Clone project and install with all dependencies
- composer:
command: create-project
arguments: package/package /path/to/project ~1.0
working_dir: /path/to/project
prefer_dist: yes
'''
import os
import re
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
# Let snippet from module_utils/basic.py return a proper error in this case
pass
def parse_out(string):
return re.sub("\s+", " ", string).strip()
def has_changed(string):
return "Nothing to install or update" not in string
def get_available_options(module, command='install'):
    # get all available options from a composer command using `composer help <command> --format=json`
rc, out, err = composer_command(module, "help %s --format=json" % command)
if rc != 0:
output = parse_out(err)
module.fail_json(msg=output)
command_help_json = json.loads(out)
return command_help_json['definition']['options']
def composer_command(module, command, arguments = "", options=[]):
php_path = module.get_bin_path("php", True, ["/usr/local/bin"])
composer_path = module.get_bin_path("composer", True, ["/usr/local/bin"])
cmd = "%s %s %s %s %s" % (php_path, composer_path, command, " ".join(options), arguments)
return module.run_command(cmd)
def main():
module = AnsibleModule(
argument_spec = dict(
command = dict(default="install", type="str", required=False),
arguments = dict(default="", type="str", required=False),
working_dir = dict(aliases=["working-dir"], required=True),
prefer_source = dict(default="no", type="bool", aliases=["prefer-source"]),
prefer_dist = dict(default="no", type="bool", aliases=["prefer-dist"]),
no_dev = dict(default="yes", type="bool", aliases=["no-dev"]),
no_scripts = dict(default="no", type="bool", aliases=["no-scripts"]),
no_plugins = dict(default="no", type="bool", aliases=["no-plugins"]),
optimize_autoloader = dict(default="yes", type="bool", aliases=["optimize-autoloader"]),
ignore_platform_reqs = dict(default="no", type="bool", aliases=["ignore-platform-reqs"]),
),
supports_check_mode=True
)
# Get composer command with fallback to default
command = module.params['command']
if re.search(r"\s", command):
module.fail_json(msg="Use the 'arguments' param for passing arguments with the 'command'")
arguments = module.params['arguments']
available_options = get_available_options(module=module, command=command)
options = []
# Default options
default_options = [
'no-ansi',
'no-interaction',
'no-progress',
]
for option in default_options:
if option in available_options:
option = "--%s" % option
options.append(option)
options.extend(['--working-dir', os.path.abspath(module.params['working_dir'])])
option_params = {
'prefer_source': 'prefer-source',
'prefer_dist': 'prefer-dist',
'no_dev': 'no-dev',
'no_scripts': 'no-scripts',
        'no_plugins': 'no-plugins',
'optimize_autoloader': 'optimize-autoloader',
'ignore_platform_reqs': 'ignore-platform-reqs',
}
for param, option in option_params.iteritems():
if module.params.get(param) and option in available_options:
option = "--%s" % option
options.append(option)
if module.check_mode:
options.append('--dry-run')
rc, out, err = composer_command(module, command, arguments, options)
if rc != 0:
output = parse_out(err)
module.fail_json(msg=output, stdout=err)
else:
# Composer version > 1.0.0-alpha9 now use stderr for standard notification messages
output = parse_out(out + err)
module.exit_json(changed=has_changed(output), msg=output, stdout=out+err)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

@ -0,0 +1,239 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Franck Cuny <franck@lumberjaph.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: cpanm
short_description: Manages Perl library dependencies.
description:
- Manage Perl library dependencies.
version_added: "1.6"
options:
name:
description:
- The name of the Perl library to install. You may use the "full distribution path", e.g. MIYAGAWA/Plack-0.99_05.tar.gz
required: false
default: null
aliases: ["pkg"]
from_path:
description:
- The local directory from where to install
required: false
default: null
notest:
description:
- Do not run unit tests
required: false
default: false
locallib:
description:
- Specify the install base to install modules
required: false
default: false
mirror:
description:
- Specifies the base URL for the CPAN mirror to use
required: false
default: false
mirror_only:
description:
- Use the mirror's index file instead of the CPAN Meta DB
required: false
default: false
installdeps:
description:
- Only install dependencies
required: false
default: false
version_added: "2.0"
version:
description:
- minimum version of perl module to consider acceptable
required: false
default: false
version_added: "2.1"
system_lib:
description:
- Use this if you want to install modules to the system perl include path. You must be root or have "passwordless" sudo for this to work.
- This uses the cpanm commandline option '--sudo', which has nothing to do with ansible privilege escalation.
required: false
default: false
version_added: "2.0"
aliases: ['use_sudo']
executable:
description:
- Override the path to the cpanm executable
required: false
default: null
version_added: "2.1"
notes:
    - Please note that cpanm (U(http://search.cpan.org/dist/App-cpanminus/bin/cpanm)) must be installed on the remote host.
author: "Franck Cuny (@franckcuny)"
'''
EXAMPLES = '''
# install Dancer perl package
- cpanm:
name: Dancer
# install version 0.99_05 of the Plack perl package
- cpanm:
name: MIYAGAWA/Plack-0.99_05.tar.gz
# install Dancer into the specified locallib
- cpanm:
name: Dancer
locallib: /srv/webapps/my_app/extlib
# install perl dependencies from local directory
- cpanm:
from_path: /srv/webapps/my_app/src/
# install Dancer perl package without running the unit tests in indicated locallib
- cpanm:
name: Dancer
notest: True
locallib: /srv/webapps/my_app/extlib
# install Dancer perl package from a specific mirror
- cpanm:
name: Dancer
mirror: 'http://cpan.cpantesters.org/'
# install Dancer perl package into the system root path
- cpanm:
name: Dancer
system_lib: yes
# install Dancer if it's not already installed
# OR the installed version is older than version 1.0
- cpanm:
name: Dancer
version: '1.0'
'''
def _is_package_installed(module, name, locallib, cpanm, version):
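    # The check shells out to `perl -e 'use <name> <version>;'`; perl exits 0
    # only if the module loads (at or above the requested version), so rc == 0
    # is taken to mean the package is already installed.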
cmd = ""
if locallib:
os.environ["PERL5LIB"] = "%s/lib/perl5" % locallib
cmd = "%s perl -e ' use %s" % (cmd, name)
if version:
cmd = "%s %s;'" % (cmd, version)
else:
cmd = "%s;'" % cmd
res, stdout, stderr = module.run_command(cmd, check_rc=False)
if res == 0:
return True
else:
return False
def _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo):
# this code should use "%s" like everything else and just return early but not fixing all of it now.
# don't copy stuff like this
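    # e.g. name="Dancer", notest=True, locallib="/srv/webapps/my_app/extlib"
    # yields something like "/usr/local/bin/cpanm Dancer -n -l /srv/webapps/my_app/extlib"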
if from_path:
cmd = cpanm + " " + from_path
else:
cmd = cpanm + " " + name
if notest is True:
cmd = cmd + " -n"
if locallib is not None:
cmd = cmd + " -l " + locallib
if mirror is not None:
cmd = cmd + " --mirror " + mirror
if mirror_only is True:
cmd = cmd + " --mirror-only"
if installdeps is True:
cmd = cmd + " --installdeps"
if use_sudo is True:
cmd = cmd + " --sudo"
return cmd
def _get_cpanm_path(module):
if module.params['executable']:
return module.params['executable']
else:
return module.get_bin_path('cpanm', True)
def main():
arg_spec = dict(
name=dict(default=None, required=False, aliases=['pkg']),
from_path=dict(default=None, required=False, type='path'),
notest=dict(default=False, type='bool'),
locallib=dict(default=None, required=False, type='path'),
mirror=dict(default=None, required=False),
mirror_only=dict(default=False, type='bool'),
installdeps=dict(default=False, type='bool'),
system_lib=dict(default=False, type='bool', aliases=['use_sudo']),
version=dict(default=None, required=False),
executable=dict(required=False, type='path'),
)
module = AnsibleModule(
argument_spec=arg_spec,
required_one_of=[['name', 'from_path']],
)
cpanm = _get_cpanm_path(module)
name = module.params['name']
from_path = module.params['from_path']
notest = module.boolean(module.params.get('notest', False))
locallib = module.params['locallib']
mirror = module.params['mirror']
mirror_only = module.params['mirror_only']
installdeps = module.params['installdeps']
use_sudo = module.params['system_lib']
version = module.params['version']
changed = False
installed = _is_package_installed(module, name, locallib, cpanm, version)
if not installed:
cmd = _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo)
rc_cpanm, out_cpanm, err_cpanm = module.run_command(cmd, check_rc=False)
if rc_cpanm != 0:
module.fail_json(msg=err_cpanm, cmd=cmd)
if (err_cpanm.find('is up to date') == -1 and out_cpanm.find('is up to date') == -1):
changed = True
module.exit_json(changed=changed, binary=cpanm, name=name)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

@ -0,0 +1,424 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Chris Schmidt <chris.schmidt () contrastsecurity.com>
#
# Built using https://github.com/hamnis/useful-scripts/blob/master/python/download-maven-artifact
# as a reference and starting point.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
__author__ = 'cschmidt'
from lxml import etree
import os
import hashlib
import sys
import posixpath
from urlparse import urlparse
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
try:
import boto3
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: maven_artifact
short_description: Downloads an Artifact from a Maven Repository
version_added: "2.0"
description:
- Downloads an artifact from a maven repository given the maven coordinates provided to the module. Can retrieve
- snapshots or release versions of the artifact and will resolve the latest available version if one is not
- available.
author: "Chris Schmidt (@chrisisbeef)"
requirements:
- "python >= 2.6"
- lxml
- boto if using a S3 repository (s3://...)
options:
group_id:
description:
- The Maven groupId coordinate
required: true
artifact_id:
description:
- The maven artifactId coordinate
required: true
version:
description:
- The maven version coordinate
required: false
default: latest
classifier:
description:
- The maven classifier coordinate
required: false
default: null
extension:
description:
- The maven type/extension coordinate
required: false
default: jar
repository_url:
description:
- The URL of the Maven Repository to download from.
- Use s3://... if the repository is hosted on Amazon S3, added in version 2.2.
required: false
default: http://repo1.maven.org/maven2
username:
description:
            - The username to authenticate as to the Maven Repository. Use AWS secret key if the repository is hosted on S3
required: false
default: null
aliases: [ "aws_secret_key" ]
password:
description:
            - The password to authenticate with to the Maven Repository. Use AWS secret access key if the repository is hosted on S3
required: false
default: null
aliases: [ "aws_secret_access_key" ]
dest:
description:
- The path where the artifact should be written to
required: true
        default: null
state:
description:
- The desired state of the artifact
        required: false
default: present
choices: [present,absent]
timeout:
description:
- Specifies a timeout in seconds for the connection attempt
required: false
default: 10
version_added: "2.3"
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be set to C(no) when no other option exists.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: "1.9.3"
'''
EXAMPLES = '''
# Download the latest version of the JUnit framework artifact from Maven Central
- maven_artifact:
group_id: junit
artifact_id: junit
dest: /tmp/junit-latest.jar
# Download JUnit 4.11 from Maven Central
- maven_artifact:
group_id: junit
artifact_id: junit
version: 4.11
dest: /tmp/junit-4.11.jar
# Download an artifact from a private repository requiring authentication
- maven_artifact:
group_id: com.company
artifact_id: library-name
repository_url: 'https://repo.company.com/maven'
username: user
password: pass
dest: /tmp/library-name-latest.jar
# Download a WAR File to the Tomcat webapps directory to be deployed
- maven_artifact:
group_id: com.company
artifact_id: web-app
extension: war
repository_url: 'https://repo.company.com/maven'
dest: /var/lib/tomcat7/webapps/web-app.war
'''
class Artifact(object):
def __init__(self, group_id, artifact_id, version, classifier=None, extension='jar'):
if not group_id:
raise ValueError("group_id must be set")
if not artifact_id:
raise ValueError("artifact_id must be set")
self.group_id = group_id
self.artifact_id = artifact_id
self.version = version
self.classifier = classifier
if not extension:
self.extension = "jar"
else:
self.extension = extension
def is_snapshot(self):
return self.version and self.version.endswith("SNAPSHOT")
def path(self, with_version=True):
base = posixpath.join(self.group_id.replace(".", "/"), self.artifact_id)
if with_version and self.version:
return posixpath.join(base, self.version)
else:
return base
def _generate_filename(self):
if not self.classifier:
return self.artifact_id + "." + self.extension
else:
return self.artifact_id + "-" + self.classifier + "." + self.extension
def get_filename(self, filename=None):
if not filename:
filename = self._generate_filename()
elif os.path.isdir(filename):
filename = os.path.join(filename, self._generate_filename())
return filename
def __str__(self):
if self.classifier:
return "%s:%s:%s:%s:%s" % (self.group_id, self.artifact_id, self.extension, self.classifier, self.version)
elif self.extension != "jar":
return "%s:%s:%s:%s" % (self.group_id, self.artifact_id, self.extension, self.version)
else:
return "%s:%s:%s" % (self.group_id, self.artifact_id, self.version)
@staticmethod
def parse(input):
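        # Accepts "group:artifact:version", "group:artifact:extension:version"
        # and "group:artifact:extension:classifier:version" coordinate strings.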
parts = input.split(":")
if len(parts) >= 3:
g = parts[0]
a = parts[1]
v = parts[len(parts) - 1]
t = None
c = None
if len(parts) == 4:
t = parts[2]
if len(parts) == 5:
t = parts[2]
c = parts[3]
return Artifact(g, a, v, c, t)
else:
return None
class MavenDownloader:
def __init__(self, module, base="http://repo1.maven.org/maven2"):
self.module = module
if base.endswith("/"):
base = base.rstrip("/")
self.base = base
self.user_agent = "Maven Artifact Downloader/1.0"
def _find_latest_version_available(self, artifact):
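        # maven-metadata.xml at the artifact root lists all published versions;
        # the last <version> element is taken to be the latest.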
path = "/%s/maven-metadata.xml" % (artifact.path(False))
xml = self._request(self.base + path, "Failed to download maven-metadata.xml", lambda r: etree.parse(r))
v = xml.xpath("/metadata/versioning/versions/version[last()]/text()")
if v:
return v[0]
def find_uri_for_artifact(self, artifact):
if artifact.version == "latest":
artifact.version = self._find_latest_version_available(artifact)
if artifact.is_snapshot():
path = "/%s/maven-metadata.xml" % (artifact.path())
xml = self._request(self.base + path, "Failed to download maven-metadata.xml", lambda r: etree.parse(r))
timestamp = xml.xpath("/metadata/versioning/snapshot/timestamp/text()")[0]
buildNumber = xml.xpath("/metadata/versioning/snapshot/buildNumber/text()")[0]
for snapshotArtifact in xml.xpath("/metadata/versioning/snapshotVersions/snapshotVersion"):
if len(snapshotArtifact.xpath("classifier/text()")) > 0 and snapshotArtifact.xpath("classifier/text()")[0] == artifact.classifier and len(snapshotArtifact.xpath("extension/text()")) > 0 and snapshotArtifact.xpath("extension/text()")[0] == artifact.extension:
return self._uri_for_artifact(artifact, snapshotArtifact.xpath("value/text()")[0])
return self._uri_for_artifact(artifact, artifact.version.replace("SNAPSHOT", timestamp + "-" + buildNumber))
return self._uri_for_artifact(artifact, artifact.version)
def _uri_for_artifact(self, artifact, version=None):
if artifact.is_snapshot() and not version:
raise ValueError("Expected uniqueversion for snapshot artifact " + str(artifact))
elif not artifact.is_snapshot():
version = artifact.version
if artifact.classifier:
return posixpath.join(self.base, artifact.path(), artifact.artifact_id + "-" + version + "-" + artifact.classifier + "." + artifact.extension)
return posixpath.join(self.base, artifact.path(), artifact.artifact_id + "-" + version + "." + artifact.extension)
def _request(self, url, failmsg, f):
url_to_use = url
parsed_url = urlparse(url)
if parsed_url.scheme=='s3':
parsed_url = urlparse(url)
bucket_name = parsed_url.netloc
key_name = parsed_url.path[1:]
client = boto3.client('s3',aws_access_key_id=self.module.params.get('username', ''), aws_secret_access_key=self.module.params.get('password', ''))
url_to_use = client.generate_presigned_url('get_object',Params={'Bucket':bucket_name,'Key':key_name},ExpiresIn=10)
req_timeout = self.module.params.get('timeout')
# Hack to add parameters in the way that fetch_url expects
self.module.params['url_username'] = self.module.params.get('username', '')
self.module.params['url_password'] = self.module.params.get('password', '')
self.module.params['http_agent'] = self.module.params.get('user_agent', None)
response, info = fetch_url(self.module, url_to_use, timeout=req_timeout)
if info['status'] != 200:
            raise ValueError(failmsg + " because of " + info['msg'] + " for URL " + url_to_use)
else:
return f(response)
def download(self, artifact, filename=None):
filename = artifact.get_filename(filename)
if not artifact.version or artifact.version == "latest":
artifact = Artifact(artifact.group_id, artifact.artifact_id, self._find_latest_version_available(artifact),
artifact.classifier, artifact.extension)
url = self.find_uri_for_artifact(artifact)
if not self.verify_md5(filename, url + ".md5"):
response = self._request(url, "Failed to download artifact " + str(artifact), lambda r: r)
if response:
f = open(filename, 'w')
# f.write(response.read())
self._write_chunks(response, f, report_hook=self.chunk_report)
f.close()
return True
else:
return False
else:
return True
def chunk_report(self, bytes_so_far, chunk_size, total_size):
percent = float(bytes_so_far) / total_size
percent = round(percent * 100, 2)
sys.stdout.write("Downloaded %d of %d bytes (%0.2f%%)\r" %
(bytes_so_far, total_size, percent))
if bytes_so_far >= total_size:
sys.stdout.write('\n')
def _write_chunks(self, response, file, chunk_size=8192, report_hook=None):
total_size = response.info().getheader('Content-Length').strip()
total_size = int(total_size)
bytes_so_far = 0
while 1:
chunk = response.read(chunk_size)
bytes_so_far += len(chunk)
if not chunk:
break
file.write(chunk)
if report_hook:
report_hook(bytes_so_far, chunk_size, total_size)
return bytes_so_far
def verify_md5(self, file, remote_md5):
if not os.path.exists(file):
return False
else:
local_md5 = self._local_md5(file)
remote = self._request(remote_md5, "Failed to download MD5", lambda r: r.read())
return local_md5 == remote
def _local_md5(self, file):
md5 = hashlib.md5()
f = open(file, 'rb')
for chunk in iter(lambda: f.read(8192), ''):
md5.update(chunk)
f.close()
return md5.hexdigest()
def main():
module = AnsibleModule(
argument_spec = dict(
group_id = dict(default=None),
artifact_id = dict(default=None),
version = dict(default="latest"),
classifier = dict(default=None),
extension = dict(default='jar'),
repository_url = dict(default=None),
username = dict(default=None,aliases=['aws_secret_key']),
password = dict(default=None, no_log=True,aliases=['aws_secret_access_key']),
state = dict(default="present", choices=["present","absent"]), # TODO - Implement a "latest" state
timeout = dict(default=10, type='int'),
dest = dict(type="path", default=None),
validate_certs = dict(required=False, default=True, type='bool'),
)
)
try:
parsed_url = urlparse(module.params["repository_url"])
except AttributeError as e:
module.fail_json(msg='url parsing went wrong %s' % e)
if parsed_url.scheme=='s3' and not HAS_BOTO:
module.fail_json(msg='boto3 required for this module, when using s3:// repository URLs')
group_id = module.params["group_id"]
artifact_id = module.params["artifact_id"]
version = module.params["version"]
classifier = module.params["classifier"]
extension = module.params["extension"]
repository_url = module.params["repository_url"]
repository_username = module.params["username"]
repository_password = module.params["password"]
state = module.params["state"]
dest = module.params["dest"]
if not repository_url:
repository_url = "http://repo1.maven.org/maven2"
#downloader = MavenDownloader(module, repository_url, repository_username, repository_password)
downloader = MavenDownloader(module, repository_url)
try:
artifact = Artifact(group_id, artifact_id, version, classifier, extension)
except ValueError as e:
module.fail_json(msg=e.args[0])
prev_state = "absent"
if os.path.isdir(dest):
dest = posixpath.join(dest, artifact_id + "-" + version + "." + extension)
if os.path.lexists(dest) and downloader.verify_md5(dest, downloader.find_uri_for_artifact(artifact) + '.md5'):
prev_state = "present"
else:
path = os.path.dirname(dest)
if not os.path.exists(path):
os.makedirs(path)
if prev_state == "present":
module.exit_json(dest=dest, state=state, changed=False)
try:
if downloader.download(artifact, dest):
module.exit_json(state=state, dest=dest, group_id=group_id, artifact_id=artifact_id, version=version, classifier=classifier, extension=extension, repository_url=repository_url, changed=True)
else:
module.fail_json(msg="Unable to download the artifact")
except ValueError as e:
module.fail_json(msg=e.args[0])
if __name__ == '__main__':
main()

@ -0,0 +1,295 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Chris Hoffman <christopher.hoffman@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: npm
short_description: Manage node.js packages with npm
description:
- Manage node.js packages with Node Package Manager (npm)
version_added: 1.2
author: "Chris Hoffman (@chrishoffman)"
options:
name:
description:
- The name of a node.js library to install
required: false
path:
description:
- The base path where to install the node.js libraries
required: false
version:
description:
- The version to be installed
required: false
global:
description:
- Install the node.js library globally
required: false
default: no
choices: [ "yes", "no" ]
executable:
description:
- The executable location for npm.
- This is useful if you are using a version manager, such as nvm
required: false
ignore_scripts:
description:
- Use the --ignore-scripts flag when installing.
required: false
choices: [ "yes", "no" ]
default: no
version_added: "1.8"
production:
description:
- Install dependencies in production mode, excluding devDependencies
required: false
choices: [ "yes", "no" ]
default: no
registry:
description:
- The registry to install modules from.
required: false
version_added: "1.6"
state:
description:
- The state of the node.js library
required: false
default: present
choices: [ "present", "absent", "latest" ]
'''
EXAMPLES = '''
description: Install "coffee-script" node.js package.
- npm:
name: coffee-script
path: /app/location
description: Install "coffee-script" node.js package on version 1.6.1.
- npm:
name: coffee-script
version: '1.6.1'
path: /app/location
description: Install "coffee-script" node.js package globally.
- npm:
name: coffee-script
global: yes
# Remove the globally installed "coffee-script" package.
- npm:
name: coffee-script
global: yes
state: absent
description: Install "coffee-script" node.js package from custom registry.
- npm:
name: coffee-script
registry: 'http://registry.mysite.com'
# Install packages based on package.json.
- npm:
path: /app/location
# Update packages based on package.json to their latest version.
- npm:
path: /app/location
state: latest
# Install packages based on package.json using the npm installed with nvm v0.10.1.
- npm:
path: /app/location
executable: /opt/nvm/v0.10.1/bin/npm
state: present
'''
import os
import re
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
# Let snippet from module_utils/basic.py return a proper error in this case
pass
class Npm(object):
def __init__(self, module, **kwargs):
self.module = module
self.glbl = kwargs['glbl']
self.name = kwargs['name']
self.version = kwargs['version']
self.path = kwargs['path']
self.registry = kwargs['registry']
self.production = kwargs['production']
self.ignore_scripts = kwargs['ignore_scripts']
if kwargs['executable']:
self.executable = kwargs['executable'].split(' ')
else:
self.executable = [module.get_bin_path('npm', True)]
if kwargs['version']:
self.name_version = self.name + '@' + str(self.version)
else:
self.name_version = self.name
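# _exec builds the npm command line from the instance flags (--global,
# --production, --ignore-scripts, --registry) plus the package spec, and runs
# it from self.path when a path is given. In check mode it only executes when
# run_in_check_mode is set (read-only queries such as list/outdated);
# otherwise it returns an empty string.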
def _exec(self, args, run_in_check_mode=False, check_rc=True):
if not self.module.check_mode or (self.module.check_mode and run_in_check_mode):
cmd = self.executable + args
if self.glbl:
cmd.append('--global')
if self.production:
cmd.append('--production')
if self.ignore_scripts:
cmd.append('--ignore-scripts')
if self.name:
cmd.append(self.name_version)
if self.registry:
cmd.append('--registry')
cmd.append(self.registry)
#If path is specified, cd into that path and run the command.
cwd = None
if self.path:
if not os.path.exists(self.path):
os.makedirs(self.path)
if not os.path.isdir(self.path):
self.module.fail_json(msg="path %s is not a directory" % self.path)
cwd = self.path
rc, out, err = self.module.run_command(cmd, check_rc=check_rc, cwd=cwd)
return out
return ''
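# list() parses `npm list --json` and classifies each dependency as installed
# or missing (entries flagged missing/invalid, or a named package absent from
# the tree); the resulting (installed, missing) pair drives the install decision.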
def list(self):
cmd = ['list', '--json']
installed = list()
missing = list()
data = json.loads(self._exec(cmd, True, False))
if 'dependencies' in data:
for dep in data['dependencies']:
if 'missing' in data['dependencies'][dep] and data['dependencies'][dep]['missing']:
missing.append(dep)
elif 'invalid' in data['dependencies'][dep] and data['dependencies'][dep]['invalid']:
missing.append(dep)
else:
installed.append(dep)
if self.name and self.name not in installed:
missing.append(self.name)
#Named dependency not installed
else:
missing.append(self.name)
return installed, missing
def install(self):
return self._exec(['install'])
def update(self):
return self._exec(['update'])
def uninstall(self):
return self._exec(['uninstall'])
def list_outdated(self):
outdated = list()
data = self._exec(['outdated'], True, False)
for dep in data.splitlines():
if dep:
# node.js v0.10.22 changed the `npm outdated` module separator
# from "@" to " ". Split on both for backwards compatibility.
pkg, other = re.split(r'\s|@', dep, 1)
outdated.append(pkg)
return outdated
def main():
arg_spec = dict(
name=dict(default=None),
path=dict(default=None, type='path'),
version=dict(default=None),
production=dict(default='no', type='bool'),
executable=dict(default=None, type='path'),
registry=dict(default=None),
state=dict(default='present', choices=['present', 'absent', 'latest']),
ignore_scripts=dict(default=False, type='bool'),
)
arg_spec['global'] = dict(default='no', type='bool')
module = AnsibleModule(
argument_spec=arg_spec,
supports_check_mode=True
)
name = module.params['name']
path = module.params['path']
version = module.params['version']
glbl = module.params['global']
production = module.params['production']
executable = module.params['executable']
registry = module.params['registry']
state = module.params['state']
ignore_scripts = module.params['ignore_scripts']
if not path and not glbl:
module.fail_json(msg='path must be specified when not using global')
if state == 'absent' and not name:
module.fail_json(msg='uninstalling a package is only available for named packages')
npm = Npm(module, name=name, path=path, version=version, glbl=glbl, production=production, \
executable=executable, registry=registry, ignore_scripts=ignore_scripts)
changed = False
if state == 'present':
installed, missing = npm.list()
if len(missing):
changed = True
npm.install()
elif state == 'latest':
installed, missing = npm.list()
outdated = npm.list_outdated()
if len(missing):
changed = True
npm.install()
if len(outdated):
changed = True
npm.update()
else: #absent
installed, missing = npm.list()
if name in installed:
changed = True
npm.uninstall()
module.exit_json(changed=changed)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,240 @@
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# (c) 2012, Afterburn <http://github.com/afterburn>
# (c) 2013, Aaron Bull Schaefer <aaron@elasticdog.com>
# (c) 2015, Jonathan Lestrelin <jonathan.lestrelin@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: pear
short_description: Manage pear/pecl packages
description:
- Manage PHP packages with the pear package manager.
version_added: 2.0
author:
- "'jonathan.lestrelin' <jonathan.lestrelin@gmail.com>"
options:
name:
description:
- Name of the package to install, upgrade, or remove.
required: true
state:
description:
- Desired state of the package.
required: false
default: "present"
choices: ["present", "absent", "latest"]
'''
EXAMPLES = '''
# Install pear package
- pear:
name: Net_URL2
state: present
# Install pecl package
- pear:
name: pecl/json_post
state: present
# Upgrade package
- pear:
name: Net_URL2
state: latest
# Remove packages
- pear:
name: Net_URL2,pecl/json_post
state: absent
'''
import os
def get_local_version(pear_output):
"""Take pear remoteinfo output and get the installed version"""
lines = pear_output.split('\n')
for line in lines:
if 'Installed ' in line:
installed = line.rsplit(None, 1)[-1].strip()
if installed == '-': continue
return installed
return None
def get_repository_version(pear_output):
"""Take pear remote-info output and get the latest version"""
lines = pear_output.split('\n')
for line in lines:
if 'Latest ' in line:
return line.rsplit(None, 1)[-1].strip()
return None
def query_package(module, name, state="present"):
"""Query the package status in both the local system and the repository.
Returns a boolean to indicate if the package is installed,
and a second boolean to indicate if the package is up-to-date."""
if state == "present":
lcmd = "pear info %s" % (name)
lrc, lstdout, lstderr = module.run_command(lcmd, check_rc=False)
if lrc != 0:
# package is not installed locally
return False, False
rcmd = "pear remote-info %s" % (name)
rrc, rstdout, rstderr = module.run_command(rcmd, check_rc=False)
# get the version installed locally (if any)
lversion = get_local_version(rstdout)
# get the version in the repository
rversion = get_repository_version(rstdout)
if rrc == 0:
# Return True to indicate that the package is installed locally,
# and the result of the version number comparison
# to determine if the package is up-to-date.
return True, (lversion == rversion)
return False, False
def remove_packages(module, packages):
remove_c = 0
# Using a for loop in case of error, we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
installed, updated = query_package(module, package)
if not installed:
continue
cmd = "pear uninstall %s" % (package)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to remove %s" % (package))
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, state, packages):
install_c = 0
for i, package in enumerate(packages):
# if the package is installed and state == present
# or state == latest and is up-to-date then skip
installed, updated = query_package(module, package)
if installed and (state == 'present' or (state == 'latest' and updated)):
continue
if state == 'present':
command = 'install'
if state == 'latest':
command = 'upgrade'
cmd = "pear %s %s" % (command, package)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to install %s" % (package))
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg="installed %s package(s)" % (install_c))
module.exit_json(changed=False, msg="package(s) already installed")
def check_packages(module, packages, state):
would_be_changed = []
for package in packages:
installed, updated = query_package(module, package)
if ((state in ["present", "latest"] and not installed) or
(state == "absent" and installed) or
(state == "latest" and not updated)):
would_be_changed.append(package)
if would_be_changed:
if state == "absent":
state = "removed"
module.exit_json(changed=True, msg="%s package(s) would be %s" % (
len(would_be_changed), state))
else:
module.exit_json(changed=False, msg="package(s) already %s" % state)
def exe_exists(program):
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if os.path.isfile(exe_file) and os.access(exe_file, os.X_OK):
return True
return False
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(aliases=['pkg']),
state = dict(default='present', choices=['present', 'installed', "latest", 'absent', 'removed'])),
required_one_of = [['name']],
supports_check_mode = True)
if not exe_exists("pear"):
module.fail_json(msg="cannot find pear executable in PATH")
p = module.params
# normalize the state parameter
if p['state'] in ['present', 'installed']:
p['state'] = 'present'
elif p['state'] in ['absent', 'removed']:
p['state'] = 'absent'
if p['name']:
pkgs = p['name'].split(',')
pkg_files = []
for i, pkg in enumerate(pkgs):
pkg_files.append(None)
if module.check_mode:
check_packages(module, pkgs, p['state'])
if p['state'] in ['present', 'latest']:
install_packages(module, p['state'], pkgs)
elif p['state'] == 'absent':
remove_packages(module, pkgs)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,272 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Kevin Brebanov <https://github.com/kbrebanov>
# Based on pacman (Afterburn <http://github.com/afterburn>, Aaron Bull Schaefer <aaron@elasticdog.com>)
# and apt (Matthew Williams <matthew@flowroute.com>) modules.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: apk
short_description: Manages apk packages
description:
- Manages I(apk) packages for Alpine Linux.
version_added: "2.0"
options:
name:
description:
- A package name, like C(foo), or multiple packages, like C(foo, bar).
required: false
default: null
state:
description:
- Indicates the desired package(s) state.
- C(present) ensures the package(s) is/are present.
- C(absent) ensures the package(s) is/are absent.
- C(latest) ensures the package(s) is/are present and the latest version(s).
required: false
default: present
choices: [ "present", "absent", "latest" ]
update_cache:
description:
- Update repository indexes. Can be run with other steps or on its own.
required: false
default: no
choices: [ "yes", "no" ]
upgrade:
description:
- Upgrade all installed packages to their latest version.
required: false
default: no
choices: [ "yes", "no" ]
notes:
- '"name" and "upgrade" are mutually exclusive.'
'''
EXAMPLES = '''
# Update repositories and install "foo" package
- apk:
name: foo
update_cache: yes
# Update repositories and install "foo" and "bar" packages
- apk:
name: foo,bar
update_cache: yes
# Remove "foo" package
- apk:
name: foo
state: absent
# Remove "foo" and "bar" packages
- apk:
name: foo,bar
state: absent
# Install the package "foo"
- apk:
name: foo
state: present
# Install the packages "foo" and "bar"
- apk:
name: foo,bar
state: present
# Update repositories and update package "foo" to latest version
- apk:
name: foo
state: latest
update_cache: yes
# Update repositories and update packages "foo" and "bar" to latest versions
- apk:
name: foo,bar
state: latest
update_cache: yes
# Update all installed packages to the latest versions
- apk:
upgrade: yes
# Update repositories as a separate step
- apk:
update_cache: yes
'''
import os
import re
def update_package_db(module):
cmd = "%s update" % (APK_PATH)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
return True
else:
module.fail_json(msg="could not update package db")
def query_package(module, name):
cmd = "%s -v info --installed %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
return True
else:
return False
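# `apk version <name>` prints the installed version, a comparison character
# and the version available in the repositories; the module treats '<' in the
# comparison column as "a newer version is available".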
def query_latest(module, name):
cmd = "%s version %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
search_pattern = "(%s)-[\d\.\w]+-[\d\w]+\s+(.)\s+[\d\.\w]+-[\d\w]+\s+" % (name)
match = re.search(search_pattern, stdout)
if match and match.group(2) == "<":
return False
return True
def query_virtual(module, name):
cmd = "%s -v info --description %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
search_pattern = "^%s: virtual meta package" % (name)
if re.search(search_pattern, stdout):
return True
return False
def get_dependencies(module, name):
cmd = "%s -v info --depends %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
dependencies = stdout.split()
if len(dependencies) > 1:
return dependencies[1:]
else:
return []
def upgrade_packages(module):
if module.check_mode:
cmd = "%s upgrade --simulate" % (APK_PATH)
else:
cmd = "%s upgrade" % (APK_PATH)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to upgrade packages")
if re.search('^OK', stdout):
module.exit_json(changed=False, msg="packages already upgraded")
module.exit_json(changed=True, msg="upgraded packages")
def install_packages(module, names, state):
upgrade = False
to_install = []
to_upgrade = []
for name in names:
# Check if virtual package
if query_virtual(module, name):
# Get virtual package dependencies
dependencies = get_dependencies(module, name)
for dependency in dependencies:
if state == 'latest' and not query_latest(module, dependency):
to_upgrade.append(dependency)
else:
if not query_package(module, name):
to_install.append(name)
elif state == 'latest' and not query_latest(module, name):
to_upgrade.append(name)
if to_upgrade:
upgrade = True
if not to_install and not upgrade:
module.exit_json(changed=False, msg="package(s) already installed")
packages = " ".join(to_install) + " ".join(to_upgrade)
if upgrade:
if module.check_mode:
cmd = "%s add --upgrade --simulate %s" % (APK_PATH, packages)
else:
cmd = "%s add --upgrade %s" % (APK_PATH, packages)
else:
if module.check_mode:
cmd = "%s add --simulate %s" % (APK_PATH, packages)
else:
cmd = "%s add %s" % (APK_PATH, packages)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to install %s" % (packages))
module.exit_json(changed=True, msg="installed %s package(s)" % (packages))
def remove_packages(module, names):
installed = []
for name in names:
if query_package(module, name):
installed.append(name)
if not installed:
module.exit_json(changed=False, msg="package(s) already removed")
names = " ".join(installed)
if module.check_mode:
cmd = "%s del --purge --simulate %s" % (APK_PATH, names)
else:
cmd = "%s del --purge %s" % (APK_PATH, names)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to remove %s package(s)" % (names))
module.exit_json(changed=True, msg="removed %s package(s)" % (names))
# ==========================================
# Main control flow.
def main():
module = AnsibleModule(
argument_spec = dict(
state = dict(default='present', choices=['present', 'installed', 'absent', 'removed', 'latest']),
name = dict(type='list'),
update_cache = dict(default='no', type='bool'),
upgrade = dict(default='no', type='bool'),
),
required_one_of = [['name', 'update_cache', 'upgrade']],
mutually_exclusive = [['name', 'upgrade']],
supports_check_mode = True
)
# Set LANG env since we parse stdout
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
global APK_PATH
APK_PATH = module.get_bin_path('apk', required=True)
p = module.params
# normalize the state parameter
if p['state'] in ['present', 'installed']:
p['state'] = 'present'
if p['state'] in ['absent', 'removed']:
p['state'] = 'absent'
if p['update_cache']:
update_package_db(module)
if not p['name']:
module.exit_json(changed=True, msg='updated repository indexes')
if p['upgrade']:
upgrade_packages(module)
if p['state'] in ['present', 'latest']:
install_packages(module, p['name'], p['state'])
elif p['state'] == 'absent':
remove_packages(module, p['name'])
# Import module snippets.
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,479 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2015 Cristian van Ee <cristian at cvee.org>
# Copyright 2015 Igor Gnatenko <i.gnatenko.brain@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'core',
'version': '1.0'}
DOCUMENTATION = '''
---
module: dnf
version_added: 1.9
short_description: Manages packages with the I(dnf) package manager
description:
- Installs, upgrades, removes, and lists packages and groups with the I(dnf) package manager.
options:
name:
description:
- "Package name, or package specifier with version, like C(name-1.0). When using state=latest, this can be '*' which means run: dnf -y update. You can also pass a url or a local path to a rpm file."
required: true
default: null
aliases: []
list:
description:
- Various (non-idempotent) commands for usage with C(/usr/bin/ansible) and I(not) playbooks. See examples.
required: false
default: null
state:
description:
- Whether to install (C(present), C(latest)), or remove (C(absent)) a package.
required: false
choices: [ "present", "latest", "absent" ]
default: "present"
enablerepo:
description:
- I(Repoid) of repositories to enable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a ",".
required: false
default: null
aliases: []
disablerepo:
description:
- I(Repoid) of repositories to disable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a ",".
required: false
default: null
aliases: []
conf_file:
description:
- The remote dnf configuration file to use for the transaction.
required: false
default: null
aliases: []
disable_gpg_check:
description:
- Whether to disable the GPG checking of signatures of packages being
installed. Has an effect only if state is I(present) or I(latest).
required: false
default: "no"
choices: ["yes", "no"]
aliases: []
notes: []
# informational: requirements for nodes
requirements:
- "python >= 2.6"
- python-dnf
author:
- '"Igor Gnatenko (@ignatenkobrain)" <i.gnatenko.brain@gmail.com>'
- '"Cristian van Ee (@DJMuggs)" <cristian at cvee.org>'
'''
EXAMPLES = '''
- name: install the latest version of Apache
dnf:
name: httpd
state: latest
- name: remove the Apache package
dnf:
name: httpd
state: absent
- name: install the latest version of Apache from the testing repo
dnf:
name: httpd
enablerepo: testing
state: present
- name: upgrade all packages
dnf:
name: "*"
state: latest
- name: install the nginx rpm from a remote repo
dnf:
name: 'http://nginx.org/packages/centos/6/noarch/RPMS/nginx-release-centos-6-0.el6.ngx.noarch.rpm'
state: present
- name: install nginx rpm from a local file
dnf:
name: /usr/local/src/nginx-release-centos-6-0.el6.ngx.noarch.rpm
state: present
- name: install the 'Development tools' package group
dnf:
name: '@Development tools'
state: present
'''
import os
try:
import dnf
import dnf.cli
import dnf.const
import dnf.exceptions
import dnf.subject
import dnf.util
HAS_DNF = True
except ImportError:
HAS_DNF = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import PY2
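# _ensure_dnf bootstraps the python dnf bindings when they are missing:
# outside of check mode it installs python2-dnf/python3-dnf with the dnf CLI
# and retries the import, otherwise it fails with an explanatory message.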
def _ensure_dnf(module):
if not HAS_DNF:
if PY2:
package = 'python2-dnf'
else:
package = 'python3-dnf'
if module.check_mode:
module.fail_json(msg="`{0}` is not installed, but it is required"
" for the Ansible dnf module.".format(package))
module.run_command(['dnf', 'install', '-y', package], check_rc=True)
global dnf
try:
import dnf
import dnf.cli
import dnf.const
import dnf.exceptions
import dnf.subject
import dnf.util
except ImportError:
module.fail_json(msg="Could not import the dnf python module."
" Please install `{0}` package.".format(package))
def _configure_base(module, base, conf_file, disable_gpg_check):
"""Configure the dnf Base object."""
conf = base.conf
# Turn off debug messages in the output
conf.debuglevel = 0
# Set whether to check gpg signatures
conf.gpgcheck = not disable_gpg_check
# Don't prompt for user confirmations
conf.assumeyes = True
# Change the configuration file path if provided
if conf_file:
# Fail if we can't read the configuration file.
if not os.access(conf_file, os.R_OK):
module.fail_json(
msg="cannot read configuration file", conf_file=conf_file)
else:
conf.config_file_path = conf_file
# Read the configuration file
conf.read()
def _specify_repositories(base, disablerepo, enablerepo):
"""Enable and disable repositories matching the provided patterns."""
base.read_all_repos()
repos = base.repos
# Disable repositories
for repo_pattern in disablerepo:
for repo in repos.get_matching(repo_pattern):
repo.disable()
# Enable repositories
for repo_pattern in enablerepo:
for repo in repos.get_matching(repo_pattern):
repo.enable()
def _base(module, conf_file, disable_gpg_check, disablerepo, enablerepo):
"""Return a fully configured dnf Base object."""
base = dnf.Base()
_configure_base(module, base, conf_file, disable_gpg_check)
_specify_repositories(base, disablerepo, enablerepo)
base.fill_sack(load_system_repo='auto')
return base
def _package_dict(package):
"""Return a dictionary of information for the package."""
# NOTE: This no longer contains the 'dnfstate' field because it is
# already known based on the query type.
result = {
'name': package.name,
'arch': package.arch,
'epoch': str(package.epoch),
'release': package.release,
'version': package.version,
'repo': package.repoid}
result['nevra'] = '{epoch}:{name}-{version}-{release}.{arch}'.format(
**result)
return result
def list_items(module, base, command):
"""List package info based on the command."""
# Rename updates to upgrades
if command == 'updates':
command = 'upgrades'
# Return the corresponding packages
if command in ['installed', 'upgrades', 'available']:
results = [
_package_dict(package)
for package in getattr(base.sack.query(), command)()]
# Return the enabled repository ids
elif command in ['repos', 'repositories']:
results = [
{'repoid': repo.id, 'state': 'enabled'}
for repo in base.repos.iter_enabled()]
# Return any matching packages
else:
packages = dnf.subject.Subject(command).get_best_query(base.sack)
results = [_package_dict(package) for package in packages]
module.exit_json(results=results)
def _mark_package_install(module, base, pkg_spec):
"""Mark the package for install."""
try:
base.install(pkg_spec)
except dnf.exceptions.MarkingError:
module.fail_json(msg="No package {} available.".format(pkg_spec))
def _parse_spec_group_file(names):
pkg_specs, grp_specs, filenames = [], [], []
for name in names:
if name.endswith(".rpm"):
filenames.append(name)
elif name.startswith("@"):
grp_specs.append(name[1:])
else:
pkg_specs.append(name)
return pkg_specs, grp_specs, filenames
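# dnf >= 2.0 provides add_remote_rpms() for a list of rpm paths/URLs, while
# older releases only offer add_remote_rpm() per file; either way each
# resulting package object is then marked for installation.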
def _install_remote_rpms(base, filenames):
if int(dnf.__version__.split(".")[0]) >= 2:
pkgs = list(sorted(base.add_remote_rpms(list(filenames)), reverse=True))
else:
pkgs = []
for filename in filenames:
pkgs.append(base.add_remote_rpm(filename))
for pkg in pkgs:
base.package_install(pkg)
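# ensure() marks the requested packages, groups (@name), environments and rpm
# files for install/upgrade/removal, then resolves and runs the transaction.
# Group/environment errors are collected in `failures` so dnf can still
# install whatever else it can, mirroring the dnf CLI behaviour.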
def ensure(module, base, state, names):
# Accumulate failures. Package management modules install what they can
# and fail with a message about what they can't.
failures = []
allow_erasing = False
if names == ['*'] and state == 'latest':
base.upgrade_all()
else:
pkg_specs, group_specs, filenames = _parse_spec_group_file(names)
if group_specs:
base.read_comps()
pkg_specs = [p.strip() for p in pkg_specs]
filenames = [f.strip() for f in filenames]
groups = []
environments = []
for group_spec in (g.strip() for g in group_specs):
group = base.comps.group_by_pattern(group_spec)
if group:
groups.append(group)
else:
environment = base.comps.environment_by_pattern(group_spec)
if environment:
environments.append(environment.id)
else:
module.fail_json(
msg="No group {} available.".format(group_spec))
if state in ['installed', 'present']:
# Install files.
_install_remote_rpms(base, filenames)
# Install groups.
for group in groups:
try:
base.group_install(group, dnf.const.GROUP_PACKAGE_TYPES)
except dnf.exceptions.Error as e:
# In dnf 2.0 if all the mandatory packages in a group do
# not install, an error is raised. We want to capture
# this but still install as much as possible.
failures.append((group, e))
for environment in environments:
try:
base.environment_install(environment, dnf.const.GROUP_PACKAGE_TYPES)
except dnf.exceptions.Error as e:
failures.append((environment, e))
# Install packages.
for pkg_spec in pkg_specs:
_mark_package_install(module, base, pkg_spec)
elif state == 'latest':
# "latest" is same as "installed" for filenames.
_install_remote_rpms(base, filenames)
for group in groups:
try:
try:
base.group_upgrade(group)
except dnf.exceptions.CompsError:
# If not already installed, try to install.
base.group_install(group, dnf.const.GROUP_PACKAGE_TYPES)
except dnf.exceptions.Error as e:
failures.append((group, e))
for environment in environments:
try:
try:
base.environment_upgrade(environment)
except dnf.exceptions.CompsError:
# If not already installed, try to install.
base.environment_install(environment, dnf.const.GROUP_PACKAGE_TYPES)
except dnf.exceptions.Error as e:
failures.append((environment, e))
for pkg_spec in pkg_specs:
# best=True causes dnf to install the latest available package,
# even if it was not previously installed
base.conf.best = True
base.install(pkg_spec)
else:
# state == absent
if filenames:
module.fail_json(
msg="Cannot remove paths -- please specify package name.")
for group in groups:
try:
base.group_remove(group)
except dnf.exceptions.CompsError:
# Group is already uninstalled.
pass
for environment in environments:
try:
base.environment_remove(environment)
except dnf.exceptions.CompsError:
# Environment is already uninstalled.
pass
installed = base.sack.query().installed()
for pkg_spec in pkg_specs:
if installed.filter(name=pkg_spec):
base.remove(pkg_spec)
# Like the dnf CLI we want to allow recursive removal of dependent
# packages
allow_erasing = True
if not base.resolve(allow_erasing=allow_erasing):
if failures:
module.fail_json(msg='Failed to install some of the specified packages',
failures=failures)
module.exit_json(msg="Nothing to do")
else:
if module.check_mode:
if failures:
module.fail_json(msg='Failed to install some of the specified packages',
failures=failures)
module.exit_json(changed=True)
base.download_packages(base.transaction.install_set)
base.do_transaction()
response = {'changed': True, 'results': []}
for package in base.transaction.install_set:
response['results'].append("Installed: {0}".format(package))
for package in base.transaction.remove_set:
response['results'].append("Removed: {0}".format(package))
if failures:
module.fail_json(msg='Failed to install some of the specified packages',
failures=failures)
module.exit_json(**response)
def main():
"""The main function."""
module = AnsibleModule(
argument_spec=dict(
name=dict(aliases=['pkg'], type='list'),
state=dict(
default='installed',
choices=[
'absent', 'present', 'installed', 'removed', 'latest']),
enablerepo=dict(type='list', default=[]),
disablerepo=dict(type='list', default=[]),
list=dict(),
conf_file=dict(default=None, type='path'),
disable_gpg_check=dict(default=False, type='bool'),
),
required_one_of=[['name', 'list']],
mutually_exclusive=[['name', 'list']],
supports_check_mode=True)
params = module.params
_ensure_dnf(module)
if params['list']:
base = _base(
module, params['conf_file'], params['disable_gpg_check'],
params['disablerepo'], params['enablerepo'])
list_items(module, base, params['list'])
else:
# Note: base takes a long time to run so we want to check for failure
# before running it.
if not dnf.util.am_i_root():
module.fail_json(msg="This command has to be run under the root user.")
base = _base(
module, params['conf_file'], params['disable_gpg_check'],
params['disablerepo'], params['enablerepo'])
ensure(module, base, params['state'], params['name'])
if __name__ == '__main__':
main()

View file

@ -0,0 +1,904 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Andrew Dunham <andrew@du.nham.ca>
# (c) 2013, Daniel Jaouen <dcj24@cornell.edu>
# (c) 2015, Indrajit Raychaudhuri <irc+code@indrajit.com>
#
# Based on macports (Jimmy Tang <jcftang@gmail.com>)
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: homebrew
author:
- "Indrajit Raychaudhuri (@indrajitr)"
- "Daniel Jaouen (@danieljaouen)"
- "Andrew Dunham (@andrew-d)"
requirements:
- "python >= 2.6"
short_description: Package manager for Homebrew
description:
- Manages Homebrew packages
version_added: "1.1"
options:
name:
description:
- name of package to install/remove
required: false
default: None
aliases: ['pkg', 'package', 'formula']
path:
description:
- "':' separated list of paths to search for 'brew' executable. Since A package (I(formula) in homebrew parlance) location is prefixed relative to the actual path of I(brew) command, providing an alternative I(brew) path enables managing different set of packages in an alternative location in the system."
required: false
default: '/usr/local/bin'
state:
description:
- state of the package
choices: [ 'head', 'latest', 'present', 'absent', 'linked', 'unlinked' ]
required: false
default: present
update_homebrew:
description:
- update homebrew itself first
required: false
default: no
choices: [ "yes", "no" ]
aliases: ['update-brew']
upgrade_all:
description:
- upgrade all homebrew packages
required: false
default: no
choices: [ "yes", "no" ]
aliases: ['upgrade']
install_options:
description:
- options flags to install a package
required: false
default: null
aliases: ['options']
version_added: "1.4"
notes: []
'''
EXAMPLES = '''
# Install formula foo with 'brew' in default path (C(/usr/local/bin))
- homebrew:
name: foo
state: present
# Install formula foo with 'brew' in alternate path C(/my/other/location/bin)
- homebrew:
name: foo
path: /my/other/location/bin
state: present
# Update homebrew first and install formula foo with 'brew' in default path
- homebrew:
name: foo
state: present
update_homebrew: yes
# Update homebrew first and upgrade formula foo to latest available with 'brew' in default path
- homebrew:
name: foo
state: latest
update_homebrew: yes
# Update homebrew and upgrade all packages
- homebrew:
update_homebrew: yes
upgrade_all: yes
# Miscellaneous other examples
- homebrew:
name: foo
state: head
- homebrew:
name: foo
state: linked
- homebrew:
name: foo
state: absent
- homebrew:
name: foo,bar
state: absent
- homebrew:
name: foo
state: present
install_options: with-baz,enable-debug
'''
import os.path
import re
from ansible.module_utils.six import iteritems
# exceptions -------------------------------------------------------------- {{{
class HomebrewException(Exception):
pass
# /exceptions ------------------------------------------------------------- }}}
# utils ------------------------------------------------------------------- {{{
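# _create_regex_group turns the annotated whitelists defined on the class
# below into a negated character class, so a regex match means the value
# contains a character that is not allowed.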
def _create_regex_group(s):
lines = (line.strip() for line in s.split('\n') if line.strip())
chars = filter(None, (line.split('#')[0].strip() for line in lines))
group = r'[^' + r''.join(chars) + r']'
return re.compile(group)
# /utils ------------------------------------------------------------------ }}}
class Homebrew(object):
'''A class to manage Homebrew packages.'''
# class regexes ------------------------------------------------ {{{
VALID_PATH_CHARS = r'''
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
\s # spaces
: # colons
{sep} # the OS-specific path separator
. # dots
- # dashes
'''.format(sep=os.path.sep)
VALID_BREW_PATH_CHARS = r'''
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
\s # spaces
{sep} # the OS-specific path separator
. # dots
- # dashes
'''.format(sep=os.path.sep)
VALID_PACKAGE_CHARS = r'''
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
. # dots
/ # slash (for taps)
\+ # plusses
- # dashes
: # colons (for URLs)
'''
INVALID_PATH_REGEX = _create_regex_group(VALID_PATH_CHARS)
INVALID_BREW_PATH_REGEX = _create_regex_group(VALID_BREW_PATH_CHARS)
INVALID_PACKAGE_REGEX = _create_regex_group(VALID_PACKAGE_CHARS)
# /class regexes ----------------------------------------------- }}}
# class validations -------------------------------------------- {{{
@classmethod
def valid_path(cls, path):
'''
`path` must be one of:
- list of paths
- a string containing only:
- alphanumeric characters
- dashes
- dots
- spaces
- colons
- os.path.sep
'''
if isinstance(path, basestring):
return not cls.INVALID_PATH_REGEX.search(path)
try:
iter(path)
except TypeError:
return False
else:
paths = path
return all(cls.valid_brew_path(path_) for path_ in paths)
@classmethod
def valid_brew_path(cls, brew_path):
'''
`brew_path` must be one of:
- None
- a string containing only:
- alphanumeric characters
- dashes
- dots
- spaces
- os.path.sep
'''
if brew_path is None:
return True
return (
isinstance(brew_path, basestring)
and not cls.INVALID_BREW_PATH_REGEX.search(brew_path)
)
@classmethod
def valid_package(cls, package):
'''A valid package is either None or alphanumeric.'''
if package is None:
return True
return (
isinstance(package, basestring)
and not cls.INVALID_PACKAGE_REGEX.search(package)
)
@classmethod
def valid_state(cls, state):
'''
A valid state is one of:
- None
- installed
- upgraded
- head
- linked
- unlinked
- absent
'''
if state is None:
return True
else:
return (
isinstance(state, basestring)
and state.lower() in (
'installed',
'upgraded',
'head',
'linked',
'unlinked',
'absent',
)
)
@classmethod
def valid_module(cls, module):
'''A valid module is an instance of AnsibleModule.'''
return isinstance(module, AnsibleModule)
# /class validations ------------------------------------------- }}}
# class properties --------------------------------------------- {{{
@property
def module(self):
return self._module
@module.setter
def module(self, module):
if not self.valid_module(module):
self._module = None
self.failed = True
self.message = 'Invalid module: {0}.'.format(module)
raise HomebrewException(self.message)
else:
self._module = module
return module
@property
def path(self):
return self._path
@path.setter
def path(self, path):
if not self.valid_path(path):
self._path = []
self.failed = True
self.message = 'Invalid path: {0}.'.format(path)
raise HomebrewException(self.message)
else:
if isinstance(path, basestring):
self._path = path.split(':')
else:
self._path = path
return path
@property
def brew_path(self):
return self._brew_path
@brew_path.setter
def brew_path(self, brew_path):
if not self.valid_brew_path(brew_path):
self._brew_path = None
self.failed = True
self.message = 'Invalid brew_path: {0}.'.format(brew_path)
raise HomebrewException(self.message)
else:
self._brew_path = brew_path
return brew_path
@property
def params(self):
return self._params
@params.setter
def params(self, params):
self._params = self.module.params
return self._params
@property
def current_package(self):
return self._current_package
@current_package.setter
def current_package(self, package):
if not self.valid_package(package):
self._current_package = None
self.failed = True
self.message = 'Invalid package: {0}.'.format(package)
raise HomebrewException(self.message)
else:
self._current_package = package
return package
# /class properties -------------------------------------------- }}}
def __init__(self, module, path, packages=None, state=None,
update_homebrew=False, upgrade_all=False,
install_options=None):
if not install_options:
install_options = list()
self._setup_status_vars()
self._setup_instance_vars(module=module, path=path, packages=packages,
state=state, update_homebrew=update_homebrew,
upgrade_all=upgrade_all,
install_options=install_options, )
self._prep()
# prep --------------------------------------------------------- {{{
def _setup_status_vars(self):
self.failed = False
self.changed = False
self.changed_count = 0
self.unchanged_count = 0
self.message = ''
def _setup_instance_vars(self, **kwargs):
for key, val in iteritems(kwargs):
setattr(self, key, val)
def _prep(self):
self._prep_brew_path()
def _prep_brew_path(self):
if not self.module:
self.brew_path = None
self.failed = True
self.message = 'AnsibleModule not set.'
raise HomebrewException(self.message)
self.brew_path = self.module.get_bin_path(
'brew',
required=True,
opt_dirs=self.path,
)
if not self.brew_path:
self.brew_path = None
self.failed = True
self.message = 'Unable to locate homebrew executable.'
raise HomebrewException('Unable to locate homebrew executable.')
return self.brew_path
def _status(self):
return (self.failed, self.changed, self.message)
# /prep -------------------------------------------------------- }}}
def run(self):
try:
self._run()
except HomebrewException:
pass
if not self.failed and (self.changed_count + self.unchanged_count > 1):
self.message = "Changed: %d, Unchanged: %d" % (
self.changed_count,
self.unchanged_count,
)
(failed, changed, message) = self._status()
return (failed, changed, message)
# checks ------------------------------------------------------- {{{
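# A formula is considered installed when `brew info <formula>` reports it as
# "Built from source" or "Poured from bottle".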
def _current_package_is_installed(self):
if not self.valid_package(self.current_package):
self.failed = True
self.message = 'Invalid package: {0}.'.format(self.current_package)
raise HomebrewException(self.message)
cmd = [
"{brew_path}".format(brew_path=self.brew_path),
"info",
self.current_package,
]
rc, out, err = self.module.run_command(cmd)
for line in out.split('\n'):
if (
re.search(r'Built from source', line)
or re.search(r'Poured from bottle', line)
):
return True
return False
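# The module treats a non-zero exit status from `brew outdated <formula>` as
# "an upgrade is available" for that formula.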
def _current_package_is_outdated(self):
if not self.valid_package(self.current_package):
return False
rc, out, err = self.module.run_command([
self.brew_path,
'outdated',
self.current_package,
])
return rc != 0
def _current_package_is_installed_from_head(self):
if not Homebrew.valid_package(self.current_package):
return False
elif not self._current_package_is_installed():
return False
rc, out, err = self.module.run_command([
self.brew_path,
'info',
self.current_package,
])
try:
version_info = [line for line in out.split('\n') if line][0]
except IndexError:
return False
return version_info.split(' ')[-1] == 'HEAD'
# /checks ------------------------------------------------------ }}}
# commands ----------------------------------------------------- {{{
def _run(self):
if self.update_homebrew:
self._update_homebrew()
if self.upgrade_all:
self._upgrade_all()
if self.packages:
if self.state == 'installed':
return self._install_packages()
elif self.state == 'upgraded':
return self._upgrade_packages()
elif self.state == 'head':
return self._install_packages()
elif self.state == 'linked':
return self._link_packages()
elif self.state == 'unlinked':
return self._unlink_packages()
elif self.state == 'absent':
return self._uninstall_packages()
# updated -------------------------------- {{{
def _update_homebrew(self):
rc, out, err = self.module.run_command([
self.brew_path,
'update',
])
if rc == 0:
if out and isinstance(out, basestring):
already_updated = any(
re.search(r'Already up-to-date.', s.strip(), re.IGNORECASE)
for s in out.split('\n')
if s
)
if not already_updated:
self.changed = True
self.message = 'Homebrew updated successfully.'
else:
self.message = 'Homebrew already up-to-date.'
return True
else:
self.failed = True
self.message = err.strip()
raise HomebrewException(self.message)
# /updated ------------------------------- }}}
# _upgrade_all --------------------------- {{{
def _upgrade_all(self):
rc, out, err = self.module.run_command([
self.brew_path,
'upgrade',
])
if rc == 0:
if not out:
self.message = 'Homebrew packages already upgraded.'
else:
self.changed = True
self.message = 'Homebrew upgraded.'
return True
else:
self.failed = True
self.message = err.strip()
raise HomebrewException(self.message)
# /_upgrade_all -------------------------- }}}
# installed ------------------------------ {{{
def _install_current_package(self):
if not self.valid_package(self.current_package):
self.failed = True
self.message = 'Invalid package: {0}.'.format(self.current_package)
raise HomebrewException(self.message)
if self._current_package_is_installed():
self.unchanged_count += 1
self.message = 'Package already installed: {0}'.format(
self.current_package,
)
return True
if self.module.check_mode:
self.changed = True
self.message = 'Package would be installed: {0}'.format(
self.current_package
)
raise HomebrewException(self.message)
if self.state == 'head':
head = '--HEAD'
else:
head = None
opts = (
[self.brew_path, 'install']
+ self.install_options
+ [self.current_package, head]
)
cmd = [opt for opt in opts if opt]
rc, out, err = self.module.run_command(cmd)
if self._current_package_is_installed():
self.changed_count += 1
self.changed = True
self.message = 'Package installed: {0}'.format(self.current_package)
return True
else:
self.failed = True
self.message = err.strip()
raise HomebrewException(self.message)
def _install_packages(self):
for package in self.packages:
self.current_package = package
self._install_current_package()
return True
# /installed ----------------------------- }}}
# upgraded ------------------------------- {{{
def _upgrade_current_package(self):
command = 'upgrade'
if not self.valid_package(self.current_package):
self.failed = True
self.message = 'Invalid package: {0}.'.format(self.current_package)
raise HomebrewException(self.message)
if not self._current_package_is_installed():
command = 'install'
if self._current_package_is_installed() and not self._current_package_is_outdated():
self.message = 'Package is already upgraded: {0}'.format(
self.current_package,
)
self.unchanged_count += 1
return True
if self.module.check_mode:
self.changed = True
self.message = 'Package would be upgraded: {0}'.format(
self.current_package
)
raise HomebrewException(self.message)
opts = (
[self.brew_path, command]
+ self.install_options
+ [self.current_package]
)
cmd = [opt for opt in opts if opt]
rc, out, err = self.module.run_command(cmd)
if self._current_package_is_installed() and not self._current_package_is_outdated():
self.changed_count += 1
self.changed = True
self.message = 'Package upgraded: {0}'.format(self.current_package)
return True
else:
self.failed = True
self.message = err.strip()
raise HomebrewException(self.message)
def _upgrade_all_packages(self):
opts = (
[self.brew_path, 'upgrade']
+ self.install_options
)
cmd = [opt for opt in opts if opt]
rc, out, err = self.module.run_command(cmd)
if rc == 0:
self.changed = True
self.message = 'All packages upgraded.'
return True
else:
self.failed = True
self.message = err.strip()
raise HomebrewException(self.message)
def _upgrade_packages(self):
if not self.packages:
self._upgrade_all_packages()
else:
for package in self.packages:
self.current_package = package
self._upgrade_current_package()
return True
# /upgraded ------------------------------ }}}
# uninstalled ---------------------------- {{{
def _uninstall_current_package(self):
if not self.valid_package(self.current_package):
self.failed = True
self.message = 'Invalid package: {0}.'.format(self.current_package)
raise HomebrewException(self.message)
if not self._current_package_is_installed():
self.unchanged_count += 1
self.message = 'Package already uninstalled: {0}'.format(
self.current_package,
)
return True
if self.module.check_mode:
self.changed = True
self.message = 'Package would be uninstalled: {0}'.format(
self.current_package
)
raise HomebrewException(self.message)
opts = (
[self.brew_path, 'uninstall']
+ self.install_options
+ [self.current_package]
)
cmd = [opt for opt in opts if opt]
rc, out, err = self.module.run_command(cmd)
if not self._current_package_is_installed():
self.changed_count += 1
self.changed = True
self.message = 'Package uninstalled: {0}'.format(self.current_package)
return True
else:
self.failed = True
self.message = err.strip()
raise HomebrewException(self.message)
def _uninstall_packages(self):
for package in self.packages:
self.current_package = package
self._uninstall_current_package()
return True
# /uninstalled ----------------------------- }}}
# linked --------------------------------- {{{
def _link_current_package(self):
if not self.valid_package(self.current_package):
self.failed = True
self.message = 'Invalid package: {0}.'.format(self.current_package)
raise HomebrewException(self.message)
if not self._current_package_is_installed():
self.failed = True
self.message = 'Package not installed: {0}.'.format(self.current_package)
raise HomebrewException(self.message)
if self.module.check_mode:
self.changed = True
self.message = 'Package would be linked: {0}'.format(
self.current_package
)
raise HomebrewException(self.message)
opts = (
[self.brew_path, 'link']
+ self.install_options
+ [self.current_package]
)
cmd = [opt for opt in opts if opt]
rc, out, err = self.module.run_command(cmd)
if rc == 0:
self.changed_count += 1
self.changed = True
self.message = 'Package linked: {0}'.format(self.current_package)
return True
else:
self.failed = True
self.message = 'Package could not be linked: {0}.'.format(self.current_package)
raise HomebrewException(self.message)
def _link_packages(self):
for package in self.packages:
self.current_package = package
self._link_current_package()
return True
# /linked -------------------------------- }}}
# unlinked ------------------------------- {{{
def _unlink_current_package(self):
if not self.valid_package(self.current_package):
self.failed = True
self.message = 'Invalid package: {0}.'.format(self.current_package)
raise HomebrewException(self.message)
if not self._current_package_is_installed():
self.failed = True
self.message = 'Package not installed: {0}.'.format(self.current_package)
raise HomebrewException(self.message)
if self.module.check_mode:
self.changed = True
self.message = 'Package would be unlinked: {0}'.format(
self.current_package
)
raise HomebrewException(self.message)
opts = (
[self.brew_path, 'unlink']
+ self.install_options
+ [self.current_package]
)
cmd = [opt for opt in opts if opt]
rc, out, err = self.module.run_command(cmd)
if rc == 0:
self.changed_count += 1
self.changed = True
self.message = 'Package unlinked: {0}'.format(self.current_package)
return True
else:
self.failed = True
self.message = 'Package could not be unlinked: {0}.'.format(self.current_package)
raise HomebrewException(self.message)
def _unlink_packages(self):
for package in self.packages:
self.current_package = package
self._unlink_current_package()
return True
# /unlinked ------------------------------ }}}
# /commands ---------------------------------------------------- }}}
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(
aliases=["pkg", "package", "formula"],
required=False,
type='list',
),
path=dict(
default="/usr/local/bin",
required=False,
type='path',
),
state=dict(
default="present",
choices=[
"present", "installed",
"latest", "upgraded", "head",
"linked", "unlinked",
"absent", "removed", "uninstalled",
],
),
update_homebrew=dict(
default=False,
aliases=["update-brew"],
type='bool',
),
upgrade_all=dict(
default=False,
aliases=["upgrade"],
type='bool',
),
install_options=dict(
default=None,
aliases=['options'],
type='list',
)
),
supports_check_mode=True,
)
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
p = module.params
if p['name']:
packages = p['name']
else:
packages = None
path = p['path']
if path:
path = path.split(':')
state = p['state']
if state in ('present', 'installed'):
state = 'installed'
if state in ('head', ):
state = 'head'
if state in ('latest', 'upgraded'):
state = 'upgraded'
if state == 'linked':
state = 'linked'
if state == 'unlinked':
state = 'unlinked'
if state in ('absent', 'removed', 'uninstalled'):
state = 'absent'
update_homebrew = p['update_homebrew']
upgrade_all = p['upgrade_all']
p['install_options'] = p['install_options'] or []
install_options = ['--{0}'.format(install_option)
for install_option in p['install_options']]
brew = Homebrew(module=module, path=path, packages=packages,
state=state, update_homebrew=update_homebrew,
upgrade_all=upgrade_all, install_options=install_options)
(failed, changed, message) = brew.run()
if failed:
module.fail_json(msg=message)
else:
module.exit_json(changed=changed, msg=message)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,609 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Daniel Jaouen <dcj24@cornell.edu>
# (c) 2016, Indrajit Raychaudhuri <irc+code@indrajit.com>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: homebrew_cask
author:
- "Indrajit Raychaudhuri (@indrajitr)"
- "Daniel Jaouen (@danieljaouen)"
- "Enric Lluelles (@enriclluelles)"
requirements:
- "python >= 2.6"
short_description: Install/uninstall homebrew casks.
description:
- Manages Homebrew casks.
version_added: "1.6"
options:
name:
description:
- name of cask to install/remove
required: true
aliases: ['pkg', 'package', 'cask']
path:
description:
- "':' separated list of paths to search for 'brew' executable."
required: false
default: '/usr/local/bin'
state:
description:
- state of the cask
choices: [ 'present', 'absent' ]
required: false
default: present
update_homebrew:
description:
- update homebrew itself first. Note that C(brew cask update) is
a synonym for C(brew update).
required: false
default: no
choices: [ "yes", "no" ]
aliases: ['update-brew']
version_added: "2.2"
install_options:
description:
- options flags to install a package
required: false
default: null
aliases: ['options']
version_added: "2.2"
'''
EXAMPLES = '''
- homebrew_cask:
name: alfred
state: present
- homebrew_cask:
name: alfred
state: absent
- homebrew_cask:
name: alfred
state: present
install_options: 'appdir=/Applications'
- homebrew_cask:
name: alfred
state: present
install_options: 'debug,appdir=/Applications'
- homebrew_cask:
name: alfred
state: absent
install_options: force
'''
import os.path
import re
from ansible.module_utils.six import iteritems
# exceptions -------------------------------------------------------------- {{{
class HomebrewCaskException(Exception):
pass
# /exceptions ------------------------------------------------------------- }}}
# utils ------------------------------------------------------------------- {{{
def _create_regex_group(s):
lines = (line.strip() for line in s.split('\n') if line.strip())
chars = filter(None, (line.split('#')[0].strip() for line in lines))
group = r'[^' + r''.join(chars) + r']'
return re.compile(group)
# /utils ------------------------------------------------------------------ }}}
class HomebrewCask(object):
'''A class to manage Homebrew casks.'''
# class regexes ------------------------------------------------ {{{
VALID_PATH_CHARS = r'''
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
\s # spaces
: # colons
{sep} # the OS-specific path separator
. # dots
- # dashes
'''.format(sep=os.path.sep)
VALID_BREW_PATH_CHARS = r'''
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
\s # spaces
{sep} # the OS-specific path separator
. # dots
- # dashes
'''.format(sep=os.path.sep)
VALID_CASK_CHARS = r'''
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
. # dots
/ # slash (for taps)
- # dashes
'''
INVALID_PATH_REGEX = _create_regex_group(VALID_PATH_CHARS)
INVALID_BREW_PATH_REGEX = _create_regex_group(VALID_BREW_PATH_CHARS)
INVALID_CASK_REGEX = _create_regex_group(VALID_CASK_CHARS)
# /class regexes ----------------------------------------------- }}}
# class validations -------------------------------------------- {{{
@classmethod
def valid_path(cls, path):
'''
`path` must be one of:
- list of paths
- a string containing only:
- alphanumeric characters
- dashes
- dots
- spaces
- colons
- os.path.sep
'''
if isinstance(path, basestring):
return not cls.INVALID_PATH_REGEX.search(path)
try:
iter(path)
except TypeError:
return False
else:
paths = path
return all(cls.valid_brew_path(path_) for path_ in paths)
@classmethod
def valid_brew_path(cls, brew_path):
'''
`brew_path` must be one of:
- None
- a string containing only:
- alphanumeric characters
- dashes
- dots
- spaces
- os.path.sep
'''
if brew_path is None:
return True
return (
isinstance(brew_path, basestring)
and not cls.INVALID_BREW_PATH_REGEX.search(brew_path)
)
@classmethod
def valid_cask(cls, cask):
'''A valid cask is either None or alphanumeric with dots, dashes and slashes (for taps).'''
if cask is None:
return True
return (
isinstance(cask, basestring)
and not cls.INVALID_CASK_REGEX.search(cask)
)
@classmethod
def valid_state(cls, state):
'''
A valid state is one of:
- installed
- absent
'''
if state is None:
return True
else:
return (
isinstance(state, basestring)
and state.lower() in (
'installed',
'absent',
)
)
@classmethod
def valid_module(cls, module):
'''A valid module is an instance of AnsibleModule.'''
return isinstance(module, AnsibleModule)
# /class validations ------------------------------------------- }}}
# class properties --------------------------------------------- {{{
@property
def module(self):
return self._module
@module.setter
def module(self, module):
if not self.valid_module(module):
self._module = None
self.failed = True
self.message = 'Invalid module: {0}.'.format(module)
raise HomebrewCaskException(self.message)
else:
self._module = module
return module
@property
def path(self):
return self._path
@path.setter
def path(self, path):
if not self.valid_path(path):
self._path = []
self.failed = True
self.message = 'Invalid path: {0}.'.format(path)
raise HomebrewCaskException(self.message)
else:
if isinstance(path, basestring):
self._path = path.split(':')
else:
self._path = path
return path
@property
def brew_path(self):
return self._brew_path
@brew_path.setter
def brew_path(self, brew_path):
if not self.valid_brew_path(brew_path):
self._brew_path = None
self.failed = True
self.message = 'Invalid brew_path: {0}.'.format(brew_path)
raise HomebrewCaskException(self.message)
else:
self._brew_path = brew_path
return brew_path
@property
def params(self):
return self._params
@params.setter
def params(self, params):
self._params = self.module.params
return self._params
@property
def current_cask(self):
return self._current_cask
@current_cask.setter
def current_cask(self, cask):
if not self.valid_cask(cask):
self._current_cask = None
self.failed = True
self.message = 'Invalid cask: {0}.'.format(cask)
raise HomebrewCaskException(self.message)
else:
self._current_cask = cask
return cask
# /class properties -------------------------------------------- }}}
    def __init__(self, module, path=None, casks=None, state=None,
                 update_homebrew=False, install_options=None):
if not install_options:
install_options = list()
self._setup_status_vars()
self._setup_instance_vars(module=module, path=path, casks=casks,
state=state, update_homebrew=update_homebrew,
install_options=install_options,)
self._prep()
# prep --------------------------------------------------------- {{{
def _setup_status_vars(self):
self.failed = False
self.changed = False
self.changed_count = 0
self.unchanged_count = 0
self.message = ''
def _setup_instance_vars(self, **kwargs):
for key, val in iteritems(kwargs):
setattr(self, key, val)
def _prep(self):
self._prep_brew_path()
def _prep_brew_path(self):
if not self.module:
self.brew_path = None
self.failed = True
self.message = 'AnsibleModule not set.'
raise HomebrewCaskException(self.message)
self.brew_path = self.module.get_bin_path(
'brew',
required=True,
opt_dirs=self.path,
)
if not self.brew_path:
self.brew_path = None
self.failed = True
self.message = 'Unable to locate homebrew executable.'
            raise HomebrewCaskException(self.message)
return self.brew_path
def _status(self):
return (self.failed, self.changed, self.message)
# /prep -------------------------------------------------------- }}}
def run(self):
try:
self._run()
except HomebrewCaskException:
pass
if not self.failed and (self.changed_count + self.unchanged_count > 1):
self.message = "Changed: %d, Unchanged: %d" % (
self.changed_count,
self.unchanged_count,
)
(failed, changed, message) = self._status()
return (failed, changed, message)
# checks ------------------------------------------------------- {{{
def _current_cask_is_installed(self):
if not self.valid_cask(self.current_cask):
self.failed = True
self.message = 'Invalid cask: {0}.'.format(self.current_cask)
raise HomebrewCaskException(self.message)
cmd = [
"{brew_path}".format(brew_path=self.brew_path),
"cask",
"list"
]
rc, out, err = self.module.run_command(cmd)
if 'nothing to list' in err:
return False
elif rc == 0:
casks = [cask_.strip() for cask_ in out.split('\n') if cask_.strip()]
return self.current_cask in casks
else:
self.failed = True
self.message = err.strip()
raise HomebrewCaskException(self.message)
# /checks ------------------------------------------------------ }}}
# commands ----------------------------------------------------- {{{
def _run(self):
if self.update_homebrew:
self._update_homebrew()
if self.state == 'installed':
return self._install_casks()
elif self.state == 'absent':
return self._uninstall_casks()
if self.command:
return self._command()
# updated -------------------------------- {{{
def _update_homebrew(self):
rc, out, err = self.module.run_command([
self.brew_path,
'update',
])
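        # brew prints "Already up-to-date." when nothing changed; scan the
        # output for that marker to decide whether to report a change.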
if rc == 0:
if out and isinstance(out, basestring):
already_updated = any(
re.search(r'Already up-to-date.', s.strip(), re.IGNORECASE)
for s in out.split('\n')
if s
)
if not already_updated:
self.changed = True
self.message = 'Homebrew updated successfully.'
else:
self.message = 'Homebrew already up-to-date.'
return True
else:
self.failed = True
self.message = err.strip()
raise HomebrewCaskException(self.message)
# /updated ------------------------------- }}}
# installed ------------------------------ {{{
def _install_current_cask(self):
if not self.valid_cask(self.current_cask):
self.failed = True
self.message = 'Invalid cask: {0}.'.format(self.current_cask)
raise HomebrewCaskException(self.message)
if self._current_cask_is_installed():
self.unchanged_count += 1
self.message = 'Cask already installed: {0}'.format(
self.current_cask,
)
return True
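        # In check mode we raise instead of running brew; run() swallows the
        # exception and reports the would-be change via self.changed/self.message.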
if self.module.check_mode:
self.changed = True
self.message = 'Cask would be installed: {0}'.format(
self.current_cask
)
raise HomebrewCaskException(self.message)
opts = (
[self.brew_path, 'cask', 'install', self.current_cask]
+ self.install_options
)
cmd = [opt for opt in opts if opt]
rc, out, err = self.module.run_command(cmd)
if self._current_cask_is_installed():
self.changed_count += 1
self.changed = True
self.message = 'Cask installed: {0}'.format(self.current_cask)
return True
else:
self.failed = True
self.message = err.strip()
raise HomebrewCaskException(self.message)
def _install_casks(self):
for cask in self.casks:
self.current_cask = cask
self._install_current_cask()
return True
# /installed ----------------------------- }}}
# uninstalled ---------------------------- {{{
def _uninstall_current_cask(self):
if not self.valid_cask(self.current_cask):
self.failed = True
self.message = 'Invalid cask: {0}.'.format(self.current_cask)
raise HomebrewCaskException(self.message)
if not self._current_cask_is_installed():
self.unchanged_count += 1
self.message = 'Cask already uninstalled: {0}'.format(
self.current_cask,
)
return True
if self.module.check_mode:
self.changed = True
self.message = 'Cask would be uninstalled: {0}'.format(
self.current_cask
)
raise HomebrewCaskException(self.message)
cmd = [opt
for opt in (self.brew_path, 'cask', 'uninstall', self.current_cask)
if opt]
rc, out, err = self.module.run_command(cmd)
if not self._current_cask_is_installed():
self.changed_count += 1
self.changed = True
self.message = 'Cask uninstalled: {0}'.format(self.current_cask)
return True
else:
self.failed = True
self.message = err.strip()
raise HomebrewCaskException(self.message)
def _uninstall_casks(self):
for cask in self.casks:
self.current_cask = cask
self._uninstall_current_cask()
return True
# /uninstalled ----------------------------- }}}
# /commands ---------------------------------------------------- }}}
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(
aliases=["pkg", "package", "cask"],
required=False,
type='list',
),
path=dict(
default="/usr/local/bin",
required=False,
type='path',
),
state=dict(
default="present",
choices=[
"present", "installed",
"absent", "removed", "uninstalled",
],
),
update_homebrew=dict(
default=False,
aliases=["update-brew"],
type='bool',
),
install_options=dict(
default=None,
aliases=['options'],
type='list',
)
),
supports_check_mode=True,
)
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
p = module.params
if p['name']:
casks = p['name']
else:
casks = None
path = p['path']
if path:
path = path.split(':')
state = p['state']
if state in ('present', 'installed'):
state = 'installed'
if state in ('absent', 'removed', 'uninstalled'):
state = 'absent'
update_homebrew = p['update_homebrew']
p['install_options'] = p['install_options'] or []
install_options = ['--{0}'.format(install_option)
for install_option in p['install_options']]
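    # Each requested option becomes a "--<option>" flag for "brew cask install",
    # e.g. install_options: 'appdir=/Applications' is passed as
    # "--appdir=/Applications".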
brew_cask = HomebrewCask(module=module, path=path, casks=casks,
state=state, update_homebrew=update_homebrew,
install_options=install_options)
(failed, changed, message) = brew_cask.run()
if failed:
module.fail_json(msg=message)
else:
module.exit_json(changed=changed, msg=message)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,264 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Daniel Jaouen <dcj24@cornell.edu>
# (c) 2016, Indrajit Raychaudhuri <irc+code@indrajit.com>
#
# Based on homebrew (Andrew Dunham <andrew@du.nham.ca>)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import re
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: homebrew_tap
author:
- "Indrajit Raychaudhuri (@indrajitr)"
- "Daniel Jaouen (@danieljaouen)"
short_description: Tap a Homebrew repository.
description:
- Tap external Homebrew repositories.
version_added: "1.6"
options:
name:
description:
- The GitHub user/organization repository to tap.
required: true
aliases: ['tap']
url:
description:
- The optional git URL of the repository to tap. The URL is not
assumed to be on GitHub, and the protocol doesn't have to be HTTP.
Any location and protocol that git can handle is fine.
required: false
version_added: "2.2"
note:
- I(name) option may not be a list of multiple taps (but a single
tap instead) when this option is provided.
state:
description:
- state of the repository.
choices: [ 'present', 'absent' ]
required: false
default: 'present'
requirements: [ homebrew ]
'''
EXAMPLES = '''
- homebrew_tap:
name: homebrew/dupes
- homebrew_tap:
name: homebrew/dupes
state: absent
- homebrew_tap:
name: homebrew/dupes,homebrew/science
state: present
- homebrew_tap:
name: telemachus/brew
url: 'https://bitbucket.org/telemachus/brew'
'''
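# A tap name has the form "<user>/<repo>", where the repository part may carry
# an optional "homebrew-" prefix, e.g. "homebrew/dupes" or
# "telemachus/homebrew-brew".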
def a_valid_tap(tap):
'''Returns True if the tap is valid.'''
regex = re.compile(r'^([\w-]+)/(homebrew-)?([\w-]+)$')
return regex.match(tap)
def already_tapped(module, brew_path, tap):
'''Returns True if already tapped.'''
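    # "brew tap" lists installed taps one per line; strip the optional
    # "homebrew-" prefix from the requested name so that, for example,
    # "user/homebrew-repo" matches an installed "user/repo".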
rc, out, err = module.run_command([
brew_path,
'tap',
])
taps = [tap_.strip().lower() for tap_ in out.split('\n') if tap_]
tap_name = re.sub('homebrew-', '', tap.lower())
return tap_name in taps
def add_tap(module, brew_path, tap, url=None):
'''Adds a single tap.'''
failed, changed, msg = False, False, ''
if not a_valid_tap(tap):
failed = True
msg = 'not a valid tap: %s' % tap
elif not already_tapped(module, brew_path, tap):
if module.check_mode:
module.exit_json(changed=True)
        cmd = [brew_path, 'tap', tap]
        if url:
            cmd.append(url)
        rc, out, err = module.run_command(cmd)
if already_tapped(module, brew_path, tap):
changed = True
msg = 'successfully tapped: %s' % tap
else:
failed = True
msg = 'failed to tap: %s' % tap
else:
msg = 'already tapped: %s' % tap
return (failed, changed, msg)
def add_taps(module, brew_path, taps):
'''Adds one or more taps.'''
failed, unchanged, added, msg = False, 0, 0, ''
for tap in taps:
(failed, changed, msg) = add_tap(module, brew_path, tap)
if failed:
break
if changed:
added += 1
else:
unchanged += 1
if failed:
msg = 'added: %d, unchanged: %d, error: ' + msg
msg = msg % (added, unchanged)
elif added:
changed = True
msg = 'added: %d, unchanged: %d' % (added, unchanged)
else:
msg = 'added: %d, unchanged: %d' % (added, unchanged)
return (failed, changed, msg)
def remove_tap(module, brew_path, tap):
'''Removes a single tap.'''
failed, changed, msg = False, False, ''
if not a_valid_tap(tap):
failed = True
msg = 'not a valid tap: %s' % tap
elif already_tapped(module, brew_path, tap):
if module.check_mode:
module.exit_json(changed=True)
rc, out, err = module.run_command([
brew_path,
'untap',
tap,
])
if not already_tapped(module, brew_path, tap):
changed = True
msg = 'successfully untapped: %s' % tap
else:
failed = True
msg = 'failed to untap: %s' % tap
else:
msg = 'already untapped: %s' % tap
return (failed, changed, msg)
def remove_taps(module, brew_path, taps):
'''Removes one or more taps.'''
failed, unchanged, removed, msg = False, 0, 0, ''
for tap in taps:
(failed, changed, msg) = remove_tap(module, brew_path, tap)
if failed:
break
if changed:
removed += 1
else:
unchanged += 1
if failed:
msg = 'removed: %d, unchanged: %d, error: ' + msg
msg = msg % (removed, unchanged)
elif removed:
changed = True
msg = 'removed: %d, unchanged: %d' % (removed, unchanged)
else:
msg = 'removed: %d, unchanged: %d' % (removed, unchanged)
return (failed, changed, msg)
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(aliases=['tap'], type='list', required=True),
url=dict(default=None, required=False),
state=dict(default='present', choices=['present', 'absent']),
),
supports_check_mode=True,
)
brew_path = module.get_bin_path(
'brew',
required=True,
opt_dirs=['/usr/local/bin']
)
taps = module.params['name']
url = module.params['url']
if module.params['state'] == 'present':
if url is None:
# No tap URL provided explicitly, continue with bulk addition
# of all the taps.
failed, changed, msg = add_taps(module, brew_path, taps)
else:
            # When a tap URL is provided explicitly, we allow adding
            # *single* tap only. Validate and proceed to add single tap.
            if len(taps) > 1:
                msg = "List of multiple taps may not be provided with 'url' option."
module.fail_json(msg=msg)
else:
failed, changed, msg = add_tap(module, brew_path, taps[0], url)
if failed:
module.fail_json(msg=msg)
else:
module.exit_json(changed=changed, msg=msg)
elif module.params['state'] == 'absent':
failed, changed, msg = remove_taps(module, brew_path, taps)
if failed:
module.fail_json(msg=msg)
else:
module.exit_json(changed=changed, msg=msg)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,275 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Jakub Jirutka <jakub@jirutka.cz>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import shutil
from os import path
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: layman
author: "Jakub Jirutka (@jirutka)"
version_added: "1.6"
short_description: Manage Gentoo overlays
description:
    - Uses Layman to manage additional repositories for the Portage package manager on Gentoo Linux.
      Please note that Layman must be installed on a managed node prior to using this module.
requirements:
- "python >= 2.6"
- layman python module
options:
name:
description:
- The overlay id to install, synchronize, or uninstall.
Use 'ALL' to sync all of the installed overlays (can be used only when C(state=updated)).
required: true
list_url:
description:
      - A URL of an alternative overlays list that defines the overlay to install.
        This list will be fetched and saved under C(${overlay_defs}/${name}.xml), where
        C(overlay_defs) is read from the Layman configuration.
required: false
state:
description:
- Whether to install (C(present)), sync (C(updated)), or uninstall (C(absent)) the overlay.
required: false
default: present
choices: [present, absent, updated]
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be
set to C(no) when no other option exists. Prior to 1.9.3 the code
defaulted to C(no).
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: '1.9.3'
'''
EXAMPLES = '''
# Install the overlay 'mozilla' which is on the central overlays list.
- layman:
name: mozilla
# Install the overlay 'cvut' from the specified alternative list.
- layman:
name: cvut
list_url: 'http://raw.github.com/cvut/gentoo-overlay/master/overlay.xml'
# Update (sync) the overlay 'cvut', or install if not installed yet.
- layman:
name: cvut
list_url: 'http://raw.github.com/cvut/gentoo-overlay/master/overlay.xml'
state: updated
# Update (sync) all of the installed overlays.
- layman:
name: ALL
state: updated
# Uninstall the overlay 'cvut'.
- layman:
name: cvut
state: absent
'''
USERAGENT = 'ansible-httpget'
try:
from layman.api import LaymanAPI
from layman.config import BareConfig
HAS_LAYMAN_API = True
except ImportError:
HAS_LAYMAN_API = False
class ModuleError(Exception): pass
def init_layman(config=None):
'''Returns the initialized ``LaymanAPI``.
:param config: the layman's configuration to use (optional)
'''
if config is None:
config = BareConfig(read_configfile=True, quietness=1)
return LaymanAPI(config)
def download_url(module, url, dest):
'''
:param url: the URL to download
:param dest: the absolute path of where to save the downloaded content to;
it must be writable and not a directory
:raises ModuleError
'''
# Hack to add params in the form that fetch_url expects
module.params['http_agent'] = USERAGENT
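    # fetch_url() also honours module.params['validate_certs'], so certificate
    # checking follows the module's validate_certs option.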
response, info = fetch_url(module, url)
if info['status'] != 200:
raise ModuleError("Failed to get %s: %s" % (url, info['msg']))
try:
with open(dest, 'w') as f:
shutil.copyfileobj(response, f)
except IOError as e:
raise ModuleError("Failed to write: %s" % str(e))
def install_overlay(module, name, list_url=None):
'''Installs the overlay repository. If not on the central overlays list,
then :list_url of an alternative list must be provided. The list will be
    fetched and saved under ``${overlay_defs}/${name}.xml`` (the location of the
    ``overlay_defs`` directory is read from the Layman configuration).
:param name: the overlay id
:param list_url: the URL of the remote repositories list to look for the overlay
definition (optional, default: None)
:returns: True if the overlay was installed, or False if already exists
(i.e. nothing has changed)
:raises ModuleError
'''
# read Layman configuration
layman_conf = BareConfig(read_configfile=True)
layman = init_layman(layman_conf)
if layman.is_installed(name):
return False
if module.check_mode:
mymsg = 'Would add layman repo \'' + name + '\''
module.exit_json(changed=True, msg=mymsg)
if not layman.is_repo(name):
if not list_url:
raise ModuleError("Overlay '%s' is not on the list of known " \
"overlays and URL of the remote list was not provided." % name)
overlay_defs = layman_conf.get_option('overlay_defs')
dest = path.join(overlay_defs, name + '.xml')
download_url(module, list_url, dest)
# reload config
layman = init_layman()
if not layman.add_repos(name):
raise ModuleError(layman.get_errors())
return True
def uninstall_overlay(module, name):
'''Uninstalls the given overlay repository from the system.
:param name: the overlay id to uninstall
:returns: True if the overlay was uninstalled, or False if doesn't exist
(i.e. nothing has changed)
:raises ModuleError
'''
layman = init_layman()
if not layman.is_installed(name):
return False
if module.check_mode:
mymsg = 'Would remove layman repo \'' + name + '\''
module.exit_json(changed=True, msg=mymsg)
layman.delete_repos(name)
if layman.get_errors(): raise ModuleError(layman.get_errors())
return True
def sync_overlay(name):
'''Synchronizes the specified overlay repository.
:param name: the overlay repository id to sync
:raises ModuleError
'''
layman = init_layman()
if not layman.sync(name):
messages = [ str(item[1]) for item in layman.sync_results[2] ]
raise ModuleError(messages)
def sync_overlays():
'''Synchronize all of the installed overlays.
:raises ModuleError
'''
layman = init_layman()
for name in layman.get_installed():
sync_overlay(name)
def main():
# define module
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
list_url = dict(aliases=['url']),
state = dict(default="present", choices=['present', 'absent', 'updated']),
validate_certs = dict(required=False, default=True, type='bool'),
),
supports_check_mode=True
)
if not HAS_LAYMAN_API:
module.fail_json(msg='Layman is not installed')
state, name, url = (module.params[key] for key in ['state', 'name', 'list_url'])
changed = False
try:
if state == 'present':
changed = install_overlay(module, name, url)
elif state == 'updated':
if name == 'ALL':
sync_overlays()
elif install_overlay(module, name, url):
changed = True
else:
sync_overlay(name)
else:
changed = uninstall_overlay(module, name)
except ModuleError as e:
module.fail_json(msg=e.message)
else:
module.exit_json(changed=changed, name=name)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,237 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Jimmy Tang <jcftang@gmail.com>
# Based on opkg (Patrick Pelletier <pp.pelletier@gmail.com>), pacman
# (Afterburn) and pkgin (Shaun Zinck) modules
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: macports
author: "Jimmy Tang (@jcftang)"
short_description: Package manager for MacPorts
description:
- Manages MacPorts packages
version_added: "1.1"
options:
name:
description:
- name of package to install/remove
required: true
state:
description:
- state of the package
choices: [ 'present', 'absent', 'active', 'inactive' ]
required: false
default: present
update_cache:
description:
- update the package db first
required: false
default: "no"
choices: [ "yes", "no" ]
notes: []
'''
EXAMPLES = '''
- macports:
name: foo
state: present
- macports:
name: foo
state: present
update_cache: yes
- macports:
name: foo
state: absent
- macports:
name: foo
state: active
- macports:
name: foo
state: inactive
'''
import pipes
def update_package_db(module, port_path):
""" Updates packages list. """
rc, out, err = module.run_command("%s sync" % port_path)
if rc != 0:
module.fail_json(msg="could not update package db")
def query_package(module, port_path, name, state="present"):
""" Returns whether a package is installed or not. """
if state == "present":
rc, out, err = module.run_command("%s installed | grep -q ^.*%s" % (pipes.quote(port_path), pipes.quote(name)), use_unsafe_shell=True)
if rc == 0:
return True
return False
elif state == "active":
rc, out, err = module.run_command("%s installed %s | grep -q active" % (pipes.quote(port_path), pipes.quote(name)), use_unsafe_shell=True)
if rc == 0:
return True
return False
def remove_packages(module, port_path, packages):
""" Uninstalls one or more packages if installed. """
remove_c = 0
    # Using a for loop in case of error, so we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
if not query_package(module, port_path, package):
continue
rc, out, err = module.run_command("%s uninstall %s" % (port_path, package))
if query_package(module, port_path, package):
module.fail_json(msg="failed to remove %s: %s" % (package, out))
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, port_path, packages):
""" Installs one or more packages if not already installed. """
install_c = 0
for package in packages:
if query_package(module, port_path, package):
continue
rc, out, err = module.run_command("%s install %s" % (port_path, package))
if not query_package(module, port_path, package):
module.fail_json(msg="failed to install %s: %s" % (package, out))
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg="installed %s package(s)" % (install_c))
module.exit_json(changed=False, msg="package(s) already present")
def activate_packages(module, port_path, packages):
""" Activate a package if it's inactive. """
activate_c = 0
for package in packages:
if not query_package(module, port_path, package):
module.fail_json(msg="failed to activate %s, package(s) not present" % (package))
if query_package(module, port_path, package, state="active"):
continue
rc, out, err = module.run_command("%s activate %s" % (port_path, package))
if not query_package(module, port_path, package, state="active"):
module.fail_json(msg="failed to activate %s: %s" % (package, out))
activate_c += 1
if activate_c > 0:
module.exit_json(changed=True, msg="activated %s package(s)" % (activate_c))
module.exit_json(changed=False, msg="package(s) already active")
def deactivate_packages(module, port_path, packages):
""" Deactivate a package if it's active. """
deactivated_c = 0
for package in packages:
if not query_package(module, port_path, package):
module.fail_json(msg="failed to activate %s, package(s) not present" % (package))
if not query_package(module, port_path, package, state="active"):
continue
rc, out, err = module.run_command("%s deactivate %s" % (port_path, package))
if query_package(module, port_path, package, state="active"):
module.fail_json(msg="failed to deactivated %s: %s" % (package, out))
deactivated_c += 1
if deactivated_c > 0:
module.exit_json(changed=True, msg="deactivated %s package(s)" % (deactivated_c))
module.exit_json(changed=False, msg="package(s) already inactive")
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(aliases=["pkg"], required=True),
state = dict(default="present", choices=["present", "installed", "absent", "removed", "active", "inactive"]),
update_cache = dict(default="no", aliases=["update-cache"], type='bool')
)
)
port_path = module.get_bin_path('port', True, ['/opt/local/bin'])
p = module.params
if p["update_cache"]:
update_package_db(module, port_path)
pkgs = p["name"].split(",")
if p["state"] in ["present", "installed"]:
install_packages(module, port_path, pkgs)
elif p["state"] in ["absent", "removed"]:
remove_packages(module, port_path, pkgs)
elif p["state"] == "active":
activate_packages(module, port_path, pkgs)
elif p["state"] == "inactive":
deactivate_packages(module, port_path, pkgs)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,552 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Patrik Lundin <patrik@sigterm.se>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import platform
import re
import shlex
import sqlite3
from distutils.version import StrictVersion
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: openbsd_pkg
author: "Patrik Lundin (@eest)"
version_added: "1.1"
short_description: Manage packages on OpenBSD.
description:
- Manage packages on OpenBSD using the pkg tools.
requirements: [ "python >= 2.5" ]
options:
name:
required: true
description:
- Name of the package.
state:
required: true
choices: [ present, latest, absent ]
description:
- C(present) will make sure the package is installed.
C(latest) will make sure the latest version of the package is installed.
C(absent) will make sure the specified package is not installed.
build:
required: false
choices: [ yes, no ]
default: no
description:
- Build the package from source instead of downloading and installing
a binary. Requires that the port source tree is already installed.
Automatically builds and installs the 'sqlports' package, if it is
not already installed.
version_added: "2.1"
ports_dir:
required: false
default: /usr/ports
description:
- When used in combination with the 'build' option, allows overriding
the default ports source directory.
version_added: "2.1"
'''
EXAMPLES = '''
# Make sure nmap is installed
- openbsd_pkg:
name: nmap
state: present
# Make sure nmap is the latest version
- openbsd_pkg:
name: nmap
state: latest
# Make sure nmap is not installed
- openbsd_pkg:
name: nmap
state: absent
# Make sure nmap is installed, build it from source if it is not
- openbsd_pkg:
name: nmap
state: present
build: yes
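# Build nmap from a custom ports tree (usage sketch; adjust ports_dir to the
# actual checkout location)
- openbsd_pkg:
    name: nmap
    state: present
    build: yes
    ports_dir: /usr/local/ports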
# Specify a pkg flavour with '--'
- openbsd_pkg:
name: vim--no_x11
state: present
# Specify the default flavour to avoid ambiguity errors
- openbsd_pkg:
name: vim--
state: present
# Specify a package branch (requires at least OpenBSD 6.0)
- openbsd_pkg:
name: python%3.5
state: present
# Update all packages on the system
- openbsd_pkg:
    name: '*'
state: latest
'''
# Function used for executing commands.
def execute_command(cmd, module):
# Break command line into arguments.
# This makes run_command() use shell=False which we need to not cause shell
# expansion of special characters like '*'.
cmd_args = shlex.split(cmd)
return module.run_command(cmd_args)
# Function used to find out if a package is currently installed.
def get_package_state(name, pkg_spec, module):
info_cmd = 'pkg_info -Iq'
command = "%s inst:%s" % (info_cmd, name)
rc, stdout, stderr = execute_command(command, module)
if stderr:
module.fail_json(msg="failed in get_package_state(): " + stderr)
if stdout:
# If the requested package name is just a stem, like "python", we may
# find multiple packages with that name.
pkg_spec['installed_names'] = [name for name in stdout.splitlines()]
module.debug("get_package_state(): installed_names = %s" % pkg_spec['installed_names'])
return True
else:
return False
# Function used to make sure a package is present.
def package_present(name, installed_state, pkg_spec, module):
build = module.params['build']
if module.check_mode:
install_cmd = 'pkg_add -Imn'
else:
if build is True:
port_dir = "%s/%s" % (module.params['ports_dir'], get_package_source_path(name, pkg_spec, module))
if os.path.isdir(port_dir):
if pkg_spec['flavor']:
flavors = pkg_spec['flavor'].replace('-', ' ')
install_cmd = "cd %s && make clean=depends && FLAVOR=\"%s\" make install && make clean=depends" % (port_dir, flavors)
elif pkg_spec['subpackage']:
install_cmd = "cd %s && make clean=depends && SUBPACKAGE=\"%s\" make install && make clean=depends" % (port_dir, pkg_spec['subpackage'])
else:
install_cmd = "cd %s && make install && make clean=depends" % (port_dir)
else:
module.fail_json(msg="the port source directory %s does not exist" % (port_dir))
else:
install_cmd = 'pkg_add -Im'
if installed_state is False:
# Attempt to install the package
if build is True and not module.check_mode:
            (rc, stdout, stderr) = module.run_command(install_cmd, use_unsafe_shell=True)
else:
(rc, stdout, stderr) = execute_command("%s %s" % (install_cmd, name), module)
# The behaviour of pkg_add is a bit different depending on if a
# specific version is supplied or not.
#
# When a specific version is supplied the return code will be 0 when
# a package is found and 1 when it is not. If a version is not
# supplied the tool will exit 0 in both cases.
#
# It is important to note that "version" relates to the
# packages-specs(7) notion of a version. If using the branch syntax
# (like "python%3.5") the version number is considered part of the
        # stem, and pkg_add behaves the same as if the name did
# not contain a version (which it strictly speaking does not).
if pkg_spec['version'] or build is True:
# Depend on the return code.
module.debug("package_present(): depending on return code")
if rc:
changed=False
else:
# Depend on stderr instead.
module.debug("package_present(): depending on stderr")
if stderr:
# There is a corner case where having an empty directory in
# installpath prior to the right location will result in a
# "file:/local/package/directory/ is empty" message on stderr
                # while still installing the package, so we need to look for
                # a message like "packagename-1.0: ok" just in case.
if pkg_spec['style'] == 'branch':
match = re.search("\W%s-[^:]+: ok\W" % pkg_spec['pkgname'], stdout)
else:
match = re.search("\W%s-[^:]+: ok\W" % name, stdout)
if match:
# It turns out we were able to install the package.
module.debug("package_present(): we were able to install the package")
else:
# We really did fail, fake the return code.
module.debug("package_present(): we really did fail")
rc = 1
changed=False
else:
module.debug("package_present(): stderr was not set")
if rc == 0:
if module.check_mode:
module.exit_json(changed=True)
changed=True
else:
rc = 0
stdout = ''
stderr = ''
changed=False
return (rc, stdout, stderr, changed)
# Function used to make sure a package is the latest available version.
def package_latest(name, installed_state, pkg_spec, module):
if module.params['build'] is True:
module.fail_json(msg="the combination of build=%s and state=latest is not supported" % module.params['build'])
if module.check_mode:
upgrade_cmd = 'pkg_add -umn'
else:
upgrade_cmd = 'pkg_add -um'
pre_upgrade_name = ''
if installed_state is True:
# Attempt to upgrade the package.
(rc, stdout, stderr) = execute_command("%s %s" % (upgrade_cmd, name), module)
# Look for output looking something like "nmap-6.01->6.25: ok" to see if
# something changed (or would have changed). Use \W to delimit the match
# from progress meter output.
changed = False
for installed_name in pkg_spec['installed_names']:
module.debug("package_latest(): checking for pre-upgrade package name: %s" % installed_name)
match = re.search("\W%s->.+: ok\W" % installed_name, stdout)
if match:
module.debug("package_latest(): pre-upgrade package name match: %s" % installed_name)
if module.check_mode:
module.exit_json(changed=True)
changed = True
break
# FIXME: This part is problematic. Based on the issues mentioned (and
# handled) in package_present() it is not safe to blindly trust stderr
# as an indicator that the command failed, and in the case with
# empty installpath directories this will break.
#
# For now keep this safeguard here, but ignore it if we managed to
# parse out a successful update above. This way we will report a
# successful run when we actually modify something but fail
# otherwise.
if changed != True:
if stderr:
rc=1
return (rc, stdout, stderr, changed)
else:
# If package was not installed at all just make it present.
module.debug("package_latest(): package is not installed, calling package_present()")
return package_present(name, installed_state, pkg_spec, module)
# Function used to make sure a package is not installed.
def package_absent(name, installed_state, module):
if module.check_mode:
remove_cmd = 'pkg_delete -In'
else:
remove_cmd = 'pkg_delete -I'
if installed_state is True:
# Attempt to remove the package.
rc, stdout, stderr = execute_command("%s %s" % (remove_cmd, name), module)
if rc == 0:
if module.check_mode:
module.exit_json(changed=True)
changed=True
else:
changed=False
else:
rc = 0
stdout = ''
stderr = ''
changed=False
return (rc, stdout, stderr, changed)
# Function used to parse the package name based on packages-specs(7).
# The general name structure is "stem-version[-flavors]".
#
# Names containing "%" are a special variation not part of the
# packages-specs(7) syntax. See pkg_add(1) on OpenBSD 6.0 or later for a
# description.
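# Examples of the name styles handled below:
#   "nmap"        -> style 'stem'        (no version information)
#   "nmap-6.25"   -> style 'version'     (explicit version)
#   "vim--no_x11" -> style 'versionless' (explicit or default flavor)
#   "python%3.5"  -> style 'branch'      (branch syntax, OpenBSD >= 6.0)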
def parse_package_name(name, pkg_spec, module):
module.debug("parse_package_name(): parsing name: %s" % name)
# Do some initial matches so we can base the more advanced regex on that.
version_match = re.search("-[0-9]", name)
versionless_match = re.search("--", name)
# Stop if someone is giving us a name that both has a version and is
# version-less at the same time.
if version_match and versionless_match:
module.fail_json(msg="package name both has a version and is version-less: " + name)
# If name includes a version.
if version_match:
match = re.search("^(?P<stem>.*)-(?P<version>[0-9][^-]*)(?P<flavor_separator>-)?(?P<flavor>[a-z].*)?$", name)
if match:
pkg_spec['stem'] = match.group('stem')
pkg_spec['version_separator'] = '-'
pkg_spec['version'] = match.group('version')
pkg_spec['flavor_separator'] = match.group('flavor_separator')
pkg_spec['flavor'] = match.group('flavor')
pkg_spec['style'] = 'version'
else:
module.fail_json(msg="unable to parse package name at version_match: " + name)
# If name includes no version but is version-less ("--").
elif versionless_match:
match = re.search("^(?P<stem>.*)--(?P<flavor>[a-z].*)?$", name)
if match:
pkg_spec['stem'] = match.group('stem')
pkg_spec['version_separator'] = '-'
pkg_spec['version'] = None
pkg_spec['flavor_separator'] = '-'
pkg_spec['flavor'] = match.group('flavor')
pkg_spec['style'] = 'versionless'
else:
module.fail_json(msg="unable to parse package name at versionless_match: " + name)
# If name includes no version, and is not version-less, it is all a stem.
else:
match = re.search("^(?P<stem>.*)$", name)
if match:
pkg_spec['stem'] = match.group('stem')
pkg_spec['version_separator'] = None
pkg_spec['version'] = None
pkg_spec['flavor_separator'] = None
pkg_spec['flavor'] = None
pkg_spec['style'] = 'stem'
else:
module.fail_json(msg="unable to parse package name at else: " + name)
# If the stem contains an "%" then it needs special treatment.
branch_match = re.search("%", pkg_spec['stem'])
if branch_match:
branch_release = "6.0"
if version_match or versionless_match:
module.fail_json(msg="package name using 'branch' syntax also has a version or is version-less: " + name)
if StrictVersion(platform.release()) < StrictVersion(branch_release):
module.fail_json(msg="package name using 'branch' syntax requires at least OpenBSD %s: %s" % (branch_release, name))
pkg_spec['style'] = 'branch'
# Key names from description in pkg_add(1).
pkg_spec['pkgname'] = pkg_spec['stem'].split('%')[0]
pkg_spec['branch'] = pkg_spec['stem'].split('%')[1]
# Sanity check that there are no trailing dashes in flavor.
# Try to stop strange stuff early so we can be strict later.
if pkg_spec['flavor']:
match = re.search("-$", pkg_spec['flavor'])
if match:
module.fail_json(msg="trailing dash in flavor: " + pkg_spec['flavor'])
# Function used for figuring out the port path.
def get_package_source_path(name, pkg_spec, module):
pkg_spec['subpackage'] = None
if pkg_spec['stem'] == 'sqlports':
return 'databases/sqlports'
else:
# try for an exact match first
sqlports_db_file = '/usr/local/share/sqlports'
if not os.path.isfile(sqlports_db_file):
module.fail_json(msg="sqlports file '%s' is missing" % sqlports_db_file)
conn = sqlite3.connect(sqlports_db_file)
first_part_of_query = 'SELECT fullpkgpath, fullpkgname FROM ports WHERE fullpkgname'
query = first_part_of_query + ' = ?'
module.debug("package_package_source_path(): exact query: %s" % query)
cursor = conn.execute(query, (name,))
results = cursor.fetchall()
# next, try for a fuzzier match
if len(results) < 1:
looking_for = pkg_spec['stem'] + (pkg_spec['version_separator'] or '-') + (pkg_spec['version'] or '%')
query = first_part_of_query + ' LIKE ?'
if pkg_spec['flavor']:
looking_for += pkg_spec['flavor_separator'] + pkg_spec['flavor']
module.debug("package_package_source_path(): fuzzy flavor query: %s" % query)
cursor = conn.execute(query, (looking_for,))
elif pkg_spec['style'] == 'versionless':
query += ' AND fullpkgname NOT LIKE ?'
module.debug("package_package_source_path(): fuzzy versionless query: %s" % query)
cursor = conn.execute(query, (looking_for, "%s-%%" % looking_for,))
else:
module.debug("package_package_source_path(): fuzzy query: %s" % query)
cursor = conn.execute(query, (looking_for,))
results = cursor.fetchall()
# error if we don't find exactly 1 match
conn.close()
if len(results) < 1:
module.fail_json(msg="could not find a port by the name '%s'" % name)
if len(results) > 1:
matches = map(lambda x:x[1], results)
module.fail_json(msg="too many matches, unsure which to build: %s" % ' OR '.join(matches))
# there's exactly 1 match, so figure out the subpackage, if any, then return
fullpkgpath = results[0][0]
parts = fullpkgpath.split(',')
if len(parts) > 1 and parts[1][0] == '-':
pkg_spec['subpackage'] = parts[1]
return parts[0]
# Function used for upgrading all installed packages.
def upgrade_packages(module):
if module.check_mode:
upgrade_cmd = 'pkg_add -Imnu'
else:
upgrade_cmd = 'pkg_add -Imu'
# Attempt to upgrade all packages.
rc, stdout, stderr = execute_command("%s" % upgrade_cmd, module)
    # Try to find any occurrence of a package changing version like:
# "bzip2-1.0.6->1.0.6p0: ok".
match = re.search("\W\w.+->.+: ok\W", stdout)
if match:
if module.check_mode:
module.exit_json(changed=True)
changed=True
else:
changed=False
# It seems we can not trust the return value, so depend on the presence of
# stderr to know if something failed.
if stderr:
rc = 1
else:
rc = 0
return (rc, stdout, stderr, changed)
# ===========================================
# Main control flow.
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
state = dict(required=True, choices=['absent', 'installed', 'latest', 'present', 'removed']),
build = dict(default='no', type='bool'),
ports_dir = dict(default='/usr/ports'),
),
supports_check_mode = True
)
name = module.params['name']
state = module.params['state']
build = module.params['build']
ports_dir = module.params['ports_dir']
rc = 0
stdout = ''
stderr = ''
result = {}
result['name'] = name
result['state'] = state
result['build'] = build
if build is True:
if not os.path.isdir(ports_dir):
module.fail_json(msg="the ports source directory %s does not exist" % (ports_dir))
        # build sqlports if it's not installed yet
pkg_spec = {}
parse_package_name('sqlports', pkg_spec, module)
installed_state = get_package_state('sqlports', pkg_spec, module)
if not installed_state:
module.debug("main(): installing 'sqlports' because build=%s" % module.params['build'])
package_present('sqlports', installed_state, pkg_spec, module)
if name == '*':
if state != 'latest':
module.fail_json(msg="the package name '*' is only valid when using state=latest")
else:
# Perform an upgrade of all installed packages.
(rc, stdout, stderr, changed) = upgrade_packages(module)
else:
# Parse package name and put results in the pkg_spec dictionary.
pkg_spec = {}
parse_package_name(name, pkg_spec, module)
# Not sure how the branch syntax is supposed to play together
# with build mode. Disable it for now.
if pkg_spec['style'] == 'branch' and module.params['build'] is True:
module.fail_json(msg="the combination of 'branch' syntax and build=%s is not supported: %s" % (module.params['build'], name))
# Get package state.
installed_state = get_package_state(name, pkg_spec, module)
# Perform requested action.
if state in ['installed', 'present']:
(rc, stdout, stderr, changed) = package_present(name, installed_state, pkg_spec, module)
elif state in ['absent', 'removed']:
(rc, stdout, stderr, changed) = package_absent(name, installed_state, module)
elif state == 'latest':
(rc, stdout, stderr, changed) = package_latest(name, installed_state, pkg_spec, module)
if rc != 0:
if stderr:
module.fail_json(msg=stderr)
else:
module.fail_json(msg=stdout)
result['changed'] = changed
module.exit_json(**result)
# Import module snippets.
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,190 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Patrick Pelletier <pp.pelletier@gmail.com>
# Based on pacman (Afterburn) and pkgin (Shaun Zinck) modules
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: opkg
author: "Patrick Pelletier (@skinp)"
short_description: Package manager for OpenWrt
description:
- Manages OpenWrt packages
version_added: "1.1"
options:
name:
description:
- name of package to install/remove
required: true
state:
description:
- state of the package
choices: [ 'present', 'absent' ]
required: false
default: present
force:
description:
- opkg --force parameter used
choices: ["", "depends", "maintainer", "reinstall", "overwrite", "downgrade", "space", "postinstall", "remove", "checksum", "removal-of-dependent-packages"]
required: false
default: absent
version_added: "2.0"
update_cache:
description:
- update the package db first
required: false
default: "no"
choices: [ "yes", "no" ]
notes: []
'''
EXAMPLES = '''
- opkg:
name: foo
state: present
- opkg:
name: foo
state: present
update_cache: yes
- opkg:
name: foo
state: absent
- opkg:
name: foo,bar
state: absent
- opkg:
name: foo
state: present
force: overwrite
'''
import pipes
def update_package_db(module, opkg_path):
""" Updates packages list. """
rc, out, err = module.run_command("%s update" % opkg_path)
if rc != 0:
module.fail_json(msg="could not update package db")
def query_package(module, opkg_path, name, state="present"):
""" Returns whether a package is installed or not. """
if state == "present":
rc, out, err = module.run_command("%s list-installed | grep -q \"^%s \"" % (pipes.quote(opkg_path), pipes.quote(name)), use_unsafe_shell=True)
if rc == 0:
return True
return False
def remove_packages(module, opkg_path, packages):
""" Uninstalls one or more packages if installed. """
p = module.params
force = p["force"]
if force:
force = "--force-%s" % force
remove_c = 0
    # Using a for loop in case of error, so we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
if not query_package(module, opkg_path, package):
continue
rc, out, err = module.run_command("%s remove %s %s" % (opkg_path, force, package))
if query_package(module, opkg_path, package):
module.fail_json(msg="failed to remove %s: %s" % (package, out))
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, opkg_path, packages):
""" Installs one or more packages if not already installed. """
p = module.params
force = p["force"]
if force:
force = "--force-%s" % force
install_c = 0
for package in packages:
if query_package(module, opkg_path, package):
continue
rc, out, err = module.run_command("%s install %s %s" % (opkg_path, force, package))
if not query_package(module, opkg_path, package):
module.fail_json(msg="failed to install %s: %s" % (package, out))
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg="installed %s package(s)" % (install_c))
module.exit_json(changed=False, msg="package(s) already present")
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(aliases=["pkg"], required=True),
state = dict(default="present", choices=["present", "installed", "absent", "removed"]),
force = dict(default="", choices=["", "depends", "maintainer", "reinstall", "overwrite", "downgrade", "space", "postinstall", "remove", "checksum", "removal-of-dependent-packages"]),
update_cache = dict(default="no", aliases=["update-cache"], type='bool')
)
)
opkg_path = module.get_bin_path('opkg', True, ['/bin'])
p = module.params
if p["update_cache"]:
update_package_db(module, opkg_path)
pkgs = p["name"].split(",")
if p["state"] in ["present", "installed"]:
install_packages(module, opkg_path, pkgs)
elif p["state"] in ["absent", "removed"]:
remove_packages(module, opkg_path, pkgs)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,371 @@
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# (c) 2012, Afterburn <http://github.com/afterburn>
# (c) 2013, Aaron Bull Schaefer <aaron@elasticdog.com>
# (c) 2015, Indrajit Raychaudhuri <irc+code@indrajit.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: pacman
short_description: Manage packages with I(pacman)
description:
- Manage packages with the I(pacman) package manager, which is used by
Arch Linux and its variants.
version_added: "1.0"
author:
- "Indrajit Raychaudhuri (@indrajitr)"
- "'Aaron Bull Schaefer (@elasticdog)' <aaron@elasticdog.com>"
- "Afterburn"
notes: []
requirements: []
options:
name:
description:
- Name of the package to install, upgrade, or remove.
required: false
default: null
aliases: [ 'pkg', 'package' ]
state:
description:
- Desired state of the package.
required: false
default: "present"
choices: ["present", "absent", "latest"]
recurse:
description:
- When removing a package, also remove its dependencies, provided
that they are not required by other packages and were not
explicitly installed by a user.
required: false
default: no
choices: ["yes", "no"]
version_added: "1.3"
force:
description:
      - When removing a package, force its removal without any checks.
        When used with update_cache, force a re-download of the repository
        databases.
required: false
default: no
choices: ["yes", "no"]
version_added: "2.0"
update_cache:
description:
- Whether or not to refresh the master package lists. This can be
run as part of a package installation or as a separate step.
required: false
default: no
choices: ["yes", "no"]
aliases: [ 'update-cache' ]
upgrade:
description:
      - Whether or not to upgrade the whole system
required: false
default: no
choices: ["yes", "no"]
version_added: "2.0"
'''
EXAMPLES = '''
# Install package foo
- pacman:
name: foo
state: present
# Upgrade package foo
- pacman:
name: foo
state: latest
update_cache: yes
# Remove packages foo and bar
- pacman:
name: foo,bar
state: absent
# Recursively remove package baz
- pacman:
name: baz
state: absent
recurse: yes
# Run the equivalent of "pacman -Sy" as a separate step
- pacman:
update_cache: yes
# Run the equivalent of "pacman -Su" as a separate step
- pacman:
upgrade: yes
# Run the equivalent of "pacman -Syu" as a separate step
- pacman:
update_cache: yes
upgrade: yes
# Run the equivalent of "pacman -Rdd", force remove package baz
- pacman:
name: baz
state: absent
force: yes
'''
import shlex
import os
import re
import sys
def get_version(pacman_output):
"""Take pacman -Qi or pacman -Si output and get the Version"""
lines = pacman_output.split('\n')
for line in lines:
if 'Version' in line:
return line.split(':')[1].strip()
return None
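# "pacman -Qi foo" / "pacman -Si foo" output contains a line such as
# "Version        : 1.2.3-1"; get_version() above extracts the "1.2.3-1" part.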
def query_package(module, pacman_path, name, state="present"):
"""Query the package status in both the local system and the repository. Returns a boolean to indicate if the package is installed, a second boolean to indicate if the package is up-to-date and a third boolean to indicate whether online information were available"""
if state == "present":
lcmd = "%s -Qi %s" % (pacman_path, name)
lrc, lstdout, lstderr = module.run_command(lcmd, check_rc=False)
if lrc != 0:
# package is not installed locally
return False, False, False
# get the version installed locally (if any)
lversion = get_version(lstdout)
rcmd = "%s -Si %s" % (pacman_path, name)
rrc, rstdout, rstderr = module.run_command(rcmd, check_rc=False)
# get the version in the repository
rversion = get_version(rstdout)
if rrc == 0:
# Return True to indicate that the package is installed locally, and the result of the version number comparison
# to determine if the package is up-to-date.
return True, (lversion == rversion), False
# package is installed but cannot fetch remote Version. Last True stands for the error
return True, True, True
def update_package_db(module, pacman_path):
if module.params["force"]:
args = "Syy"
else:
args = "Sy"
cmd = "%s -%s" % (pacman_path, args)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
return True
else:
module.fail_json(msg="could not update package db")
def upgrade(module, pacman_path):
cmdupgrade = "%s -Suq --noconfirm" % (pacman_path)
cmdneedrefresh = "%s -Qqu" % (pacman_path)
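    # "pacman -Qqu" lists packages with pending upgrades and exits non-zero
    # when there is nothing to upgrade, which is what the rc check below
    # relies on.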
rc, stdout, stderr = module.run_command(cmdneedrefresh, check_rc=False)
if rc == 0:
if module.check_mode:
data = stdout.split('\n')
module.exit_json(changed=True, msg="%s package(s) would be upgraded" % (len(data) - 1))
rc, stdout, stderr = module.run_command(cmdupgrade, check_rc=False)
if rc == 0:
module.exit_json(changed=True, msg='System upgraded')
else:
module.fail_json(msg="Could not upgrade")
else:
module.exit_json(changed=False, msg='Nothing to upgrade')
def remove_packages(module, pacman_path, packages):
if module.params["recurse"] or module.params["force"]:
if module.params["recurse"]:
args = "Rs"
if module.params["force"]:
args = "Rdd"
if module.params["recurse"] and module.params["force"]:
args = "Rdds"
else:
args = "R"
remove_c = 0
    # Using a for loop in case of error, so we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
installed, updated, unknown = query_package(module, pacman_path, package)
if not installed:
continue
cmd = "%s -%s %s --noconfirm" % (pacman_path, args, package)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to remove %s" % (package))
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, pacman_path, state, packages, package_files):
install_c = 0
package_err = []
message = ""
for i, package in enumerate(packages):
# if the package is installed and state == present or state == latest and is up-to-date then skip
installed, updated, latestError = query_package(module, pacman_path, package)
if latestError and state == 'latest':
package_err.append(package)
if installed and (state == 'present' or (state == 'latest' and updated)):
continue
if package_files[i]:
params = '-U %s' % package_files[i]
else:
params = '-S %s' % package
cmd = "%s %s --noconfirm --needed" % (pacman_path, params)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to install %s" % (package))
install_c += 1
if state == 'latest' and len(package_err) > 0:
message = "But could not ensure 'latest' state for %s package(s) as remote version could not be fetched." % (package_err)
if install_c > 0:
module.exit_json(changed=True, msg="installed %s package(s). %s" % (install_c, message))
module.exit_json(changed=False, msg="package(s) already installed. %s" % (message))
def check_packages(module, pacman_path, packages, state):
would_be_changed = []
for package in packages:
installed, updated, unknown = query_package(module, pacman_path, package)
if ((state in ["present", "latest"] and not installed) or
(state == "absent" and installed) or
(state == "latest" and not updated)):
would_be_changed.append(package)
if would_be_changed:
if state == "absent":
state = "removed"
module.exit_json(changed=True, msg="%s package(s) would be %s" % (
len(would_be_changed), state))
else:
module.exit_json(changed=False, msg="package(s) already %s" % state)
def expand_package_groups(module, pacman_path, pkgs):
expanded = []
for pkg in pkgs:
cmd = "%s -Sgq %s" % (pacman_path, pkg)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
# A group was found matching the name, so expand it
for name in stdout.split('\n'):
name = name.strip()
if name:
expanded.append(name)
else:
expanded.append(pkg)
return expanded
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(aliases=['pkg', 'package'], type='list'),
state = dict(default='present', choices=['present', 'installed', "latest", 'absent', 'removed']),
recurse = dict(default=False, type='bool'),
force = dict(default=False, type='bool'),
upgrade = dict(default=False, type='bool'),
update_cache = dict(default=False, aliases=['update-cache'], type='bool')
),
required_one_of = [['name', 'update_cache', 'upgrade']],
supports_check_mode = True)
pacman_path = module.get_bin_path('pacman', True)
p = module.params
# normalize the state parameter
if p['state'] in ['present', 'installed']:
p['state'] = 'present'
elif p['state'] in ['absent', 'removed']:
p['state'] = 'absent'
if p["update_cache"] and not module.check_mode:
update_package_db(module, pacman_path)
if not (p['name'] or p['upgrade']):
module.exit_json(changed=True, msg='Updated the package master lists')
if p['update_cache'] and module.check_mode and not (p['name'] or p['upgrade']):
module.exit_json(changed=True, msg='Would have updated the package cache')
if p['upgrade']:
upgrade(module, pacman_path)
if p['name']:
pkgs = expand_package_groups(module, pacman_path, p['name'])
pkg_files = []
for i, pkg in enumerate(pkgs):
if pkg.endswith('.pkg.tar.xz'):
# The package given is a filename, extract the raw pkg name from
# it and store the filename
pkg_files.append(pkg)
pkgs[i] = re.sub('-[0-9].*$', '', pkgs[i].split('/')[-1])
else:
pkg_files.append(None)
if module.check_mode:
check_packages(module, pacman_path, pkgs, p['state'])
if p['state'] in ['present', 'latest']:
install_packages(module, pacman_path, p['state'], pkgs, pkg_files)
elif p['state'] == 'absent':
remove_packages(module, pacman_path, pkgs)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == "__main__":
main()

View file

@ -0,0 +1,177 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2014 Peter Oliver <ansible@mavit.org.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: pkg5
author: "Peter Oliver (@mavit)"
short_description: Manages packages with the Solaris 11 Image Packaging System
version_added: 1.9
description:
- IPS packages are the native packages in Solaris 11 and higher.
notes:
- The naming of IPS packages is explained at U(http://www.oracle.com/technetwork/articles/servers-storage-admin/ips-package-versioning-2232906.html).
options:
name:
description:
      - An FMRI of the package(s) to be installed/removed/updated.
- Multiple packages may be specified, separated by C(,).
required: true
state:
description:
- Whether to install (I(present), I(latest)), or remove (I(absent)) a
package.
required: false
default: present
choices: [ present, latest, absent ]
accept_licenses:
description:
- Accept any licences.
required: false
default: false
choices: [ true, false ]
aliases: [ accept_licences, accept ]
'''
EXAMPLES = '''
# Install Vim:
- pkg5:
name: editor/vim
# Remove finger daemon:
- pkg5:
name: service/network/finger
state: absent
# Install several packages at once:
- pkg5:
name:
- /file/gnu-findutils
- /text/gnu-grep
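# Illustrative sketch only: install a package and accept its licence,
# using the accept_licenses option documented above:
- pkg5:
    name: editor/vim
    accept_licenses: yes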
'''
import re
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True, type='list'),
state=dict(
default='present',
choices=[
'present',
'installed',
'latest',
'absent',
'uninstalled',
'removed',
]
),
accept_licenses=dict(
type='bool',
default=False,
aliases=['accept_licences', 'accept'],
),
)
)
params = module.params
packages = []
    # pkg(5) FMRIs include a comma before the release number, but
# AnsibleModule will have split this into multiple items for us.
# Try to spot where this has happened and fix it.
for fragment in params['name']:
if (
re.search('^\d+(?:\.\d+)*', fragment)
and packages and re.search('@[^,]*$', packages[-1])
):
packages[-1] += ',' + fragment
else:
packages.append(fragment)
if params['state'] in ['present', 'installed']:
ensure(module, 'present', packages, params)
elif params['state'] in ['latest']:
ensure(module, 'latest', packages, params)
elif params['state'] in ['absent', 'uninstalled', 'removed']:
ensure(module, 'absent', packages, params)
def ensure(module, state, packages, params):
response = {
'results': [],
'msg': '',
}
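    # Map each target state to a predicate that selects the packages still
    # needing action and to the pkg(1) subcommand used to reach that state.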
behaviour = {
'present': {
'filter': lambda p: not is_installed(module, p),
'subcommand': 'install',
},
'latest': {
'filter': lambda p: not is_latest(module, p),
'subcommand': 'install',
},
'absent': {
'filter': lambda p: is_installed(module, p),
'subcommand': 'uninstall',
},
}
if params['accept_licenses']:
accept_licenses = ['--accept']
else:
accept_licenses = []
to_modify = filter(behaviour[state]['filter'], packages)
if to_modify:
rc, out, err = module.run_command(
[
'pkg', behaviour[state]['subcommand']
]
+ accept_licenses
+ [
'-q', '--'
] + to_modify
)
response['rc'] = rc
response['results'].append(out)
response['msg'] += err
response['changed'] = True
if rc != 0:
module.fail_json(**response)
module.exit_json(**response)
def is_installed(module, package):
rc, out, err = module.run_command(['pkg', 'list', '--', package])
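    # 'pkg list' exits 0 when the package is installed, so invert the exit code.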
return not bool(int(rc))
def is_latest(module, package):
rc, out, err = module.run_command(['pkg', 'list', '-u', '--', package])
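    # 'pkg list -u' exits 0 only when an update is available, so a non-zero
    # exit code means the package is already up to date.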
return bool(int(rc))
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,212 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2014 Peter Oliver <ansible@mavit.org.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: pkg5_publisher
author: "Peter Oliver (@mavit)"
short_description: Manages Solaris 11 Image Packaging System publishers
version_added: 1.9
description:
- IPS packages are the native packages in Solaris 11 and higher.
  - This module configures which publishers a client will download IPS
packages from.
options:
name:
description:
- The publisher's name.
required: true
aliases: [ publisher ]
state:
description:
- Whether to ensure that a publisher is present or absent.
required: false
default: present
choices: [ present, absent ]
sticky:
description:
- Packages installed from a sticky repository can only receive updates
from that repository.
required: false
default: null
choices: [ true, false ]
enabled:
description:
- Is the repository enabled or disabled?
required: false
default: null
choices: [ true, false ]
origin:
description:
- A path or URL to the repository.
- Multiple values may be provided.
required: false
default: null
mirror:
description:
- A path or URL to the repository mirror.
- Multiple values may be provided.
required: false
default: null
'''
EXAMPLES = '''
# Fetch packages for the solaris publisher direct from Oracle:
- pkg5_publisher:
name: solaris
sticky: true
origin: https://pkg.oracle.com/solaris/support/
# Configure a publisher for locally-produced packages:
- pkg5_publisher:
name: site
origin: 'https://pkg.example.com/site/'
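# Illustrative sketch only: remove a previously configured publisher,
# using the state option documented above:
- pkg5_publisher:
    name: site
    state: absent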
'''
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True, aliases=['publisher']),
state=dict(default='present', choices=['present', 'absent']),
sticky=dict(type='bool'),
enabled=dict(type='bool'),
# search_after=dict(),
# search_before=dict(),
origin=dict(type='list'),
mirror=dict(type='list'),
)
)
for option in ['origin', 'mirror']:
if module.params[option] == ['']:
module.params[option] = []
if module.params['state'] == 'present':
modify_publisher(module, module.params)
else:
unset_publisher(module, module.params['name'])
def modify_publisher(module, params):
name = params['name']
existing = get_publishers(module)
if name in existing:
for option in ['origin', 'mirror', 'sticky', 'enabled']:
if params[option] != None:
if params[option] != existing[name][option]:
return set_publisher(module, params)
else:
return set_publisher(module, params)
module.exit_json()
def set_publisher(module, params):
name = params['name']
args = []
if params['origin'] != None:
args.append('--remove-origin=*')
args.extend(['--add-origin=' + u for u in params['origin']])
if params['mirror'] != None:
args.append('--remove-mirror=*')
args.extend(['--add-mirror=' + u for u in params['mirror']])
if params['sticky'] != None and params['sticky']:
args.append('--sticky')
elif params['sticky'] != None:
args.append('--non-sticky')
if params['enabled'] != None and params['enabled']:
args.append('--enable')
elif params['enabled'] != None:
args.append('--disable')
rc, out, err = module.run_command(
["pkg", "set-publisher"] + args + [name],
check_rc=True
)
response = {
'rc': rc,
'results': [out],
'msg': err,
'changed': True,
}
module.exit_json(**response)
def unset_publisher(module, publisher):
if not publisher in get_publishers(module):
module.exit_json()
rc, out, err = module.run_command(
["pkg", "unset-publisher", publisher],
check_rc=True
)
response = {
'rc': rc,
'results': [out],
'msg': err,
'changed': True,
}
module.exit_json(**response)
def get_publishers(module):
rc, out, err = module.run_command(["pkg", "publisher", "-Ftsv"], True)
lines = out.splitlines()
keys = lines.pop(0).lower().split("\t")
publishers = {}
for line in lines:
values = dict(zip(keys, map(unstringify, line.split("\t"))))
name = values['publisher']
if not name in publishers:
publishers[name] = dict(
(k, values[k]) for k in ['sticky', 'enabled']
)
publishers[name]['origin'] = []
publishers[name]['mirror'] = []
if values['type'] is not None:
publishers[name][values['type']].append(values['uri'])
return publishers
def unstringify(val):
if val == "-" or val == '':
return None
elif val == "true":
return True
elif val == "false":
return False
else:
return val
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,389 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Shaun Zinck <shaun.zinck at gmail.com>
# Copyright (c) 2015 Lawrence Leonard Gilbert <larry@L2G.to>
# Copyright (c) 2016 Jasper Lievisse Adriaanse <j at jasper.la>
#
# Written by Shaun Zinck
# Based on pacman module written by Afterburn <http://github.com/afterburn>
# that was based on apt module written by Matthew Williams <matthew@flowroute.com>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: pkgin
short_description: Package manager for SmartOS, NetBSD, et al.
description:
- "The standard package manager for SmartOS, but also usable on NetBSD
or any OS that uses C(pkgsrc). (Home: U(http://pkgin.net/))"
version_added: "1.0"
author:
- "Larry Gilbert (L2G)"
- "Shaun Zinck (@szinck)"
- "Jasper Lievisse Adriaanse (@jasperla)"
notes:
- "Known bug with pkgin < 0.8.0: if a package is removed and another
package depends on it, the other package will be silently removed as
well. New to Ansible 1.9: check-mode support."
options:
name:
description:
- Name of package to install/remove;
- multiple names may be given, separated by commas
required: false
default: null
state:
description:
- Intended state of the package
choices: [ 'present', 'absent' ]
required: false
default: present
update_cache:
description:
      - Update repository database. Can be run with other steps or on its own.
required: false
default: no
choices: [ "yes", "no" ]
version_added: "2.1"
upgrade:
description:
- Upgrade main packages to their newer versions
required: false
default: no
choices: [ "yes", "no" ]
version_added: "2.1"
full_upgrade:
description:
- Upgrade all packages to their newer versions
required: false
default: no
choices: [ "yes", "no" ]
version_added: "2.1"
clean:
description:
- Clean packages cache
required: false
default: no
choices: [ "yes", "no" ]
version_added: "2.1"
force:
description:
- Force package reinstall
required: false
default: no
choices: [ "yes", "no" ]
version_added: "2.1"
'''
EXAMPLES = '''
# install package foo
- pkgin:
name: foo
state: present
# Update database and install "foo" package
- pkgin:
name: foo
update_cache: yes
# remove package foo
- pkgin:
name: foo
state: absent
# remove packages foo and bar
- pkgin:
name: foo,bar
state: absent
# Update repositories as a separate step
- pkgin:
update_cache: yes
# Upgrade main packages (equivalent to C(pkgin upgrade))
- pkgin:
upgrade: yes
# Upgrade all packages (equivalent to C(pkgin full-upgrade))
- pkgin:
full_upgrade: yes
# Force-upgrade all packages (equivalent to C(pkgin -F full-upgrade))
- pkgin:
full_upgrade: yes
force: yes
# clean packages cache (equivalent to C(pkgin clean))
- pkgin:
clean: yes
'''
import re
def query_package(module, name):
"""Search for the package by name.
Possible return values:
* "present" - installed, no upgrade needed
* "outdated" - installed, but can be upgraded
* False - not installed or not found
"""
# test whether '-p' (parsable) flag is supported.
rc, out, err = module.run_command("%s -p -v" % PKGIN_PATH)
if rc == 0:
pflag = '-p'
splitchar = ';'
else:
pflag = ''
splitchar = ' '
# Use "pkgin search" to find the package. The regular expression will
# only match on the complete name.
rc, out, err = module.run_command("%s %s search \"^%s$\"" % (PKGIN_PATH, pflag, name))
# rc will not be 0 unless the search was a success
if rc == 0:
# Search results may contain more than one line (e.g., 'emacs'), so iterate
# through each line to see if we have a match.
packages = out.split('\n')
for package in packages:
# Break up line at spaces. The first part will be the package with its
# version (e.g. 'gcc47-libs-4.7.2nb4'), and the second will be the state
# of the package:
# '' - not installed
# '<' - installed but out of date
# '=' - installed and up to date
# '>' - installed but newer than the repository version
pkgname_with_version, raw_state = package.split(splitchar)[0:2]
# Search for package, stripping version
# (results in sth like 'gcc47-libs' or 'emacs24-nox11')
pkg_search_obj = re.search(r'^(.*?)\-[0-9][0-9.]*(nb[0-9]+)*', pkgname_with_version, re.M)
# Do not proceed unless we have a match
if not pkg_search_obj:
continue
# Grab matched string
pkgname_without_version = pkg_search_obj.group(1)
if name != pkgname_without_version:
continue
# The package was found; now return its state
if raw_state == '<':
return 'outdated'
elif raw_state == '=' or raw_state == '>':
return 'present'
else:
return False
# no fall-through
# No packages were matched, so return False
return False
def format_action_message(module, action, count):
vars = { "actioned": action,
"count": count }
if module.check_mode:
message = "would have %(actioned)s %(count)d package" % vars
else:
message = "%(actioned)s %(count)d package" % vars
if count == 1:
return message
else:
return message + "s"
def format_pkgin_command(module, command, package=None):
# Not all commands take a package argument, so cover this up by passing
# an empty string. Some commands (e.g. 'update') will ignore extra
# arguments, however this behaviour cannot be relied on for others.
if package is None:
package = ""
if module.params["force"]:
force = "-F"
else:
force = ""
vars = { "pkgin": PKGIN_PATH,
"command": command,
"package": package,
"force": force}
if module.check_mode:
return "%(pkgin)s -n %(command)s %(package)s" % vars
else:
return "%(pkgin)s -y %(force)s %(command)s %(package)s" % vars
def remove_packages(module, packages):
remove_c = 0
    # Using a for loop in case of error, we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
if not query_package(module, package):
continue
rc, out, err = module.run_command(
format_pkgin_command(module, "remove", package))
if not module.check_mode and query_package(module, package):
module.fail_json(msg="failed to remove %s: %s" % (package, out))
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg=format_action_message(module, "removed", remove_c))
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, packages):
install_c = 0
for package in packages:
if query_package(module, package):
continue
rc, out, err = module.run_command(
format_pkgin_command(module, "install", package))
if not module.check_mode and not query_package(module, package):
module.fail_json(msg="failed to install %s: %s" % (package, out))
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg=format_action_message(module, "installed", install_c))
module.exit_json(changed=False, msg="package(s) already present")
def update_package_db(module):
rc, out, err = module.run_command(
format_pkgin_command(module, "update"))
if rc == 0:
if re.search('database for.*is up-to-date\n$', out):
return False, "datebase is up-to-date"
else:
return True, "updated repository database"
else:
module.fail_json(msg="could not update package db")
def do_upgrade_packages(module, full=False):
if full:
cmd = "full-upgrade"
else:
cmd = "upgrade"
rc, out, err = module.run_command(
format_pkgin_command(module, cmd))
if rc == 0:
if re.search('^nothing to do.\n$', out):
module.exit_json(changed=False, msg="nothing left to upgrade")
else:
module.fail_json(msg="could not %s packages" % cmd)
def upgrade_packages(module):
do_upgrade_packages(module)
def full_upgrade_packages(module):
do_upgrade_packages(module, True)
def clean_cache(module):
rc, out, err = module.run_command(
format_pkgin_command(module, "clean"))
if rc == 0:
# There's no indication if 'clean' actually removed anything,
# so assume it did.
module.exit_json(changed=True, msg="cleaned caches")
else:
module.fail_json(msg="could not clean package cache")
def main():
module = AnsibleModule(
argument_spec = dict(
state = dict(default="present", choices=["present","absent"]),
name = dict(aliases=["pkg"], type='list'),
update_cache = dict(default='no', type='bool'),
upgrade = dict(default='no', type='bool'),
full_upgrade = dict(default='no', type='bool'),
clean = dict(default='no', type='bool'),
force = dict(default='no', type='bool')),
required_one_of = [['name', 'update_cache', 'upgrade', 'full_upgrade', 'clean']],
supports_check_mode = True)
global PKGIN_PATH
PKGIN_PATH = module.get_bin_path('pkgin', True, ['/opt/local/bin'])
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
p = module.params
if p["update_cache"]:
c, msg = update_package_db(module)
if not (p['name'] or p["upgrade"] or p["full_upgrade"]):
module.exit_json(changed=c, msg=msg)
if p["upgrade"]:
upgrade_packages(module)
if not p['name']:
module.exit_json(changed=True, msg='upgraded packages')
if p["full_upgrade"]:
full_upgrade_packages(module)
if not p['name']:
module.exit_json(changed=True, msg='upgraded all packages')
if p["clean"]:
clean_cache(module)
if not p['name']:
module.exit_json(changed=True, msg='cleaned caches')
pkgs = p["name"]
if p["state"] == "present":
install_packages(module, pkgs)
elif p["state"] == "absent":
remove_packages(module, pkgs)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,363 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, bleader
# Written by bleader <bleader@ratonland.org>
# Based on pkgin module written by Shaun Zinck <shaun.zinck at gmail.com>
# that was based on pacman module written by Afterburn <http://github.com/afterburn>
# that was based on apt module written by Matthew Williams <matthew@flowroute.com>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: pkgng
short_description: Package manager for FreeBSD >= 9.0
description:
- Manage binary packages for FreeBSD using 'pkgng' which
is available in versions after 9.0.
version_added: "1.2"
options:
name:
description:
- Name of package to install/remove.
required: true
state:
description:
- State of the package.
choices: [ 'present', 'absent' ]
required: false
default: present
cached:
description:
- Use local package base instead of fetching an updated one.
choices: [ 'yes', 'no' ]
required: false
default: no
annotation:
description:
      - A comma-separated list of key/value pairs of the form
C(<+/-/:><key>[=<value>]). A C(+) denotes adding an annotation, a
C(-) denotes removing an annotation, and C(:) denotes modifying an
annotation.
If setting or modifying annotations, a value must be provided.
required: false
version_added: "1.6"
pkgsite:
description:
- For pkgng versions before 1.1.4, specify packagesite to use
for downloading packages. If not specified, use settings from
C(/usr/local/etc/pkg.conf).
      - For newer pkgng versions, specify the name of a repository
configured in C(/usr/local/etc/pkg/repos).
required: false
rootdir:
description:
- For pkgng versions 1.5 and later, pkg will install all packages
within the specified root directory.
- Can not be used together with I(chroot) option.
required: false
chroot:
version_added: "2.1"
description:
- Pkg will chroot in the specified environment.
- Can not be used together with I(rootdir) option.
required: false
autoremove:
version_added: "2.2"
description:
- Remove automatically installed packages which are no longer needed.
required: false
choices: [ "yes", "no" ]
default: no
author: "bleader (@bleader)"
notes:
  - When using pkgsite, be aware that packages already in the cache will not be downloaded again.
'''
EXAMPLES = '''
# Install package foo
- pkgng:
name: foo
state: present
# Annotate package foo and bar
- pkgng:
name: foo,bar
annotation: '+test1=baz,-test2,:test3=foobar'
# Remove packages foo and bar
- pkgng:
name: foo,bar
state: absent
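# Illustrative sketch only: remove package foo and also clean up packages
# that were only pulled in as dependencies, using the autoremove option
# documented above:
- pkgng:
    name: foo
    state: absent
    autoremove: yes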
'''
import re
from ansible.module_utils.basic import AnsibleModule
def query_package(module, pkgng_path, name, dir_arg):
rc, out, err = module.run_command("%s %s info -g -e %s" % (pkgng_path, dir_arg, name))
if rc == 0:
return True
return False
def pkgng_older_than(module, pkgng_path, compare_version):
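    # Compare the version reported by 'pkg -v' against compare_version element
    # by element and return True when the installed pkg(8) is older.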
rc, out, err = module.run_command("%s -v" % pkgng_path)
version = [int(x) for x in re.split(r'[\._]', out)]
i = 0
new_pkgng = True
while compare_version[i] == version[i]:
i += 1
if i == min(len(compare_version), len(version)):
break
else:
if compare_version[i] > version[i]:
new_pkgng = False
return not new_pkgng
def remove_packages(module, pkgng_path, packages, dir_arg):
remove_c = 0
    # Using a for loop in case of error, we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
if not query_package(module, pkgng_path, package, dir_arg):
continue
if not module.check_mode:
rc, out, err = module.run_command("%s %s delete -y %s" % (pkgng_path, dir_arg, package))
if not module.check_mode and query_package(module, pkgng_path, package, dir_arg):
module.fail_json(msg="failed to remove %s: %s" % (package, out))
remove_c += 1
if remove_c > 0:
return (True, "removed %s package(s)" % remove_c)
return (False, "package(s) already absent")
def install_packages(module, pkgng_path, packages, cached, pkgsite, dir_arg):
install_c = 0
# as of pkg-1.1.4, PACKAGESITE is deprecated in favor of repository definitions
# in /usr/local/etc/pkg/repos
old_pkgng = pkgng_older_than(module, pkgng_path, [1, 1, 4])
if pkgsite != "":
if old_pkgng:
pkgsite = "PACKAGESITE=%s" % (pkgsite)
else:
pkgsite = "-r %s" % (pkgsite)
batch_var = 'env BATCH=yes' # This environment variable skips mid-install prompts,
# setting them to their default values.
if not module.check_mode and not cached:
if old_pkgng:
rc, out, err = module.run_command("%s %s update" % (pkgsite, pkgng_path))
else:
rc, out, err = module.run_command("%s %s update" % (pkgng_path, dir_arg))
if rc != 0:
module.fail_json(msg="Could not update catalogue")
for package in packages:
if query_package(module, pkgng_path, package, dir_arg):
continue
if not module.check_mode:
if old_pkgng:
rc, out, err = module.run_command("%s %s %s install -g -U -y %s" % (batch_var, pkgsite, pkgng_path, package))
else:
rc, out, err = module.run_command("%s %s %s install %s -g -U -y %s" % (batch_var, pkgng_path, dir_arg, pkgsite, package))
if not module.check_mode and not query_package(module, pkgng_path, package, dir_arg):
module.fail_json(msg="failed to install %s: %s" % (package, out), stderr=err)
install_c += 1
if install_c > 0:
return (True, "added %s package(s)" % (install_c))
return (False, "package(s) already present")
def annotation_query(module, pkgng_path, package, tag, dir_arg):
rc, out, err = module.run_command("%s %s info -g -A %s" % (pkgng_path, dir_arg, package))
match = re.search(r'^\s*(?P<tag>%s)\s*:\s*(?P<value>\w+)' % tag, out, flags=re.MULTILINE)
if match:
return match.group('value')
return False
def annotation_add(module, pkgng_path, package, tag, value, dir_arg):
_value = annotation_query(module, pkgng_path, package, tag, dir_arg)
if not _value:
# Annotation does not exist, add it.
rc, out, err = module.run_command('%s %s annotate -y -A %s %s "%s"'
% (pkgng_path, dir_arg, package, tag, value))
if rc != 0:
module.fail_json("could not annotate %s: %s"
% (package, out), stderr=err)
return True
elif _value != value:
# Annotation exists, but value differs
module.fail_json(
mgs="failed to annotate %s, because %s is already set to %s, but should be set to %s"
% (package, tag, _value, value))
return False
else:
# Annotation exists, nothing to do
return False
def annotation_delete(module, pkgng_path, package, tag, value, dir_arg):
_value = annotation_query(module, pkgng_path, package, tag, dir_arg)
if _value:
rc, out, err = module.run_command('%s %s annotate -y -D %s %s'
% (pkgng_path, dir_arg, package, tag))
if rc != 0:
module.fail_json("could not delete annotation to %s: %s"
% (package, out), stderr=err)
return True
return False
def annotation_modify(module, pkgng_path, package, tag, value, dir_arg):
_value = annotation_query(module, pkgng_path, package, tag, dir_arg)
    if not _value:
        # No such tag
        module.fail_json(msg="could not change annotation on %s: tag %s does not exist"
% (package, tag))
elif _value == value:
# No change in value
return False
else:
rc,out,err = module.run_command('%s %s annotate -y -M %s %s "%s"'
% (pkgng_path, dir_arg, package, tag, value))
if rc != 0:
module.fail_json("could not change annotation annotation to %s: %s"
% (package, out), stderr=err)
return True
def annotate_packages(module, pkgng_path, packages, annotation, dir_arg):
annotate_c = 0
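    # Split the comma-separated annotation string and parse each item into its
    # operation (+, - or :), tag and optional value for the dispatch below.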
annotations = map(lambda _annotation:
re.match(r'(?P<operation>[\+-:])(?P<tag>\w+)(=(?P<value>\w+))?',
_annotation).groupdict(),
re.split(r',', annotation))
operation = {
'+': annotation_add,
'-': annotation_delete,
':': annotation_modify
}
for package in packages:
for _annotation in annotations:
            if operation[_annotation['operation']](module, pkgng_path, package, _annotation['tag'], _annotation['value'], dir_arg):
annotate_c += 1
if annotate_c > 0:
return (True, "added %s annotations." % annotate_c)
return (False, "changed no annotations")
def autoremove_packages(module, pkgng_path, dir_arg):
rc, out, err = module.run_command("%s %s autoremove -n" % (pkgng_path, dir_arg))
autoremove_c = 0
match = re.search('^Deinstallation has been requested for the following ([0-9]+) packages', out, re.MULTILINE)
if match:
autoremove_c = int(match.group(1))
if autoremove_c == 0:
return False, "no package(s) to autoremove"
if not module.check_mode:
rc, out, err = module.run_command("%s %s autoremove -y" % (pkgng_path, dir_arg))
return True, "autoremoved %d package(s)" % (autoremove_c)
def main():
module = AnsibleModule(
argument_spec = dict(
state = dict(default="present", choices=["present","absent"], required=False),
name = dict(aliases=["pkg"], required=True, type='list'),
cached = dict(default=False, type='bool'),
annotation = dict(default="", required=False),
pkgsite = dict(default="", required=False),
rootdir = dict(default="", required=False, type='path'),
chroot = dict(default="", required=False, type='path'),
autoremove = dict(default=False, type='bool')),
supports_check_mode = True,
mutually_exclusive =[["rootdir", "chroot"]])
pkgng_path = module.get_bin_path('pkg', True)
p = module.params
pkgs = p["name"]
changed = False
msgs = []
dir_arg = ""
if p["rootdir"] != "":
old_pkgng = pkgng_older_than(module, pkgng_path, [1, 5, 0])
if old_pkgng:
module.fail_json(msg="To use option 'rootdir' pkg version must be 1.5 or greater")
else:
dir_arg = "--rootdir %s" % (p["rootdir"])
if p["chroot"] != "":
dir_arg = '--chroot %s' % (p["chroot"])
if p["state"] == "present":
_changed, _msg = install_packages(module, pkgng_path, pkgs, p["cached"], p["pkgsite"], dir_arg)
changed = changed or _changed
msgs.append(_msg)
elif p["state"] == "absent":
_changed, _msg = remove_packages(module, pkgng_path, pkgs, dir_arg)
changed = changed or _changed
msgs.append(_msg)
if p["autoremove"]:
_changed, _msg = autoremove_packages(module, pkgng_path, dir_arg)
changed = changed or _changed
msgs.append(_msg)
if p["annotation"]:
_changed, _msg = annotate_packages(module, pkgng_path, pkgs, p["annotation"], dir_arg)
changed = changed or _changed
msgs.append(_msg)
module.exit_json(changed=changed, msg=", ".join(msgs))
if __name__ == '__main__':
main()

View file

@ -0,0 +1,234 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Alexander Winkler <mail () winkler-alexander.de>
# based on svr4pkg by
# Boyd Adamson <boyd () boydadamson.com> (2012)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: pkgutil
short_description: Manage CSW-Packages on Solaris
description:
- Manages CSW packages (SVR4 format) on Solaris 10 and 11.
- These were the native packages on Solaris <= 10 and are available
as a legacy feature in Solaris 11.
  - Pkgutil is an advanced packaging system that resolves dependencies on installation.
It is designed for CSW packages.
version_added: "1.3"
author: "Alexander Winkler (@dermute)"
options:
name:
description:
- Package name, e.g. (C(CSWnrpe))
required: true
site:
description:
- Specifies the repository path to install the package from.
- Its global definition is done in C(/etc/opt/csw/pkgutil.conf).
required: false
state:
description:
- Whether to install (C(present)), or remove (C(absent)) a package.
- The upgrade (C(latest)) operation will update/install the package to the latest version available.
- "Note: The module has a limitation that (C(latest)) only works for one package, not lists of them."
required: true
choices: ["present", "absent", "latest"]
update_catalog:
description:
- If you want to refresh your catalog from the mirror, set this to (C(yes)).
required: false
default: False
version_added: "2.1"
'''
EXAMPLES = '''
# Install a package
- pkgutil:
name: CSWcommon
state: present
# Install a package from a specific repository
- pkgutil:
name: CSWnrpe
site: 'ftp://myinternal.repo/opencsw/kiel'
state: latest
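# Illustrative sketch only: refresh the catalog from the mirror while
# installing, using the update_catalog option documented above:
- pkgutil:
    name: CSWcommon
    state: present
    update_catalog: yes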
'''
import os
import pipes
def package_installed(module, name):
cmd = ['pkginfo']
cmd.append('-q')
cmd.append(name)
rc, out, err = run_command(module, cmd)
if rc == 0:
return True
else:
return False
def package_latest(module, name, site):
# Only supports one package
cmd = [ 'pkgutil', '-U', '--single', '-c' ]
if site is not None:
cmd += [ '-t', site]
cmd.append(name)
rc, out, err = run_command(module, cmd)
# replace | tail -1 |grep -v SAME
    # use -2, because splitting on \n creates an empty line
# at the end of the list
return 'SAME' in out.split('\n')[-2]
def run_command(module, cmd, **kwargs):
progname = cmd[0]
cmd[0] = module.get_bin_path(progname, True, ['/opt/csw/bin'])
return module.run_command(cmd, **kwargs)
def package_install(module, state, name, site, update_catalog):
cmd = [ 'pkgutil', '-iy' ]
if update_catalog:
cmd += [ '-U' ]
if site is not None:
cmd += [ '-t', site ]
if state == 'latest':
cmd += [ '-f' ]
cmd.append(name)
(rc, out, err) = run_command(module, cmd)
return (rc, out, err)
def package_upgrade(module, name, site, update_catalog):
cmd = [ 'pkgutil', '-ufy' ]
if update_catalog:
cmd += [ '-U' ]
if site is not None:
cmd += [ '-t', site ]
cmd.append(name)
(rc, out, err) = run_command(module, cmd)
return (rc, out, err)
def package_uninstall(module, name):
cmd = [ 'pkgutil', '-ry', name]
(rc, out, err) = run_command(module, cmd)
return (rc, out, err)
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required = True),
state = dict(required = True, choices=['present', 'absent','latest']),
site = dict(default = None),
update_catalog = dict(required = False, default = False, type='bool'),
),
supports_check_mode=True
)
name = module.params['name']
state = module.params['state']
site = module.params['site']
update_catalog = module.params['update_catalog']
rc = None
out = ''
err = ''
result = {}
result['name'] = name
result['state'] = state
if state == 'present':
if not package_installed(module, name):
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = package_install(module, state, name, site, update_catalog)
# Stdout is normally empty but for some packages can be
# very long and is not often useful
if len(out) > 75:
out = out[:75] + '...'
if rc != 0:
if err:
msg = err
else:
msg = out
module.fail_json(msg=msg)
elif state == 'latest':
if not package_installed(module, name):
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = package_install(module, state, name, site, update_catalog)
if len(out) > 75:
out = out[:75] + '...'
if rc != 0:
if err:
msg = err
else:
msg = out
module.fail_json(msg=msg)
else:
if not package_latest(module, name, site):
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = package_upgrade(module, name, site, update_catalog)
if len(out) > 75:
out = out[:75] + '...'
if rc != 0:
if err:
msg = err
else:
msg = out
module.fail_json(msg=msg)
elif state == 'absent':
if package_installed(module, name):
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = package_uninstall(module, name)
if len(out) > 75:
out = out[:75] + '...'
if rc != 0:
if err:
msg = err
else:
msg = out
module.fail_json(msg=msg)
if rc is None:
# pkgutil was not executed because the package was already present/absent
result['changed'] = False
elif rc == 0:
result['changed'] = True
else:
result['changed'] = False
result['failed'] = True
if out:
result['stdout'] = out
if err:
result['stderr'] = err
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,521 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, William L Thomson Jr
# (c) 2013, Yap Sok Ann
# Written by Yap Sok Ann <sokann@gmail.com>
# Modified by William L. Thomson Jr. <wlt@o-sinc.com>
# Based on apt module written by Matthew Williams <matthew@flowroute.com>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: portage
short_description: Package manager for Gentoo
description:
- Manages Gentoo packages
version_added: "1.6"
options:
package:
description:
- Package atom or set, e.g. C(sys-apps/foo) or C(>foo-2.13) or C(@world)
required: false
default: null
state:
description:
- State of the package atom
required: false
default: "present"
choices: [ "present", "installed", "emerged", "absent", "removed", "unmerged" ]
update:
description:
- Update packages to the best version available (--update)
required: false
default: null
choices: [ "yes" ]
deep:
description:
- Consider the entire dependency tree of packages (--deep)
required: false
default: null
choices: [ "yes" ]
newuse:
description:
- Include installed packages where USE flags have changed (--newuse)
required: false
default: null
choices: [ "yes" ]
changed_use:
description:
- Include installed packages where USE flags have changed, except when
- flags that the user has not enabled are added or removed
- (--changed-use)
required: false
default: null
choices: [ "yes" ]
version_added: 1.8
oneshot:
description:
- Do not add the packages to the world file (--oneshot)
required: false
default: False
choices: [ "yes", "no" ]
noreplace:
description:
- Do not re-emerge installed packages (--noreplace)
required: false
default: False
choices: [ "yes", "no" ]
nodeps:
description:
- Only merge packages but not their dependencies (--nodeps)
required: false
default: False
choices: [ "yes", "no" ]
onlydeps:
description:
- Only merge packages' dependencies but not the packages (--onlydeps)
required: false
default: False
choices: [ "yes", "no" ]
depclean:
description:
- Remove packages not needed by explicitly merged packages (--depclean)
- If no package is specified, clean up the world's dependencies
- Otherwise, --depclean serves as a dependency aware version of --unmerge
required: false
default: False
choices: [ "yes", "no" ]
quiet:
description:
- Run emerge in quiet mode (--quiet)
required: false
default: False
choices: [ "yes", "no" ]
verbose:
description:
- Run emerge in verbose mode (--verbose)
required: false
default: False
choices: [ "yes", "no" ]
sync:
description:
- Sync package repositories first
- If yes, perform "emerge --sync"
- If web, perform "emerge-webrsync"
required: false
default: null
choices: [ "yes", "web", "no" ]
getbinpkg:
description:
- Prefer packages specified at PORTAGE_BINHOST in make.conf
required: false
default: False
choices: [ "yes", "no" ]
usepkgonly:
description:
- Merge only binaries (no compiling). This sets getbinpkg=yes.
required: false
default: False
choices: [ "yes", "no" ]
keepgoing:
description:
- Continue as much as possible after an error.
required: false
default: False
choices: [ "yes", "no" ]
version_added: 2.3
jobs:
description:
- Specifies the number of packages to build simultaneously.
required: false
default: None
type: int
version_added: 2.3
loadavg:
description:
- Specifies that no new builds should be started if there are
- other builds running and the load average is at least LOAD
required: false
default: None
type: float
version_added: 2.3
requirements: [ gentoolkit ]
author:
- "William L Thomson Jr (@wltjr)"
- "Yap Sok Ann (@sayap)"
- "Andrew Udvare"
notes: []
'''
EXAMPLES = '''
# Make sure package foo is installed
- portage:
package: foo
state: present
# Make sure package foo is not installed
- portage:
package: foo
state: absent
# Update package foo to the "best" version
- portage:
package: foo
update: yes
# Install package foo using PORTAGE_BINHOST setup
- portage:
package: foo
getbinpkg: yes
# Re-install world from binary packages only and do not allow any compiling
- portage:
    package: '@world'
usepkgonly: yes
# Sync repositories and update world
- portage:
    package: '@world'
update: yes
deep: yes
sync: yes
# Remove unneeded packages
- portage:
depclean: yes
# Remove package foo if it is not explicitly needed
- portage:
package: foo
state: absent
depclean: yes
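# Illustrative sketch only: sync via emerge-webrsync and keep going on build
# errors, using the sync and keepgoing options documented above:
- portage:
    package: foo
    update: yes
    sync: web
    keepgoing: yes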
'''
import os
import pipes
import re
def query_package(module, package, action):
if package.startswith('@'):
return query_set(module, package, action)
return query_atom(module, package, action)
def query_atom(module, atom, action):
cmd = '%s list %s' % (module.equery_path, atom)
rc, out, err = module.run_command(cmd)
return rc == 0
def query_set(module, set, action):
system_sets = [
'@live-rebuild',
'@module-rebuild',
'@preserved-rebuild',
'@security',
'@selected',
'@system',
'@world',
'@x11-module-rebuild',
]
if set in system_sets:
if action == 'unmerge':
module.fail_json(msg='set %s cannot be removed' % set)
return False
world_sets_path = '/var/lib/portage/world_sets'
if not os.path.exists(world_sets_path):
return False
cmd = 'grep %s %s' % (set, world_sets_path)
rc, out, err = module.run_command(cmd)
return rc == 0
def sync_repositories(module, webrsync=False):
if module.check_mode:
module.exit_json(msg='check mode not supported by sync')
if webrsync:
webrsync_path = module.get_bin_path('emerge-webrsync', required=True)
cmd = '%s --quiet' % webrsync_path
else:
cmd = '%s --sync --quiet --ask=n' % module.emerge_path
rc, out, err = module.run_command(cmd)
if rc != 0:
module.fail_json(msg='could not sync package repositories')
# Note: In the 3 functions below, equery is done one-by-one, but emerge is done
# in one go. If that is not desirable, split the packages into multiple tasks
# instead of joining them together with comma.
def emerge_packages(module, packages):
p = module.params
if not (p['update'] or p['noreplace']):
for package in packages:
if not query_package(module, package, 'emerge'):
break
else:
module.exit_json(changed=False, msg='Packages already present.')
if module.check_mode:
module.exit_json(changed=True, msg='Packages would be installed.')
args = []
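    # Translate boolean module parameters into their corresponding emerge
    # command-line flags.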
emerge_flags = {
'update': '--update',
'deep': '--deep',
'newuse': '--newuse',
'changed_use': '--changed-use',
'oneshot': '--oneshot',
'noreplace': '--noreplace',
'nodeps': '--nodeps',
'onlydeps': '--onlydeps',
'quiet': '--quiet',
'verbose': '--verbose',
'getbinpkg': '--getbinpkg',
'usepkgonly': '--usepkgonly',
'usepkg': '--usepkg',
'keepgoing': '--keep-going',
}
for flag, arg in emerge_flags.items():
if p[flag]:
args.append(arg)
if p['usepkg'] and p['usepkgonly']:
module.fail_json(msg='Use only one of usepkg, usepkgonly')
emerge_flags = {
'jobs': '--jobs=',
'loadavg': '--load-average ',
}
for flag, arg in emerge_flags.items():
if p[flag] is not None:
args.append(arg + str(p[flag]))
cmd, (rc, out, err) = run_emerge(module, packages, *args)
if rc != 0:
module.fail_json(
cmd=cmd, rc=rc, stdout=out, stderr=err,
msg='Packages not installed.',
)
# Check for SSH error with PORTAGE_BINHOST, since rc is still 0 despite
# this error
if (p['usepkgonly'] or p['getbinpkg']) \
and 'Permission denied (publickey).' in err:
module.fail_json(
cmd=cmd, rc=rc, stdout=out, stderr=err,
msg='Please check your PORTAGE_BINHOST configuration in make.conf '
'and your SSH authorized_keys file',
)
changed = True
for line in out.splitlines():
if re.match(r'(?:>+) Emerging (?:binary )?\(1 of', line):
msg = 'Packages installed.'
break
elif module.check_mode and re.match(r'\[(binary|ebuild)', line):
msg = 'Packages would be installed.'
break
else:
changed = False
msg = 'No packages installed.'
module.exit_json(
changed=changed, cmd=cmd, rc=rc, stdout=out, stderr=err,
msg=msg,
)
def unmerge_packages(module, packages):
p = module.params
for package in packages:
if query_package(module, package, 'unmerge'):
break
else:
module.exit_json(changed=False, msg='Packages already absent.')
args = ['--unmerge']
for flag in ['quiet', 'verbose']:
if p[flag]:
args.append('--%s' % flag)
cmd, (rc, out, err) = run_emerge(module, packages, *args)
if rc != 0:
module.fail_json(
cmd=cmd, rc=rc, stdout=out, stderr=err,
msg='Packages not removed.',
)
module.exit_json(
changed=True, cmd=cmd, rc=rc, stdout=out, stderr=err,
msg='Packages removed.',
)
def cleanup_packages(module, packages):
p = module.params
if packages:
for package in packages:
if query_package(module, package, 'unmerge'):
break
else:
module.exit_json(changed=False, msg='Packages already absent.')
args = ['--depclean']
for flag in ['quiet', 'verbose']:
if p[flag]:
args.append('--%s' % flag)
cmd, (rc, out, err) = run_emerge(module, packages, *args)
if rc != 0:
module.fail_json(cmd=cmd, rc=rc, stdout=out, stderr=err)
removed = 0
for line in out.splitlines():
if not line.startswith('Number removed:'):
continue
parts = line.split(':')
removed = int(parts[1].strip())
changed = removed > 0
module.exit_json(
changed=changed, cmd=cmd, rc=rc, stdout=out, stderr=err,
msg='Depclean completed.',
)
def run_emerge(module, packages, *args):
args = list(args)
args.append('--ask=n')
if module.check_mode:
args.append('--pretend')
cmd = [module.emerge_path] + args + packages
return cmd, module.run_command(cmd)
portage_present_states = ['present', 'emerged', 'installed']
portage_absent_states = ['absent', 'unmerged', 'removed']
def main():
module = AnsibleModule(
argument_spec=dict(
package=dict(default=None, aliases=['name'], type='list'),
state=dict(
default=portage_present_states[0],
choices=portage_present_states + portage_absent_states,
),
update=dict(default=False, type='bool'),
deep=dict(default=False, type='bool'),
newuse=dict(default=False, type='bool'),
changed_use=dict(default=False, type='bool'),
oneshot=dict(default=False, type='bool'),
noreplace=dict(default=False, type='bool'),
nodeps=dict(default=False, type='bool'),
onlydeps=dict(default=False, type='bool'),
depclean=dict(default=False, type='bool'),
quiet=dict(default=False, type='bool'),
verbose=dict(default=False, type='bool'),
sync=dict(default=None, choices=['yes', 'web']),
getbinpkg=dict(default=False, type='bool'),
usepkgonly=dict(default=False, type='bool'),
usepkg=dict(default=False, type='bool'),
keepgoing=dict(default=False, type='bool'),
jobs=dict(default=None, type='int'),
loadavg=dict(default=None, type='float'),
),
required_one_of=[['package', 'sync', 'depclean']],
mutually_exclusive=[['nodeps', 'onlydeps'], ['quiet', 'verbose']],
supports_check_mode=True,
)
module.emerge_path = module.get_bin_path('emerge', required=True)
module.equery_path = module.get_bin_path('equery', required=True)
p = module.params
if p['sync']:
sync_repositories(module, webrsync=(p['sync'] == 'web'))
if not p['package']:
module.exit_json(msg='Sync successfully finished.')
packages = []
if p['package']:
packages.extend(p['package'])
if p['depclean']:
if packages and p['state'] not in portage_absent_states:
module.fail_json(
msg='Depclean can only be used with package when the state is '
'one of: %s' % portage_absent_states,
)
cleanup_packages(module, packages)
elif p['state'] in portage_present_states:
emerge_packages(module, packages)
elif p['state'] in portage_absent_states:
unmerge_packages(module, packages)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,217 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, berenddeboer
# Written by berenddeboer <berend@pobox.com>
# Based on pkgng module written by bleader <bleader at ratonland.org>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: portinstall
short_description: Installing packages from FreeBSD's ports system
description:
- Manage packages for FreeBSD using 'portinstall'.
version_added: "1.3"
options:
name:
description:
- name of package to install/remove
required: true
state:
description:
- state of the package
choices: [ 'present', 'absent' ]
required: false
default: present
use_packages:
description:
- use packages instead of ports whenever available
choices: [ 'yes', 'no' ]
required: false
default: yes
author: "berenddeboer (@berenddeboer)"
'''
EXAMPLES = '''
# Install package foo
- portinstall:
name: foo
state: present
# Install package security/cyrus-sasl2-saslauthd
- portinstall:
name: security/cyrus-sasl2-saslauthd
state: present
# Remove packages foo and bar
- portinstall:
name: foo,bar
state: absent
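# Illustrative sketch only: build from ports instead of using prebuilt
# packages, using the use_packages option documented above:
- portinstall:
    name: foo
    state: present
    use_packages: no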
'''
import os
import pipes
import re
import shlex
import sys
def query_package(module, name):
pkg_info_path = module.get_bin_path('pkg_info', False)
# Assume that if we have pkg_info, we haven't upgraded to pkgng
if pkg_info_path:
pkgng = False
pkg_glob_path = module.get_bin_path('pkg_glob', True)
rc, out, err = module.run_command("%s -e `pkg_glob %s`" % (pkg_info_path, pipes.quote(name)), use_unsafe_shell=True)
else:
pkgng = True
pkg_info_path = module.get_bin_path('pkg', True)
pkg_info_path = pkg_info_path + " info"
rc, out, err = module.run_command("%s %s" % (pkg_info_path, name))
found = rc == 0
if not found:
# databases/mysql55-client installs as mysql-client, so try solving
        # that the ugly way. Pity FreeBSD doesn't have a foolproof way of checking
        # whether a package is installed
name_without_digits = re.sub('[0-9]', '', name)
if name != name_without_digits:
if pkgng:
rc, out, err = module.run_command("%s %s" % (pkg_info_path, name_without_digits))
else:
rc, out, err = module.run_command("%s %s" % (pkg_info_path, name_without_digits))
found = rc == 0
return found
def matching_packages(module, name):
ports_glob_path = module.get_bin_path('ports_glob', True)
rc, out, err = module.run_command("%s %s" % (ports_glob_path, name))
    # count the number of matching packages found
occurrences = out.count('\n')
if occurrences == 0:
name_without_digits = re.sub('[0-9]', '', name)
if name != name_without_digits:
rc, out, err = module.run_command("%s %s" % (ports_glob_path, name_without_digits))
occurrences = out.count('\n')
return occurrences
def remove_packages(module, packages):
remove_c = 0
pkg_glob_path = module.get_bin_path('pkg_glob', True)
# If pkg_delete not found, we assume pkgng
pkg_delete_path = module.get_bin_path('pkg_delete', False)
if not pkg_delete_path:
pkg_delete_path = module.get_bin_path('pkg', True)
pkg_delete_path = pkg_delete_path + " delete -y"
    # Using a for loop in case of error, we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
if not query_package(module, package):
continue
rc, out, err = module.run_command("%s `%s %s`" % (pkg_delete_path, pkg_glob_path, pipes.quote(package)), use_unsafe_shell=True)
if query_package(module, package):
name_without_digits = re.sub('[0-9]', '', package)
rc, out, err = module.run_command("%s `%s %s`" % (pkg_delete_path, pkg_glob_path, pipes.quote(name_without_digits)),use_unsafe_shell=True)
if query_package(module, package):
module.fail_json(msg="failed to remove %s: %s" % (package, out))
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, packages, use_packages):
install_c = 0
# If portinstall not found, automagically install
portinstall_path = module.get_bin_path('portinstall', False)
if not portinstall_path:
pkg_path = module.get_bin_path('pkg', False)
if pkg_path:
module.run_command("pkg install -y portupgrade")
portinstall_path = module.get_bin_path('portinstall', True)
if use_packages == "yes":
portinstall_params="--use-packages"
else:
portinstall_params=""
for package in packages:
if query_package(module, package):
continue
# TODO: check how many match
matches = matching_packages(module, package)
if matches == 1:
rc, out, err = module.run_command("%s --batch %s %s" % (portinstall_path, portinstall_params, package))
if not query_package(module, package):
module.fail_json(msg="failed to install %s: %s" % (package, out))
elif matches == 0:
module.fail_json(msg="no matches for package %s" % (package))
else:
module.fail_json(msg="%s matches found for package name %s" % (matches, package))
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg="present %s package(s)" % (install_c))
module.exit_json(changed=False, msg="package(s) already present")
def main():
module = AnsibleModule(
argument_spec = dict(
state = dict(default="present", choices=["present","absent"]),
name = dict(aliases=["pkg"], required=True),
use_packages = dict(type='bool', default='yes')))
p = module.params
pkgs = p["name"].split(",")
if p["state"] == "present":
install_packages(module, pkgs, p["use_packages"])
elif p["state"] == "absent":
remove_packages(module, pkgs)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,209 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Kim Nørgaard
# Written by Kim Nørgaard <jasen@jasen.dk>
# Based on pkgng module written by bleader <bleader@ratonland.org>
# that was based on pkgin module written by Shaun Zinck <shaun.zinck at gmail.com>
# that was based on pacman module written by Afterburn <http://github.com/afterburn>
# that was based on apt module written by Matthew Williams <matthew@flowroute.com>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: slackpkg
short_description: Package manager for Slackware >= 12.2
description:
- Manage binary packages for Slackware using 'slackpkg' which
is available in versions after 12.2.
version_added: "2.0"
options:
name:
description:
- name of package to install/remove
required: true
state:
description:
- State of the package. You can use C(installed) as an alias for C(present) and C(removed) as one for C(absent).
choices: [ 'present', 'absent', 'latest' ]
required: false
default: present
update_cache:
description:
- update the package database first
required: false
default: false
choices: [ true, false ]
author: Kim Nørgaard (@KimNorgaard)
requirements: [ "Slackware >= 12.2" ]
'''
EXAMPLES = '''
# Install package foo
- slackpkg:
name: foo
state: present
# Remove packages foo and bar
- slackpkg:
name: foo,bar
state: absent
# Make sure that it is the most updated package
- slackpkg:
name: foo
state: latest
'''
def query_package(module, slackpkg_path, name):
import glob
import platform
machine = platform.machine()
packages = glob.glob("/var/log/packages/%s-*-[%s|noarch]*" % (name,
machine))
if len(packages) > 0:
return True
return False
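# Illustrative note (assumption, not from the original source): on an x86_64
# host the glob above would match package log entries such as
#   /var/log/packages/foo-1.2.3-x86_64-1
# or a corresponding noarch build, so a non-empty result means the
# (hypothetical) package "foo" is installed.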
def remove_packages(module, slackpkg_path, packages):
remove_c = 0
# Using a for loop so that, in case of error, we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
if not query_package(module, slackpkg_path, package):
continue
if not module.check_mode:
rc, out, err = module.run_command("%s -default_answer=y -batch=on \
remove %s" % (slackpkg_path,
package))
if not module.check_mode and query_package(module, slackpkg_path,
package):
module.fail_json(msg="failed to remove %s: %s" % (package, out))
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, slackpkg_path, packages):
install_c = 0
for package in packages:
if query_package(module, slackpkg_path, package):
continue
if not module.check_mode:
rc, out, err = module.run_command("%s -default_answer=y -batch=on \
install %s" % (slackpkg_path,
package))
if not module.check_mode and not query_package(module, slackpkg_path,
package):
module.fail_json(msg="failed to install %s: %s" % (package, out),
stderr=err)
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg="present %s package(s)"
% (install_c))
module.exit_json(changed=False, msg="package(s) already present")
def upgrade_packages(module, slackpkg_path, packages):
install_c = 0
for package in packages:
if not module.check_mode:
rc, out, err = module.run_command("%s -default_answer=y -batch=on \
upgrade %s" % (slackpkg_path,
package))
if not module.check_mode and not query_package(module, slackpkg_path,
package):
module.fail_json(msg="failed to install %s: %s" % (package, out),
stderr=err)
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg="present %s package(s)"
% (install_c))
module.exit_json(changed=False, msg="package(s) already present")
def update_cache(module, slackpkg_path):
rc, out, err = module.run_command("%s -batch=on update" % (slackpkg_path))
if rc != 0:
module.fail_json(msg="Could not update package cache")
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(default="installed", choices=['installed', 'removed', 'absent', 'present', 'latest']),
name=dict(aliases=["pkg"], required=True, type='list'),
update_cache=dict(default=False, aliases=["update-cache"],
type='bool'),
),
supports_check_mode=True)
slackpkg_path = module.get_bin_path('slackpkg', True)
p = module.params
pkgs = p['name']
if p["update_cache"]:
update_cache(module, slackpkg_path)
if p['state'] == 'latest':
upgrade_packages(module, slackpkg_path, pkgs)
elif p['state'] in ['present', 'installed']:
install_packages(module, slackpkg_path, pkgs)
elif p["state"] in ['removed', 'absent']:
remove_packages(module, slackpkg_path, pkgs)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,267 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Boyd Adamson <boyd () boydadamson.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: svr4pkg
short_description: Manage Solaris SVR4 packages
description:
- Manages SVR4 packages on Solaris 10 and 11.
- These were the native packages on Solaris <= 10 and are available
as a legacy feature in Solaris 11.
- Note that this is a very basic packaging system. It will not enforce
dependencies on install or remove.
version_added: "0.9"
author: "Boyd Adamson (@brontitall)"
options:
name:
description:
- Package name, e.g. C(SUNWcsr)
required: true
state:
description:
- Whether to install (C(present)), or remove (C(absent)) a package.
- If the package is to be installed, then I(src) is required.
- The SVR4 package system doesn't provide an upgrade operation. You need to uninstall the old, then install the new package.
required: true
choices: ["present", "absent"]
src:
description:
- Specifies the location to install the package from. Required when C(state=present).
- "Can be any path acceptable to the C(pkgadd) command's C(-d) option. e.g.: C(somefile.pkg), C(/dir/with/pkgs), C(http:/server/mypkgs.pkg)."
- If using a file or directory, they must already be accessible by the host. See the M(copy) module for a way to get them there.
proxy:
description:
- HTTP[s] proxy to be used if C(src) is a URL.
response_file:
description:
- Specifies the location of a response file to be used if package expects input on install. (added in Ansible 1.4)
required: false
zone:
description:
- Whether to install the package only in the current zone, or install it into all zones.
- The installation into all zones works only if you are working with the global zone.
required: false
default: "all"
choices: ["current", "all"]
version_added: "1.6"
category:
description:
- Install/Remove category instead of a single package.
required: false
choices: ["true", "false"]
version_added: "1.6"
'''
EXAMPLES = '''
# Install a package from an already copied file
- svr4pkg:
name: CSWcommon
src: /tmp/cswpkgs.pkg
state: present
# Install a package directly from an http site
- svr4pkg:
name: CSWpkgutil
src: 'http://get.opencsw.org/now'
state: present
zone: current
# Install a package with a response file
- svr4pkg:
name: CSWggrep
src: /tmp/third-party.pkg
response_file: /tmp/ggrep.response
state: present
# Ensure that a package is not installed.
- svr4pkg:
name: SUNWgnome-sound-recorder
state: absent
# Ensure that a category is not installed.
- svr4pkg:
name: FIREFOX
state: absent
category: true
'''
import os
import tempfile
def package_installed(module, name, category):
cmd = [module.get_bin_path('pkginfo', True)]
cmd.append('-q')
if category:
cmd.append('-c')
cmd.append(name)
rc, out, err = module.run_command(' '.join(cmd))
if rc == 0:
return True
else:
return False
def create_admin_file():
(desc, filename) = tempfile.mkstemp(prefix='ansible_svr4pkg', text=True)
fullauto = '''
mail=
instance=unique
partial=nocheck
runlevel=quit
idepend=nocheck
rdepend=nocheck
space=quit
setuid=nocheck
conflict=nocheck
action=nocheck
networktimeout=60
networkretries=3
authentication=quit
keystore=/var/sadm/security
proxy=
basedir=default
'''
os.write(desc, fullauto)
os.close(desc)
return filename
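# Descriptive note (added for clarity, not in the original): the temporary
# admin file written above pre-answers every pkgadd/pkgrm prompt (dependencies,
# setuid, conflicts, ...) so the package tools can run non-interactively; it is
# passed to them below via the -a option and deleted afterwards.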
def run_command(module, cmd):
progname = cmd[0]
cmd[0] = module.get_bin_path(progname, True)
return module.run_command(cmd)
def package_install(module, name, src, proxy, response_file, zone, category):
adminfile = create_admin_file()
cmd = [ 'pkgadd', '-n']
if zone == 'current':
cmd += [ '-G' ]
cmd += [ '-a', adminfile, '-d', src ]
if proxy is not None:
cmd += [ '-x', proxy ]
if response_file is not None:
cmd += [ '-r', response_file ]
if category:
cmd += [ '-Y' ]
cmd.append(name)
(rc, out, err) = run_command(module, cmd)
os.unlink(adminfile)
return (rc, out, err)
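# Illustrative sketch (assumption): for the first EXAMPLES entry above
# (name=CSWcommon, src=/tmp/cswpkgs.pkg, zone=all, no proxy/response/category)
# the assembled command would be roughly
#   pkgadd -n -a <temporary admin file> -d /tmp/cswpkgs.pkg CSWcommon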
def package_uninstall(module, name, src, category):
adminfile = create_admin_file()
if category:
cmd = [ 'pkgrm', '-na', adminfile, '-Y', name ]
else:
cmd = [ 'pkgrm', '-na', adminfile, name]
(rc, out, err) = run_command(module, cmd)
os.unlink(adminfile)
return (rc, out, err)
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required = True),
state = dict(required = True, choices=['present', 'absent']),
src = dict(default = None),
proxy = dict(default = None),
response_file = dict(default = None),
zone = dict(required=False, default = 'all', choices=['current','all']),
category = dict(default=False, type='bool')
),
supports_check_mode=True
)
state = module.params['state']
name = module.params['name']
src = module.params['src']
proxy = module.params['proxy']
response_file = module.params['response_file']
zone = module.params['zone']
category = module.params['category']
rc = None
out = ''
err = ''
result = {}
result['name'] = name
result['state'] = state
if state == 'present':
if src is None:
module.fail_json(name=name,
msg="src is required when state=present")
if not package_installed(module, name, category):
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = package_install(module, name, src, proxy, response_file, zone, category)
# Stdout is normally empty but for some packages can be
# very long and is not often useful
if len(out) > 75:
out = out[:75] + '...'
elif state == 'absent':
if package_installed(module, name, category):
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = package_uninstall(module, name, src, category)
out = out[:75]
# Returncodes as per pkgadd(1m)
# 0 Successful completion
# 1 Fatal error.
# 2 Warning.
# 3 Interruption.
# 4 Administration.
# 5 Administration. Interaction is required. Do not use pkgadd -n.
# 10 Reboot after installation of all packages.
# 20 Reboot after installation of this package.
# 99 (observed) pkgadd: ERROR: could not process datastream from </tmp/pkgutil.pkg>
if rc in (0, 2, 3, 10, 20):
result['changed'] = True
# no install nor uninstall, or failed
else:
result['changed'] = False
# rc will be none when the package already was installed and no action took place
# Only return failed=False when the returncode is known to be good as there may be more
# undocumented failure return codes
if rc not in (None, 0, 2, 10, 20):
result['failed'] = True
else:
result['failed'] = False
if out:
result['stdout'] = out
if err:
result['stderr'] = err
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,210 @@
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# (c) 2013, Raul Melo
# Written by Raul Melo <raulmelo@gmail.com>
# Based on yum module written by Seth Vidal <skvidal at fedoraproject.org>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import re
import pipes
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: swdepot
short_description: Manage packages with swdepot package manager (HP-UX)
description:
- Will install, upgrade and remove packages with swdepot package manager (HP-UX)
version_added: "1.4"
notes: []
author: "Raul Melo (@melodous)"
options:
name:
description:
- package name.
required: true
default: null
choices: []
aliases: []
version_added: 1.4
state:
description:
- whether to install (C(present), C(latest)), or remove (C(absent)) a package.
required: true
default: null
choices: [ 'present', 'latest', 'absent']
aliases: []
version_added: 1.4
depot:
description:
- The source repository from which install or upgrade a package.
required: false
default: null
choices: []
aliases: []
version_added: 1.4
'''
EXAMPLES = '''
- swdepot:
name: unzip-6.0
state: installed
depot: 'repository:/path'
- swdepot:
name: unzip
state: latest
depot: 'repository:/path'
- swdepot:
name: unzip
state: absent
'''
def compare_package(version1, version2):
""" Compare version packages.
Return values:
-1 first minor
0 equal
1 fisrt greater """
def normalize(v):
return [int(x) for x in re.sub(r'(\.0+)*$', '', v).split(".")]
return cmp(normalize(version1), normalize(version2))
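# Illustrative examples (added, not in the original; note that cmp() is Python 2 only):
#   normalize('1.2.0')              -> [1, 2]   (trailing ".0" stripped first)
#   compare_package('1.2', '1.10')  -> -1       (1.10 is the newer version)
#   compare_package('6.0', '6.0.0') -> 0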
def query_package(module, name, depot=None):
""" Returns whether a package is installed or not and version. """
cmd_list = '/usr/sbin/swlist -a revision -l product'
if depot:
rc, stdout, stderr = module.run_command("%s -s %s %s | grep %s" % (cmd_list, pipes.quote(depot), pipes.quote(name), pipes.quote(name)), use_unsafe_shell=True)
else:
rc, stdout, stderr = module.run_command("%s %s | grep %s" % (cmd_list, pipes.quote(name), pipes.quote(name)), use_unsafe_shell=True)
if rc == 0:
version = re.sub(r"\s\s+|\t", " ", stdout).strip().split()[1]
else:
version = None
return rc, version
def remove_package(module, name):
""" Uninstall package if installed. """
cmd_remove = '/usr/sbin/swremove'
rc, stdout, stderr = module.run_command("%s %s" % (cmd_remove, name))
if rc == 0:
return rc, stdout
else:
return rc, stderr
def install_package(module, depot, name):
""" Install package if not already installed """
cmd_install = '/usr/sbin/swinstall -x mount_all_filesystems=false'
rc, stdout, stderr = module.run_command("%s -s %s %s" % (cmd_install, depot, name))
if rc == 0:
return rc, stdout
else:
return rc, stderr
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(aliases=['pkg'], required=True),
state = dict(choices=['present', 'absent', 'latest'], required=True),
depot = dict(default=None, required=False)
),
supports_check_mode=True
)
name = module.params['name']
state = module.params['state']
depot = module.params['depot']
changed = False
msg = "No changed"
rc = 0
if state in ('present', 'latest') and depot is None:
output = "depot parameter is mandatory when state is present or latest"
module.fail_json(name=name, msg=output, rc=rc)
#Check local version
rc, version_installed = query_package(module, name)
if not rc:
installed = True
msg = "Already installed"
else:
installed = False
if state in ('present', 'latest') and not installed:
if module.check_mode:
module.exit_json(changed=True)
rc, output = install_package(module, depot, name)
if not rc:
changed = True
msg = "Package installed"
else:
module.fail_json(name=name, msg=output, rc=rc)
elif state == 'latest' and installed:
#Check depot version
rc, version_depot = query_package(module, name, depot)
if not rc:
if compare_package(version_installed,version_depot) == -1:
if module.check_mode:
module.exit_json(changed=True)
#Install new version
rc, output = install_package(module, depot, name)
if not rc:
msg = "Packge upgraded, Before " + version_installed + " Now " + version_depot
changed = True
else:
module.fail_json(name=name, msg=output, rc=rc)
else:
output = "Software package not in repository " + depot
module.fail_json(name=name, msg=output, rc=rc)
elif state == 'absent' and installed:
if module.check_mode:
module.exit_json(changed=True)
rc, output = remove_package(module, name)
if not rc:
changed = True
msg = "Package removed"
else:
module.fail_json(name=name, msg=output, rc=rc)
if module.check_mode:
module.exit_json(changed=False)
module.exit_json(changed=changed, name=name, state=state, msg=msg)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,217 @@
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# (c) 2013, Philippe Makowski
# Written by Philippe Makowski <philippem@mageia.org>
# Based on apt module written by Matthew Williams <matthew@flowroute.com>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: urpmi
short_description: Urpmi manager
description:
- Manages packages with I(urpmi) (such as for Mageia or Mandriva)
version_added: "1.3.4"
options:
pkg:
description:
- name of package to install, upgrade or remove.
required: true
default: null
state:
description:
- Indicates the desired package state
required: false
default: present
choices: [ "absent", "present" ]
update_cache:
description:
- Update the package database first (C(urpmi.update -a)).
required: false
default: no
choices: [ "yes", "no" ]
no_recommends:
description:
- Corresponds to the C(--no-recommends) option for I(urpmi).
required: false
default: yes
choices: [ "yes", "no" ]
force:
description:
- Assume "yes" is the answer to any question urpmi has to ask.
Corresponds to the C(--force) option for I(urpmi).
required: false
default: yes
choices: [ "yes", "no" ]
author: "Philippe Makowski (@pmakowski)"
notes: []
'''
EXAMPLES = '''
# install package foo
- urpmi:
pkg: foo
state: present
# remove package foo
- urpmi:
pkg: foo
state: absent
# description: remove packages foo and bar
- urpmi:
pkg: foo,bar
state: absent
# description: update the package database (urpmi.update -a -q) and install bar (bar will be updated if a newer version exists)
- urpmi:
name: bar
state: present
update_cache: yes
'''
import shlex
import os
import sys
URPMI_PATH = '/usr/sbin/urpmi'
URPME_PATH = '/usr/sbin/urpme'
def query_package(module, name):
# rpm -q returns 0 if the package is installed,
# 1 if it is not installed
cmd = "rpm -q %s" % (name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
return True
else:
return False
def query_package_provides(module, name):
# rpm -q returns 0 if the package is installed,
# 1 if it is not installed
cmd = "rpm -q --provides %s" % (name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
return rc == 0
def update_package_db(module):
cmd = "urpmi.update -a -q"
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="could not update package db")
def remove_packages(module, packages):
remove_c = 0
# Using a for loop so that, in case of error, we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
if not query_package(module, package):
continue
cmd = "%s --auto %s" % (URPME_PATH, package)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to remove %s" % (package))
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, pkgspec, force=True, no_recommends=True):
packages = ""
for package in pkgspec:
if not query_package_provides(module, package):
packages += "'%s' " % package
if len(packages) != 0:
if no_recommends:
no_recommends_yes = '--no-recommends'
else:
no_recommends_yes = ''
if force:
force_yes = '--force'
else:
force_yes = ''
cmd = ("%s --auto %s --quiet %s %s" % (URPMI_PATH, force_yes, no_recommends_yes, packages))
rc, out, err = module.run_command(cmd)
installed = True
for package in pkgspec:
if not query_package_provides(module, package):
installed = False
# urpmi always have 0 for exit code if --force is used
if rc or not installed:
module.fail_json(msg="'urpmi %s' failed: %s" % (packages, err))
else:
module.exit_json(changed=True, msg="%s present(s)" % packages)
else:
module.exit_json(changed=False)
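# Illustrative note (assumption): with the module defaults (force=yes,
# no_recommends=yes) the command built above for two missing packages would be
# approximately
#   /usr/sbin/urpmi --auto --force --quiet --no-recommends 'foo' 'bar'
# with foo/bar standing in for the real package names.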
def main():
module = AnsibleModule(
argument_spec = dict(
state = dict(default='installed', choices=['installed', 'removed', 'absent', 'present']),
update_cache = dict(default=False, aliases=['update-cache'], type='bool'),
force = dict(default=True, type='bool'),
no_recommends = dict(default=True, aliases=['no-recommends'], type='bool'),
package = dict(aliases=['pkg', 'name'], required=True)))
if not os.path.exists(URPMI_PATH):
module.fail_json(msg="cannot find urpmi, looking for %s" % (URPMI_PATH))
p = module.params
force_yes = p['force']
no_recommends_yes = p['no_recommends']
if p['update_cache']:
update_package_db(module)
packages = p['package'].split(',')
if p['state'] in [ 'installed', 'present' ]:
install_packages(module, packages, force_yes, no_recommends_yes)
elif p['state'] in [ 'removed', 'absent' ]:
remove_packages(module, packages)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()

View file

@ -0,0 +1,303 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2016 Dino Occhialini <dino.occhialini@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: xbps
short_description: Manage packages with XBPS
description:
- Manage packages with the XBPS package manager.
author:
- "Dino Occhialini (@dinoocch)"
- "Michael Aldridge (@the-maldridge)"
version_added: "2.3"
options:
name:
description:
- Name of the package to install, upgrade, or remove.
required: false
default: null
state:
description:
- Desired state of the package.
required: false
default: "present"
choices: ["present", "absent", "latest"]
recurse:
description:
- When removing a package, also remove its dependencies, provided
that they are not required by other packages and were not
explicitly installed by a user.
required: false
default: no
choices: ["yes", "no"]
update_cache:
description:
- Whether or not to refresh the master package lists. This can be
run as part of a package installation or as a separate step.
required: false
default: yes
choices: ["yes", "no"]
upgrade:
description:
- Whether or not to upgrade whole system
required: false
default: no
choices: ["yes", "no"]
'''
EXAMPLES = '''
# Install package foo
- xbps: name=foo state=present
# Upgrade package foo
- xbps: name=foo state=latest update_cache=yes
# Remove packages foo and bar
- xbps: name=foo,bar state=absent
# Recursively remove package foo
- xbps: name=foo state=absent recurse=yes
# Update package cache
- xbps: update_cache=yes
# Upgrade packages
- xbps: upgrade=yes
'''
RETURN = '''
msg:
description: Message about results
returned: success
type: string
sample: "System Upgraded"
packages:
description: Packages that are affected/would be affected
type: list
sample: ["ansible"]
'''
import os
from ansible.module_utils.basic import AnsibleModule
def is_installed(xbps_output):
"""Returns package install state"""
return bool(len(xbps_output))
def query_package(module, xbps_path, name, state="present"):
"""Returns Package info"""
if state == "present":
lcmd = "%s %s" % (xbps_path['query'], name)
lrc, lstdout, lstderr = module.run_command(lcmd, check_rc=False)
if not is_installed(lstdout):
# package is not installed locally
return False, False
rcmd = "%s -Sun" % (xbps_path['install'])
rrc, rstdout, rstderr = module.run_command(rcmd, check_rc=False)
if rrc == 0 or rrc == 17:
"""Return True to indicate that the package is installed locally,
and the result of the version number comparison to determine if the
package is up-to-date"""
return True, name not in rstdout
return False, False
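# Illustrative note (added, not in the original): query_package() returns a pair
# (installed, up_to_date). For a hypothetical installed and current package
# "nano" it would return (True, True); for one that xbps-install -Sun lists as
# upgradable it would return (True, False); for an absent package, (False, False).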
def update_package_db(module, xbps_path):
"""Returns True if update_package_db changed"""
cmd = "%s -S" % (xbps_path['install'])
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="Could not update package db")
if "avg rate" in stdout:
return True
else:
return False
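# Descriptive note (added): the code above treats the presence of "avg rate" in
# the xbps-install -S output as a heuristic that new repository data was
# actually downloaded, and only then reports the cache update as a change.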
def upgrade(module, xbps_path):
"""Returns true is full upgrade succeeds"""
cmdupgrade = "%s -uy" % (xbps_path['install'])
cmdneedupgrade = "%s -un" % (xbps_path['install'])
rc, stdout, stderr = module.run_command(cmdneedupgrade, check_rc=False)
if rc == 0:
if len(stdout.splitlines()) == 0:
module.exit_json(changed=False, msg='Nothing to upgrade')
else:
rc, stdout, stderr = module.run_command(cmdupgrade, check_rc=False)
if rc == 0:
module.exit_json(changed=True, msg='System upgraded')
else:
module.fail_json(msg="Could not upgrade")
else:
module.fail_json(msg="Could not upgrade")
def remove_packages(module, xbps_path, packages):
"""Returns true if package removal succeeds"""
changed_packages = []
# Using a for loop so that, in case of error, we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
installed, updated = query_package(module, xbps_path, package)
if not installed:
continue
cmd = "%s -y %s" % (xbps_path['remove'], package)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to remove %s" % (package))
changed_packages.append(package)
if len(changed_packages) > 0:
module.exit_json(changed=True, msg="removed %s package(s)" %
len(changed_packages), packages=changed_packages)
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, xbps_path, state, packages):
"""Returns true if package install succeeds."""
toInstall = []
for i, package in enumerate(packages):
"""If the package is installed and state == present or state == latest
and is up-to-date then skip"""
installed, updated = query_package(module, xbps_path, package)
if installed and (state == 'present' or
(state == 'latest' and updated)):
continue
toInstall.append(package)
if len(toInstall) == 0:
module.exit_json(changed=False, msg="Nothing to Install")
cmd = "%s -y %s" % (xbps_path['install'], " ".join(toInstall))
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0 and not (state == 'latest' and rc == 17):
module.fail_json(msg="failed to install %s" % (package))
module.exit_json(changed=True, msg="installed %s package(s)"
% (len(toInstall)),
packages=toInstall)
module.exit_json(changed=False, msg="package(s) already installed",
packages=[])
def check_packages(module, xbps_path, packages, state):
"""Returns change status of command"""
would_be_changed = []
for package in packages:
installed, updated = query_package(module, xbps_path, package)
if ((state in ["present", "latest"] and not installed) or
(state == "absent" and installed) or
(state == "latest" and not updated)):
would_be_changed.append(package)
if would_be_changed:
if state == "absent":
state = "removed"
module.exit_json(changed=True, msg="%s package(s) would be %s" % (
len(would_be_changed), state),
packages=would_be_changed)
else:
module.exit_json(changed=False, msg="package(s) already %s" % state,
packages=[])
def main():
"""Returns, calling appropriate command"""
module = AnsibleModule(
argument_spec=dict(
name=dict(default=None, aliases=['pkg', 'package'], type='list'),
state=dict(default='present', choices=['present', 'installed',
'latest', 'absent',
'removed']),
recurse=dict(default=False, type='bool'),
force=dict(default=False, type='bool'),
upgrade=dict(default=False, type='bool'),
update_cache=dict(default=True, aliases=['update-cache'],
type='bool')
),
required_one_of=[['name', 'update_cache', 'upgrade']],
supports_check_mode=True)
xbps_path = dict()
xbps_path['install'] = module.get_bin_path('xbps-install', True)
xbps_path['query'] = module.get_bin_path('xbps-query', True)
xbps_path['remove'] = module.get_bin_path('xbps-remove', True)
if not os.path.exists(xbps_path['install']):
module.fail_json(msg="cannot find xbps, in path %s"
% (xbps_path['install']))
p = module.params
# normalize the state parameter
if p['state'] in ['present', 'installed']:
p['state'] = 'present'
elif p['state'] in ['absent', 'removed']:
p['state'] = 'absent'
if p["update_cache"] and not module.check_mode:
changed = update_package_db(module, xbps_path)
if p['name'] is None and not p['upgrade']:
if changed:
module.exit_json(changed=True,
msg='Updated the package master lists')
else:
module.exit_json(changed=False,
msg='Package list already up to date')
if (p['update_cache'] and module.check_mode and not
(p['name'] or p['upgrade'])):
module.exit_json(changed=True,
msg='Would have updated the package cache')
if p['upgrade']:
upgrade(module, xbps_path)
if p['name']:
pkgs = p['name']
if module.check_mode:
check_packages(module, xbps_path, pkgs, p['state'])
if p['state'] in ['present', 'latest']:
install_packages(module, xbps_path, p['state'], pkgs)
elif p['state'] == 'absent':
remove_packages(module, xbps_path, pkgs)
if __name__ == "__main__":
main()

View file

@ -0,0 +1,761 @@
#!/usr/bin/python
# encoding: utf-8
# (c) 2015-2016, Jiri Tyr <jiri.tyr@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.six.moves import configparser
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'core',
'version': '1.0'}
DOCUMENTATION = '''
---
module: yum_repository
author: Jiri Tyr (@jtyr)
version_added: '2.1'
short_description: Add and remove YUM repositories
description:
- Add or remove YUM repositories in RPM-based Linux distributions.
options:
async:
required: false
choices: ['yes', 'no']
default: 'yes'
description:
- If set to C(yes) Yum will download packages and metadata from this
repo in parallel, if possible.
bandwidth:
required: false
default: 0
description:
- Maximum available network bandwidth in bytes/second. Used with the
I(throttle) option.
- If I(throttle) is a percentage and bandwidth is C(0) then bandwidth
throttling will be disabled. If I(throttle) is expressed as a data rate
(bytes/sec) then this option is ignored. Default is C(0) (no bandwidth
throttling).
baseurl:
required: false
default: null
description:
- URL to the directory where the yum repository's 'repodata' directory
lives.
- This or the I(mirrorlist) parameter is required if I(state) is set to
C(present).
cost:
required: false
default: 1000
description:
- Relative cost of accessing this repository. Useful for weighing one
repo's packages as greater/less than any other.
deltarpm_metadata_percentage:
required: false
default: 100
description:
- When the relative size of deltarpm metadata vs pkgs is larger than
this, deltarpm metadata is not downloaded from the repo. Note that you
can give values over C(100), so C(200) means that the metadata is
required to be half the size of the packages. Use C(0) to turn off
this check, and always download metadata.
deltarpm_percentage:
required: false
default: 75
description:
- When the relative size of delta vs pkg is larger than this, delta is
not used. Use C(0) to turn off delta rpm processing. Local repositories
(with file:// I(baseurl)) have delta rpms turned off by default.
description:
required: false
default: null
description:
- A human readable string describing the repository.
- This parameter is only required if I(state) is set to C(present).
enabled:
required: false
choices: ['yes', 'no']
default: 'yes'
description:
- This tells yum whether or not to use this repository.
enablegroups:
required: false
choices: ['yes', 'no']
default: 'yes'
description:
- Determines whether yum will allow the use of package groups for this
repository.
exclude:
required: false
default: null
description:
- List of packages to exclude from updates or installs. This should be a
space separated list. Shell globs using wildcards (eg. C(*) and C(?))
are allowed.
- The list can also be a regular YAML array.
failovermethod:
required: false
choices: [roundrobin, priority]
default: roundrobin
description:
- C(roundrobin) randomly selects a URL out of the list of URLs to start
with and proceeds through each of them as it encounters a failure
contacting the host.
- C(priority) starts from the first I(baseurl) listed and reads through
them sequentially.
file:
required: false
default: null
description:
- File to use to save the repo in. Defaults to the value of I(name).
gpgcakey:
required: false
default: null
description:
- A URL pointing to the ASCII-armored CA key file for the repository.
gpgcheck:
required: false
choices: ['yes', 'no']
default: 'no'
description:
- Tells yum whether or not it should perform a GPG signature check on
packages.
gpgkey:
required: false
default: null
description:
- A URL pointing to the ASCII-armored GPG key file for the repository.
http_caching:
required: false
choices: [all, packages, none]
default: all
description:
- Determines how upstream HTTP caches are instructed to handle any HTTP
downloads that Yum does.
- C(all) means that all HTTP downloads should be cached.
- C(packages) means that only RPM package downloads should be cached (but
not repository metadata downloads).
- C(none) means that no HTTP downloads should be cached.
include:
required: false
default: null
description:
- Include external configuration file. Both, local path and URL is
supported. Configuration file will be inserted at the position of the
I(include=) line. Included files may contain further include lines.
Yum will abort with an error if an inclusion loop is detected.
includepkgs:
required: false
default: null
description:
- List of packages you want to only use from a repository. This should be
a space separated list. Shell globs using wildcards (eg. C(*) and C(?))
are allowed. Substitution variables (e.g. C($releasever)) are honored
here.
- The list can also be a regular YAML array.
ip_resolve:
required: false
choices: [4, 6, IPv4, IPv6, whatever]
default: whatever
description:
- Determines how yum resolves host names.
- C(4) or C(IPv4) - resolve to IPv4 addresses only.
- C(6) or C(IPv6) - resolve to IPv6 addresses only.
keepalive:
required: false
choices: ['yes', 'no']
default: 'no'
description:
- This tells yum whether or not HTTP/1.1 keepalive should be used with
this repository. This can improve transfer speeds by using one
connection when downloading multiple files from a repository.
keepcache:
required: false
choices: ['0', '1']
default: '1'
description:
- Either C(1) or C(0). Determines whether or not yum keeps the cache of
headers and packages after successful installation.
metadata_expire:
required: false
default: 21600
description:
- Time (in seconds) after which the metadata will expire.
- Default value is 6 hours.
metadata_expire_filter:
required: false
choices: [never, 'read-only:past', 'read-only:present', 'read-only:future']
default: 'read-only:present'
description:
- Filter the I(metadata_expire) time, allowing a trade of speed for
accuracy if a command doesn't require it. Each yum command can specify
that it requires a certain level of timeliness quality from the remote
repos. from "I'm about to install/upgrade, so this better be current"
to "Anything that's available is good enough".
- C(never) - Nothing is filtered, always obey I(metadata_expire).
- C(read-only:past) - Commands that only care about past information are
filtered from metadata expiring. Eg. I(yum history) info (if history
needs to lookup anything about a previous transaction, then by
definition the remote package was available in the past).
- C(read-only:present) - Commands that are balanced between past and
future. Eg. I(yum list yum).
- C(read-only:future) - Commands that are likely to result in running
other commands which will require the latest metadata. Eg.
I(yum check-update).
- Note that this option does not override "yum clean expire-cache".
metalink:
required: false
default: null
description:
- Specifies a URL to a metalink file for the repomd.xml, a list of
mirrors for the entire repository are generated by converting the
mirrors for the repomd.xml file to a I(baseurl).
mirrorlist:
required: false
default: null
description:
- Specifies a URL to a file containing a list of baseurls.
- This or the I(baseurl) parameter is required if I(state) is set to
C(present).
mirrorlist_expire:
required: false
default: 21600
description:
- Time (in seconds) after which the mirrorlist locally cached will
expire.
- Default value is 6 hours.
name:
required: true
description:
- Unique repository ID.
- This parameter is only required if I(state) is set to C(present) or
C(absent).
params:
required: false
default: null
description:
- Option used to allow the user to overwrite any of the other options.
To remove an option, set the value of the option to C(null).
password:
required: false
default: null
description:
- Password to use with the username for basic authentication.
priority:
required: false
default: 99
description:
- Enforce ordered protection of repositories. The value is an integer
from 1 to 99.
- This option only works if the YUM Priorities plugin is installed.
protect:
required: false
choices: ['yes', 'no']
default: 'no'
description:
- Protect packages from updates from other repositories.
proxy:
required: false
default: null
description:
- URL to the proxy server that yum should use. Set to C(_none_) to
disable the global proxy setting.
proxy_password:
required: false
default: null
description:
- Password to use for the proxy server.
proxy_username:
required: false
default: null
description:
- Username to use for the proxy server.
repo_gpgcheck:
required: false
choices: ['yes', 'no']
default: 'no'
description:
- This tells yum whether or not it should perform a GPG signature check
on the repodata from this repository.
reposdir:
required: false
default: /etc/yum.repos.d
description:
- Directory where the C(.repo) files will be stored.
retries:
required: false
default: 10
description:
- Set the number of times any attempt to retrieve a file should retry
before returning an error. Setting this to C(0) makes yum try forever.
s3_enabled:
required: false
choices: ['yes', 'no']
default: 'no'
description:
- Enables support for S3 repositories.
- This option only works if the YUM S3 plugin is installed.
skip_if_unavailable:
required: false
choices: ['yes', 'no']
default: 'no'
description:
- If set to C(yes) yum will continue running if this repository cannot be
contacted for any reason. This should be set carefully as all repos are
consulted for any given command.
ssl_check_cert_permissions:
required: false
choices: ['yes', 'no']
default: 'no'
description:
- Whether yum should check the permissions on the paths for the
certificates on the repository (both remote and local).
- If we can't read any of the files then yum will force
I(skip_if_unavailable) to be C(yes). This is most useful for non-root
processes which use yum on repos that have client cert files which are
readable only by root.
sslcacert:
required: false
default: null
description:
- Path to the directory containing the databases of the certificate
authorities yum should use to verify SSL certificates.
sslclientcert:
required: false
default: null
description:
- Path to the SSL client certificate yum should use to connect to
repos/remote sites.
sslclientkey:
required: false
default: null
description:
- Path to the SSL client key yum should use to connect to repos/remote
sites.
sslverify:
required: false
choices: ['yes', 'no']
default: 'yes'
description:
- Defines whether yum should verify SSL certificates/hosts at all.
state:
required: false
choices: [absent, present]
default: present
description:
- State of the repo file.
throttle:
required: false
default: null
description:
- Enable bandwidth throttling for downloads.
- This option can be expressed as an absolute data rate in bytes/sec. An
SI prefix (k, M or G) may be appended to the bandwidth value.
timeout:
required: false
default: 30
description:
- Number of seconds to wait for a connection before timing out.
ui_repoid_vars:
required: false
default: releasever basearch
description:
- When a repository id is displayed, append these yum variables to the
string if they are used in the I(baseurl)/etc. Variables are appended
in the order listed (and found).
username:
required: false
default: null
description:
- Username to use for basic authentication to a repo or really any url.
extends_documentation_fragment:
- files
notes:
- All comments will be removed if modifying an existing repo file.
- Section order is preserved in an existing repo file.
- Parameters in a section are ordered alphabetically in an existing repo
file.
- The repo file will be automatically deleted if it contains no repository.
'''
EXAMPLES = '''
- name: Add repository
yum_repository:
name: epel
description: EPEL YUM repo
baseurl: https://download.fedoraproject.org/pub/epel/$releasever/$basearch/
- name: Add multiple repositories into the same file (1/2)
yum_repository:
name: epel
description: EPEL YUM repo
file: external_repos
baseurl: https://download.fedoraproject.org/pub/epel/$releasever/$basearch/
gpgcheck: no
- name: Add multiple repositories into the same file (2/2)
yum_repository:
name: rpmforge
description: RPMforge YUM repo
file: external_repos
baseurl: http://apt.sw.be/redhat/el7/en/$basearch/rpmforge
mirrorlist: http://mirrorlist.repoforge.org/el7/mirrors-rpmforge
enabled: no
- name: Remove repository
yum_repository:
name: epel
state: absent
- name: Remove repository from a specific repo file
yum_repository:
name: epel
file: external_repos
state: absent
#
# Allow to overwrite the yum_repository parameters by defining the parameters
# as a variable in the defaults or vars file:
#
# my_role_somerepo_params:
# # Disable GPG checking
# gpgcheck: no
# # Remove the gpgkey option
# gpgkey: null
#
- name: Add Some repo
yum_repository:
name: somerepo
description: Some YUM repo
baseurl: http://server.com/path/to/the/repo
gpgkey: http://server.com/keys/somerepo.pub
gpgcheck: yes
params: "{{ my_role_somerepo_params }}"
'''
RETURN = '''
repo:
description: repository name
returned: success
type: string
sample: "epel"
state:
description: state of the target, after execution
returned: success
type: string
sample: "present"
'''
class YumRepo(object):
# Class global variables
module = None
params = None
section = None
repofile = configparser.RawConfigParser()
# List of parameters which will be allowed in the repo file output
allowed_params = [
'async',
'bandwidth',
'baseurl',
'cost',
'deltarpm_metadata_percentage',
'deltarpm_percentage',
'enabled',
'enablegroups',
'exclude',
'failovermethod',
'gpgcakey',
'gpgcheck',
'gpgkey',
'http_caching',
'include',
'includepkgs',
'ip_resolve',
'keepalive',
'keepcache',
'metadata_expire',
'metadata_expire_filter',
'metalink',
'mirrorlist',
'mirrorlist_expire',
'name',
'password',
'priority',
'protect',
'proxy',
'proxy_password',
'proxy_username',
'repo_gpgcheck',
'retries',
's3_enabled',
'skip_if_unavailable',
'sslcacert',
'ssl_check_cert_permissions',
'sslclientcert',
'sslclientkey',
'sslverify',
'throttle',
'timeout',
'ui_repoid_vars',
'username']
# List of parameters which can be a list
list_params = ['exclude', 'includepkgs']
def __init__(self, module):
# To be able to use fail_json
self.module = module
# Shortcut for the params
self.params = self.module.params
# Section is always the repoid
self.section = self.params['repoid']
# Check if repo directory exists
repos_dir = self.params['reposdir']
if not os.path.isdir(repos_dir):
self.module.fail_json(
msg="Repo directory '%s' does not exist." % repos_dir)
# Set dest; also used to set dest parameter for the FS attributes
self.params['dest'] = os.path.join(
repos_dir, "%s.repo" % self.params['file'])
# Read the repo file if it exists
if os.path.isfile(self.params['dest']):
self.repofile.read(self.params['dest'])
def add(self):
# Remove already existing repo and create a new one
if self.repofile.has_section(self.section):
self.repofile.remove_section(self.section)
# Add section
self.repofile.add_section(self.section)
# Baseurl/mirrorlist is not required because for removal we need only
# the repo name. This is why we check if the baseurl/mirrorlist is
# defined.
if (self.params['baseurl'], self.params['mirrorlist']) == (None, None):
self.module.fail_json(
msg='Parameter "baseurl" or "mirrorlist" is required for '
'adding a new repo.')
# Set options
for key, value in sorted(self.params.items()):
if key in self.list_params and isinstance(value, list):
# Join items into one string for specific parameters
value = ' '.join(value)
elif isinstance(value, bool):
# Convert boolean value to integer
value = int(value)
# Set the value only if it was defined (default is None)
if value is not None and key in self.allowed_params:
self.repofile.set(self.section, key, value)
def save(self):
if len(self.repofile.sections()):
# Write data into the file
try:
fd = open(self.params['dest'], 'w')
except IOError:
e = get_exception()
self.module.fail_json(
msg="Cannot open repo file %s." % self.params['dest'],
details=str(e))
self.repofile.write(fd)
try:
fd.close()
except IOError:
e = get_exception()
self.module.fail_json(
msg="Cannot write repo file %s." % self.params['dest'],
details=str(e))
else:
# Remove the file if there are no repos left
try:
os.remove(self.params['dest'])
except OSError:
e = get_exception()
self.module.fail_json(
msg=(
"Cannot remove empty repo file %s." %
self.params['dest']),
details=str(e))
def remove(self):
# Remove section if exists
if self.repofile.has_section(self.section):
self.repofile.remove_section(self.section)
def dump(self):
repo_string = ""
# Compose the repo file
for section in sorted(self.repofile.sections()):
repo_string += "[%s]\n" % section
for key, value in sorted(self.repofile.items(section)):
repo_string += "%s = %s\n" % (key, value)
repo_string += "\n"
return repo_string
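# Illustrative sketch (assumption, using the first EXAMPLES entry above): for a
# single "epel" section, dump() would return text along the lines of
#   [epel]
#   baseurl = https://download.fedoraproject.org/pub/epel/$releasever/$basearch/
#   name = EPEL YUM repo
# i.e. sections and keys are emitted in sorted order, one "key = value" per line.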
def main():
# Module settings
module = AnsibleModule(
argument_spec=dict(
async=dict(type='bool'),
bandwidth=dict(),
baseurl=dict(),
cost=dict(),
deltarpm_metadata_percentage=dict(),
deltarpm_percentage=dict(),
description=dict(),
enabled=dict(type='bool'),
enablegroups=dict(type='bool'),
exclude=dict(),
failovermethod=dict(choices=['roundrobin', 'priority']),
file=dict(),
gpgcakey=dict(),
gpgcheck=dict(type='bool'),
gpgkey=dict(),
http_caching=dict(choices=['all', 'packages', 'none']),
include=dict(),
includepkgs=dict(),
ip_resolve=dict(choices=['4', '6', 'IPv4', 'IPv6', 'whatever']),
keepalive=dict(type='bool'),
keepcache=dict(choices=['0', '1']),
metadata_expire=dict(),
metadata_expire_filter=dict(
choices=[
'never',
'read-only:past',
'read-only:present',
'read-only:future']),
metalink=dict(),
mirrorlist=dict(),
mirrorlist_expire=dict(),
name=dict(required=True),
params=dict(type='dict'),
password=dict(no_log=True),
priority=dict(),
protect=dict(type='bool'),
proxy=dict(),
proxy_password=dict(no_log=True),
proxy_username=dict(),
repo_gpgcheck=dict(type='bool'),
reposdir=dict(default='/etc/yum.repos.d', type='path'),
retries=dict(),
s3_enabled=dict(type='bool'),
skip_if_unavailable=dict(type='bool'),
sslcacert=dict(),
ssl_check_cert_permissions=dict(type='bool'),
sslclientcert=dict(),
sslclientkey=dict(),
sslverify=dict(type='bool'),
state=dict(choices=['present', 'absent'], default='present'),
throttle=dict(),
timeout=dict(),
ui_repoid_vars=dict(),
username=dict(),
),
add_file_common_args=True,
supports_check_mode=True,
)
# Update module parameters by user's parameters if defined
if 'params' in module.params and isinstance(module.params['params'], dict):
module.params.update(module.params['params'])
# Remove the params
module.params.pop('params', None)
name = module.params['name']
state = module.params['state']
# Check if required parameters are present
if state == 'present':
if (
module.params['baseurl'] is None and
module.params['mirrorlist'] is None):
module.fail_json(
msg="Parameter 'baseurl' or 'mirrorlist' is required.")
if module.params['description'] is None:
module.fail_json(
msg="Parameter 'description' is required.")
# Rename "name" and "description" to ensure correct key sorting
module.params['repoid'] = module.params['name']
module.params['name'] = module.params['description']
del module.params['description']
# Define repo file name if it doesn't exist
if module.params['file'] is None:
module.params['file'] = module.params['repoid']
# Instantiate the YumRepo object
yumrepo = YumRepo(module)
# Get repo status before change
diff = {
'before_header': yumrepo.params['dest'],
'before': yumrepo.dump(),
'after_header': yumrepo.params['dest'],
'after': ''
}
# Perform action depending on the state
if state == 'present':
yumrepo.add()
elif state == 'absent':
yumrepo.remove()
# Get repo status after change
diff['after'] = yumrepo.dump()
# Compare repo states
changed = diff['before'] != diff['after']
# Save the file only if not in check mode and if there was a change
if not module.check_mode and changed:
yumrepo.save()
# Change file attributes if needed
if os.path.isfile(module.params['dest']):
file_args = module.load_file_common_arguments(module.params)
changed = module.set_fs_attributes_if_different(file_args, changed)
# Print status of the change
module.exit_json(changed=changed, repo=name, state=state, diff=diff)
if __name__ == '__main__':
main()

View file

@ -0,0 +1,477 @@
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# (c) 2013, Patrick Callahan <pmc@patrickcallahan.com>
# based on
# openbsd_pkg
# (c) 2013
# Patrik Lundin <patrik.lundin.swe@gmail.com>
#
# yum
# (c) 2012, Red Hat, Inc
# Written by Seth Vidal <skvidal at fedoraproject.org>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from xml.dom.minidom import parseString as parseXML
import re
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'committer',
'version': '1.0'}
DOCUMENTATION = '''
---
module: zypper
author:
- "Patrick Callahan (@dirtyharrycallahan)"
- "Alexander Gubin (@alxgu)"
- "Thomas O'Donnell (@andytom)"
- "Robin Roth (@robinro)"
- "Andrii Radyk (@AnderEnder)"
version_added: "1.2"
short_description: Manage packages on SUSE and openSUSE
description:
- Manage packages on SUSE and openSUSE using the zypper and rpm tools.
options:
name:
description:
- Package name C(name) or package specifier.
- Can include a version like C(name=1.0), C(name>3.4) or C(name<=2.7). If a version is given, C(oldpackage) is implied and zypper is allowed to update the package within the version range given.
- You can also pass a URL or a local path to an rpm file.
- When using state=latest, this can be '*', which updates all installed packages.
required: true
aliases: [ 'pkg' ]
state:
description:
- C(present) will make sure the package is installed.
C(latest) will make sure the latest version of the package is installed.
C(absent) will make sure the specified package is not installed.
required: false
choices: [ present, latest, absent ]
default: "present"
type:
description:
- The type of package to be operated on.
required: false
choices: [ package, patch, pattern, product, srcpackage, application ]
default: "package"
version_added: "2.0"
disable_gpg_check:
description:
- Whether to disable the GPG signature checking of the package
signature being installed. Has an effect only if state is
I(present) or I(latest).
required: false
default: "no"
choices: [ "yes", "no" ]
disable_recommends:
version_added: "1.8"
description:
- Corresponds to the C(--no-recommends) option for I(zypper). The default (C(yes)) passes C(--no-recommends), overriding zypper's default behavior; C(no) does install recommended packages.
required: false
default: "yes"
choices: [ "yes", "no" ]
force:
version_added: "2.2"
description:
- Adds the C(--force) option to I(zypper). Allows downgrading packages and changing the vendor or architecture.
required: false
default: "no"
choices: [ "yes", "no" ]
update_cache:
version_added: "2.2"
description:
- Run the equivalent of C(zypper refresh) before the operation.
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "refresh" ]
oldpackage:
version_added: "2.2"
description:
- Adds the C(--oldpackage) option to I(zypper). Allows downgrading packages with fewer side effects than force. This is implied as soon as a version is specified as part of the package name.
required: false
default: "no"
choices: [ "yes", "no" ]
# informational: requirements for nodes
requirements:
- "zypper >= 1.0 # included in openSuSE >= 11.1 or SuSE Linux Enterprise Server/Desktop >= 11.0"
- python-xml
- rpm
'''
EXAMPLES = '''
# Install "nmap"
- zypper:
name: nmap
state: present
# Install apache2 with recommended packages
- zypper:
name: apache2
state: present
disable_recommends: no
# Apply a given patch
- zypper:
name: openSUSE-2016-128
state: present
type: patch
# Remove the "nmap" package
- zypper:
name: nmap
state: absent
# Install the nginx rpm from a remote repo
- zypper:
name: 'http://nginx.org/packages/sles/12/x86_64/RPMS/nginx-1.8.0-1.sles12.ngx.x86_64.rpm'
state: present
# Install local rpm file
- zypper:
name: /tmp/fancy-software.rpm
state: present
# Update all packages
- zypper:
name: '*'
state: latest
# Apply all available patches
- zypper:
name: '*'
state: latest
type: patch
# Refresh repositories and update package "openssl"
- zypper:
name: openssl
state: present
update_cache: yes
# Install specific version (possible comparisons: <, >, <=, >=, =)
- zypper:
name: 'docker>=1.10'
state: installed
'''
def split_name_version(name):
"""splits of the package name and desired version
example formats:
- docker>=1.10
- apache=2.4
Allowed version specifiers: <, >, <=, >=, =
Allowed version format: [0-9.-]*
Also allows a prefix indicating removal ("-", "~") or installation ("+")
"""
prefix = ''
if name[0] in ['-', '~', '+']:
prefix = name[0]
name = name[1:]
version_check = re.compile('^(.*?)((?:<|>|<=|>=|=)[0-9.-]*)?$')
try:
reres = version_check.match(name)
name, version = reres.groups()
return prefix, name, version
except:
return prefix, name, None
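# Illustrative examples (added, not in the original source):
#   split_name_version('docker>=1.10') -> ('', 'docker', '>=1.10')
#   split_name_version('-apache2')     -> ('-', 'apache2', None)
#   split_name_version('nmap')         -> ('', 'nmap', None)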
def get_want_state(m, names, remove=False):
packages_install = {}
packages_remove = {}
urls = []
for name in names:
if '://' in name or name.endswith('.rpm'):
urls.append(name)
else:
prefix, pname, version = split_name_version(name)
if prefix in ['-', '~']:
packages_remove[pname] = version
elif prefix == '+':
packages_install[pname] = version
else:
if remove:
packages_remove[pname] = version
else:
packages_install[pname] = version
return packages_install, packages_remove, urls
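# For illustration, a hypothetical name list like ['nmap', '-exim', 'docker>=1.10']
# would be sorted into roughly:
#   packages_install = {'nmap': None, 'docker': '>=1.10'}
#   packages_remove  = {'exim': None}
#   urls             = []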
def get_installed_state(m, packages):
"get installed state of packages"
cmd = get_cmd(m, 'search')
cmd.extend(['--match-exact', '--details', '--installed-only'])
cmd.extend(packages)
return parse_zypper_xml(m, cmd, fail_not_found=False)[0]
def parse_zypper_xml(m, cmd, fail_not_found=True, packages=None):
rc, stdout, stderr = m.run_command(cmd, check_rc=False)
dom = parseXML(stdout)
if rc == 104:
# exit code 104 is ZYPPER_EXIT_INF_CAP_NOT_FOUND (no packages found)
if fail_not_found:
errmsg = dom.getElementsByTagName('message')[-1].childNodes[0].data
m.fail_json(msg=errmsg, rc=rc, stdout=stdout, stderr=stderr, cmd=cmd)
else:
return {}, rc, stdout, stderr
elif rc in [0, 106, 103]:
# zypper exit codes
# 0: success
# 106: signature verification failed
# 103: zypper was upgraded, run same command again
        firstrun = packages is None
        if firstrun:
            packages = {}
solvable_list = dom.getElementsByTagName('solvable')
for solvable in solvable_list:
name = solvable.getAttribute('name')
packages[name] = {}
packages[name]['version'] = solvable.getAttribute('edition')
packages[name]['oldversion'] = solvable.getAttribute('edition-old')
status = solvable.getAttribute('status')
packages[name]['installed'] = status == "installed"
packages[name]['group'] = solvable.parentNode.nodeName
if rc == 103 and firstrun:
# if this was the first run and it failed with 103
# run zypper again with the same command to complete update
return parse_zypper_xml(m, cmd, fail_not_found=fail_not_found, packages=packages)
return packages, rc, stdout, stderr
m.fail_json(msg='Zypper run command failed with return code %s.'%rc, rc=rc, stdout=stdout, stderr=stderr, cmd=cmd)
def get_cmd(m, subcommand):
"puts together the basic zypper command arguments with those passed to the module"
is_install = subcommand in ['install', 'update', 'patch']
is_refresh = subcommand == 'refresh'
cmd = ['/usr/bin/zypper', '--quiet', '--non-interactive', '--xmlout']
# add global options before zypper command
if (is_install or is_refresh) and m.params['disable_gpg_check']:
cmd.append('--no-gpg-checks')
cmd.append(subcommand)
if subcommand != 'patch' and not is_refresh:
cmd.extend(['--type', m.params['type']])
if m.check_mode and subcommand != 'search':
cmd.append('--dry-run')
if is_install:
cmd.append('--auto-agree-with-licenses')
if m.params['disable_recommends']:
cmd.append('--no-recommends')
if m.params['force']:
cmd.append('--force')
if m.params['oldpackage']:
cmd.append('--oldpackage')
return cmd
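# For illustration, with the module defaults (type=package, disable_recommends=yes,
# gpg checks enabled, no check mode), get_cmd(m, 'install') assembles roughly:
#   ['/usr/bin/zypper', '--quiet', '--non-interactive', '--xmlout', 'install',
#    '--type', 'package', '--auto-agree-with-licenses', '--no-recommends']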
def set_diff(m, retvals, result):
# TODO: if there is only one package, set before/after to version numbers
packages = {'installed': [], 'removed': [], 'upgraded': []}
if result:
for p in result:
group = result[p]['group']
if group == 'to-upgrade':
versions = ' (' + result[p]['oldversion'] + ' => ' + result[p]['version'] + ')'
packages['upgraded'].append(p + versions)
elif group == 'to-install':
packages['installed'].append(p)
elif group == 'to-remove':
packages['removed'].append(p)
output = ''
for state in packages:
if packages[state]:
output += state + ': ' + ', '.join(packages[state]) + '\n'
if 'diff' not in retvals:
retvals['diff'] = {}
if 'prepared' not in retvals['diff']:
retvals['diff']['prepared'] = output
else:
retvals['diff']['prepared'] += '\n' + output
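# For illustration, the 'prepared' diff built above ends up looking like
# (hypothetical packages/versions):
#   upgraded: openssl (1.0.1k => 1.0.2j)
#   installed: nmap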
def package_present(m, name, want_latest):
"install and update (if want_latest) the packages in name_install, while removing the packages in name_remove"
retvals = {'rc': 0, 'stdout': '', 'stderr': ''}
name_install, name_remove, urls = get_want_state(m, name)
# if a version string is given, pass it to zypper
install_version = [p+name_install[p] for p in name_install if name_install[p]]
remove_version = [p+name_remove[p] for p in name_remove if name_remove[p]]
# add oldpackage flag when a version is given to allow downgrades
if install_version or remove_version:
m.params['oldpackage'] = True
if not want_latest:
# for state=present: filter out already installed packages
install_and_remove = name_install.copy()
install_and_remove.update(name_remove)
prerun_state = get_installed_state(m, install_and_remove)
# generate lists of packages to install or remove
name_install = [p for p in name_install if p not in prerun_state]
name_remove = [p for p in name_remove if p in prerun_state]
if not any((name_install, name_remove, urls, install_version, remove_version)):
# nothing to install/remove and nothing to update
return None, retvals
# zypper install also updates packages
cmd = get_cmd(m, 'install')
cmd.append('--')
cmd.extend(urls)
# pass packages with version information
cmd.extend(install_version)
cmd.extend(['-%s' % p for p in remove_version])
# allow for + or - prefixes in install/remove lists
# do this in one zypper run to allow for dependency-resolution
# for example "-exim postfix" runs without removing packages depending on mailserver
cmd.extend(name_install)
cmd.extend(['-%s' % p for p in name_remove])
retvals['cmd'] = cmd
result, retvals['rc'], retvals['stdout'], retvals['stderr'] = parse_zypper_xml(m, cmd)
return result, retvals
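# For illustration, a hypothetical call with name=['postfix', '-exim'] and
# state=present ends up invoking something like:
#   zypper ... install ... -- postfix -exim
# so installing postfix and removing exim are resolved in a single transaction.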
def package_update_all(m):
"run update or patch on all available packages"
retvals = {'rc': 0, 'stdout': '', 'stderr': ''}
if m.params['type'] == 'patch':
cmdname = 'patch'
else:
cmdname = 'update'
cmd = get_cmd(m, cmdname)
retvals['cmd'] = cmd
result, retvals['rc'], retvals['stdout'], retvals['stderr'] = parse_zypper_xml(m, cmd)
return result, retvals
def package_absent(m, name):
"remove the packages in name"
retvals = {'rc': 0, 'stdout': '', 'stderr': ''}
# Get package state
name_install, name_remove, urls = get_want_state(m, name, remove=True)
if name_install:
m.fail_json(msg="Can not combine '+' prefix with state=remove/absent.")
if urls:
m.fail_json(msg="Can not remove via URL.")
if m.params['type'] == 'patch':
m.fail_json(msg="Can not remove patches.")
prerun_state = get_installed_state(m, name_remove)
remove_version = [p+name_remove[p] for p in name_remove if name_remove[p]]
name_remove = [p for p in name_remove if p in prerun_state]
if not name_remove and not remove_version:
return None, retvals
cmd = get_cmd(m, 'remove')
cmd.extend(name_remove)
cmd.extend(remove_version)
retvals['cmd'] = cmd
result, retvals['rc'], retvals['stdout'], retvals['stderr'] = parse_zypper_xml(m, cmd)
return result, retvals
def repo_refresh(m):
"update the repositories"
retvals = {'rc': 0, 'stdout': '', 'stderr': ''}
cmd = get_cmd(m, 'refresh')
retvals['cmd'] = cmd
result, retvals['rc'], retvals['stdout'], retvals['stderr'] = parse_zypper_xml(m, cmd)
return retvals
# ===========================================
# Main control flow
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True, aliases=['pkg'], type='list'),
state = dict(required=False, default='present', choices=['absent', 'installed', 'latest', 'present', 'removed']),
type = dict(required=False, default='package', choices=['package', 'patch', 'pattern', 'product', 'srcpackage', 'application']),
disable_gpg_check = dict(required=False, default='no', type='bool'),
disable_recommends = dict(required=False, default='yes', type='bool'),
force = dict(required=False, default='no', type='bool'),
update_cache = dict(required=False, aliases=['refresh'], default='no', type='bool'),
oldpackage = dict(required=False, default='no', type='bool'),
),
supports_check_mode = True
)
name = module.params['name']
state = module.params['state']
update_cache = module.params['update_cache']
# remove empty strings from package list
    name = list(filter(None, name))
# Refresh repositories
if update_cache:
retvals = repo_refresh(module)
if retvals['rc'] != 0:
module.fail_json(msg="Zypper refresh run failed.", **retvals)
# Perform requested action
if name == ['*'] and state == 'latest':
packages_changed, retvals = package_update_all(module)
else:
if state in ['absent', 'removed']:
packages_changed, retvals = package_absent(module, name)
elif state in ['installed', 'present', 'latest']:
packages_changed, retvals = package_present(module, name, state == 'latest')
retvals['changed'] = retvals['rc'] == 0 and bool(packages_changed)
if module._diff:
set_diff(module, retvals, packages_changed)
if retvals['rc'] != 0:
module.fail_json(msg="Zypper run failed.", **retvals)
if not retvals['changed']:
del retvals['stdout']
del retvals['stderr']
module.exit_json(name=name, state=state, update_cache=update_cache, **retvals)
# import module snippets
from ansible.module_utils.basic import AnsibleModule
if __name__ == "__main__":
main()

View file

@@ -0,0 +1,414 @@
#!/usr/bin/python
# encoding: utf-8
# (c) 2013, Matthias Vogelgesang <matthias.vogelgesang@gmail.com>
# (c) 2014, Justin Lecher <jlec@gentoo.org>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: zypper_repository
author: "Matthias Vogelgesang (@matze)"
version_added: "1.4"
short_description: Add and remove Zypper repositories
description:
- Add or remove Zypper repositories on SUSE and openSUSE
options:
name:
required: false
default: none
description:
- A name for the repository. Not required when adding repofiles.
repo:
required: false
default: none
description:
- URI of the repository or .repo file. Required when state=present.
state:
required: false
choices: [ "absent", "present" ]
default: "present"
description:
            - Whether the repository should be C(present) or C(absent).
description:
required: false
default: none
description:
- A description of the repository
disable_gpg_check:
description:
- Whether to disable GPG signature checking of
all packages. Has an effect only if state is
I(present).
- Needs zypper version >= 1.6.2.
required: false
default: "no"
choices: [ "yes", "no" ]
autorefresh:
description:
- Enable autorefresh of the repository.
required: false
default: "yes"
choices: [ "yes", "no" ]
aliases: [ "refresh" ]
priority:
description:
- Set priority of repository. Packages will always be installed
from the repository with the smallest priority number.
- Needs zypper version >= 1.12.25.
required: false
version_added: "2.1"
overwrite_multiple:
description:
- Overwrite multiple repository entries, if repositories with both name and
URL already exist.
required: false
default: "no"
choices: [ "yes", "no" ]
version_added: "2.1"
auto_import_keys:
description:
- Automatically import the gpg signing key of the new or changed repository.
- Has an effect only if state is I(present). Has no effect on existing (unchanged) repositories or in combination with I(absent).
- Implies runrefresh.
required: false
default: "no"
choices: ["yes", "no"]
version_added: "2.2"
runrefresh:
description:
- Refresh the package list of the given repository.
- Can be used with repo=* to refresh all repositories.
required: false
default: "no"
choices: ["yes", "no"]
version_added: "2.2"
enabled:
description:
- Set repository to enabled (or disabled).
required: false
default: "yes"
choices: ["yes", "no"]
version_added: "2.2"
requirements:
- "zypper >= 1.0 # included in openSuSE >= 11.1 or SuSE Linux Enterprise Server/Desktop >= 11.0"
- python-xml
'''
EXAMPLES = '''
# Add NVIDIA repository for graphics drivers
- zypper_repository:
name: nvidia-repo
repo: 'ftp://download.nvidia.com/opensuse/12.2'
state: present
# Remove NVIDIA repository
- zypper_repository:
name: nvidia-repo
repo: 'ftp://download.nvidia.com/opensuse/12.2'
state: absent
# Add python development repository
- zypper_repository:
repo: 'http://download.opensuse.org/repositories/devel:/languages:/python/SLE_11_SP3/devel:languages:python.repo'
# Refresh all repos
- zypper_repository:
    repo: '*'
runrefresh: yes
# Add a repo and import its GPG key
- zypper_repository:
repo: 'http://download.opensuse.org/repositories/systemsmanagement/openSUSE_Leap_42.1/'
auto_import_keys: yes
# Force refresh of a repository
- zypper_repository:
    repo: 'http://my_internal_ci_repo/repo'
name: my_ci_repo
state: present
runrefresh: yes
'''
REPO_OPTS = ['alias', 'name', 'priority', 'enabled', 'autorefresh', 'gpgcheck']
from distutils.version import LooseVersion
def _get_cmd(*args):
"""Combines the non-interactive zypper command with arguments/subcommands"""
cmd = ['/usr/bin/zypper', '--quiet', '--non-interactive']
cmd.extend(args)
return cmd
def _parse_repos(module):
    """parses the output of zypper --xmlout repos and returns a list of parsed repo dictionaries"""
cmd = _get_cmd('--xmlout', 'repos')
from xml.dom.minidom import parseString as parseXML
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
repos = []
dom = parseXML(stdout)
repo_list = dom.getElementsByTagName('repo')
for repo in repo_list:
opts = {}
for o in REPO_OPTS:
opts[o] = repo.getAttribute(o)
opts['url'] = repo.getElementsByTagName('url')[0].firstChild.data
# A repo can be uniquely identified by an alias + url
repos.append(opts)
return repos
# exit code 6 is ZYPPER_EXIT_NO_REPOS (no repositories defined)
elif rc == 6:
return []
else:
module.fail_json(msg='Failed to execute "%s"' % " ".join(cmd), rc=rc, stdout=stdout, stderr=stderr)
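# For illustration, one parsed entry from a hypothetical system could look like:
#   {'alias': 'repo-oss', 'name': 'Main Repository', 'priority': '99', 'enabled': '1',
#    'autorefresh': '1', 'gpgcheck': '1', 'url': 'http://example.com/distribution/oss/'}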
def _repo_changes(realrepo, repocmp):
"Check whether the 2 given repos have different settings."
for k in repocmp:
if repocmp[k] and k not in realrepo:
return True
for k, v in realrepo.items():
if k in repocmp and repocmp[k]:
valold = str(repocmp[k] or "")
valnew = v or ""
if k == "url":
valold, valnew = valold.rstrip("/"), valnew.rstrip("/")
if valold != valnew:
return True
return False
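# For illustration, two hypothetical URLs that differ only in a trailing slash,
# e.g. 'http://example.com/repo' vs 'http://example.com/repo/', compare as equal
# here because both values are stripped of trailing '/' before the comparison.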
def repo_exists(module, repodata, overwrite_multiple):
"""Check whether the repository already exists.
returns (exists, mod, old_repos)
exists: whether a matching (name, URL) repo exists
mod: whether there are changes compared to the existing repo
old_repos: list of matching repos
"""
existing_repos = _parse_repos(module)
# look for repos that have matching alias or url to the one searched
repos = []
for kw in ['alias', 'url']:
name = repodata[kw]
for oldr in existing_repos:
if repodata[kw] == oldr[kw] and oldr not in repos:
repos.append(oldr)
if len(repos) == 0:
# Repo does not exist yet
return (False, False, None)
elif len(repos) == 1:
# Found an existing repo, look for changes
has_changes = _repo_changes(repos[0], repodata)
return (True, has_changes, repos)
elif len(repos) >= 2:
if overwrite_multiple:
# Found two repos and want to overwrite_multiple
return (True, True, repos)
else:
errmsg = 'More than one repo matched "%s": "%s".' % (name, repos)
errmsg += ' Use overwrite_multiple to allow more than one repo to be overwritten'
module.fail_json(msg=errmsg)
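# For illustration, repo_exists() returns roughly:
#   (False, False, None)      - no matching repo configured yet
#   (True,  False, [oldrepo]) - one match with identical settings (nothing to do)
#   (True,  True,  [oldrepo]) - one match whose settings differ (modify needed)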
def addmodify_repo(module, repodata, old_repos, zypper_version, warnings):
    "Adds the repo, first removing any old repos that would conflict."
repo = repodata['url']
cmd = _get_cmd('addrepo', '--check')
if repodata['name']:
cmd.extend(['--name', repodata['name']])
# priority on addrepo available since 1.12.25
# https://github.com/openSUSE/zypper/blob/b9b3cb6db76c47dc4c47e26f6a4d2d4a0d12b06d/package/zypper.changes#L327-L336
if repodata['priority']:
if zypper_version >= LooseVersion('1.12.25'):
cmd.extend(['--priority', str(repodata['priority'])])
else:
warnings.append("Setting priority only available for zypper >= 1.12.25. Ignoring priority argument.")
if repodata['enabled'] == '0':
cmd.append('--disable')
# gpgcheck available since 1.6.2
# https://github.com/openSUSE/zypper/blob/b9b3cb6db76c47dc4c47e26f6a4d2d4a0d12b06d/package/zypper.changes#L2446-L2449
# the default changed in the past, so don't assume a default here and show warning for old zypper versions
if zypper_version >= LooseVersion('1.6.2'):
if repodata['gpgcheck'] == '1':
cmd.append('--gpgcheck')
else:
cmd.append('--no-gpgcheck')
else:
warnings.append("Enabling/disabling gpgcheck only available for zypper >= 1.6.2. Using zypper default value.")
if repodata['autorefresh'] == '1':
cmd.append('--refresh')
cmd.append(repo)
if not repo.endswith('.repo'):
cmd.append(repodata['alias'])
if old_repos is not None:
for oldrepo in old_repos:
remove_repo(module, oldrepo['url'])
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
return rc, stdout, stderr
def remove_repo(module, repo):
"Removes the repo."
cmd = _get_cmd('removerepo', repo)
rc, stdout, stderr = module.run_command(cmd, check_rc=True)
return rc, stdout, stderr
def get_zypper_version(module):
rc, stdout, stderr = module.run_command(['/usr/bin/zypper', '--version'])
if rc != 0 or not stdout.startswith('zypper '):
return LooseVersion('1.0')
return LooseVersion(stdout.split()[1])
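# For illustration, output such as 'zypper 1.13.4' parses to LooseVersion('1.13.4');
# anything unexpected falls back to LooseVersion('1.0'), so the version checks in
# addmodify_repo() simply treat the installed zypper as an old one.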
def runrefreshrepo(module, auto_import_keys=False, shortname=None):
"Forces zypper to refresh repo metadata."
if auto_import_keys:
cmd = _get_cmd('--gpg-auto-import-keys', 'refresh', '--force')
else:
cmd = _get_cmd('refresh', '--force')
if shortname is not None:
cmd.extend(['-r', shortname])
rc, stdout, stderr = module.run_command(cmd, check_rc=True)
return rc, stdout, stderr
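# For illustration, refreshing a single hypothetical alias with key auto-import
# would run roughly:
#   ['/usr/bin/zypper', '--quiet', '--non-interactive', '--gpg-auto-import-keys',
#    'refresh', '--force', '-r', 'my_ci_repo']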
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=False),
repo=dict(required=False),
state=dict(choices=['present', 'absent'], default='present'),
runrefresh=dict(required=False, default='no', type='bool'),
description=dict(required=False),
disable_gpg_check = dict(required=False, default=False, type='bool'),
autorefresh = dict(required=False, default=True, type='bool', aliases=['refresh']),
priority = dict(required=False, type='int'),
enabled = dict(required=False, default=True, type='bool'),
overwrite_multiple = dict(required=False, default=False, type='bool'),
auto_import_keys = dict(required=False, default=False, type='bool'),
),
supports_check_mode=False,
required_one_of = [['state','runrefresh']],
)
repo = module.params['repo']
alias = module.params['name']
state = module.params['state']
overwrite_multiple = module.params['overwrite_multiple']
auto_import_keys = module.params['auto_import_keys']
runrefresh = module.params['runrefresh']
zypper_version = get_zypper_version(module)
warnings = [] # collect warning messages for final output
repodata = {
'url': repo,
'alias': alias,
'name': module.params['description'],
'priority': module.params['priority'],
}
# rewrite bools in the language that zypper lr -x provides for easier comparison
if module.params['enabled']:
repodata['enabled'] = '1'
else:
repodata['enabled'] = '0'
if module.params['disable_gpg_check']:
repodata['gpgcheck'] = '0'
else:
repodata['gpgcheck'] = '1'
if module.params['autorefresh']:
repodata['autorefresh'] = '1'
else:
repodata['autorefresh'] = '0'
def exit_unchanged():
module.exit_json(changed=False, repodata=repodata, state=state)
# Check run-time module parameters
if repo == '*' or alias == '*':
if runrefresh:
runrefreshrepo(module, auto_import_keys)
module.exit_json(changed=False, runrefresh=True)
else:
module.fail_json(msg='repo=* can only be used with the runrefresh option.')
if state == 'present' and not repo:
module.fail_json(msg='Module option state=present requires repo')
if state == 'absent' and not repo and not alias:
module.fail_json(msg='Alias or repo parameter required when state=absent')
if repo and repo.endswith('.repo'):
if alias:
module.fail_json(msg='Incompatible option: \'name\'. Do not use name when adding .repo files')
else:
if not alias and state == "present":
module.fail_json(msg='Name required when adding non-repo files.')
exists, mod, old_repos = repo_exists(module, repodata, overwrite_multiple)
if repo:
shortname = repo
else:
shortname = alias
if state == 'present':
if exists and not mod:
if runrefresh:
runrefreshrepo(module, auto_import_keys, shortname)
exit_unchanged()
rc, stdout, stderr = addmodify_repo(module, repodata, old_repos, zypper_version, warnings)
if rc == 0 and (runrefresh or auto_import_keys):
runrefreshrepo(module, auto_import_keys, shortname)
elif state == 'absent':
if not exists:
exit_unchanged()
rc, stdout, stderr = remove_repo(module, shortname)
if rc == 0:
module.exit_json(changed=True, repodata=repodata, state=state, warnings=warnings)
else:
module.fail_json(msg="Zypper failed with rc %s" % rc, rc=rc, stdout=stdout, stderr=stderr, repodata=repodata, state=state, warnings=warnings)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()