Initial commit

Ansible Core Team 2020-03-09 09:11:07 +00:00
commit aebc1b03fd
4861 changed files with 812621 additions and 0 deletions


@@ -0,0 +1,228 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Michael Warkentin <mwarkentin@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: bower
short_description: Manage bower packages with bower
description:
- Manage bower packages with bower
author: "Michael Warkentin (@mwarkentin)"
options:
name:
description:
- The name of a bower package to install
offline:
description:
- Install packages from local cache, if the packages were installed before
type: bool
default: 'no'
production:
description:
- Install with --production flag
type: bool
default: 'no'
path:
description:
- The base path where the bower packages will be installed
required: true
relative_execpath:
description:
- Relative path to bower executable from install path
state:
description:
- The state of the bower package
default: present
choices: [ "present", "absent", "latest" ]
version:
description:
- The version to be installed
'''
EXAMPLES = '''
- name: Install "bootstrap" bower package.
bower:
name: bootstrap
- name: Install "bootstrap" bower package on version 3.1.1.
bower:
name: bootstrap
version: '3.1.1'
- name: Remove the "bootstrap" bower package.
bower:
name: bootstrap
state: absent
- name: Install packages based on bower.json.
bower:
path: /app/location
- name: Update packages based on bower.json to their latest version.
bower:
path: /app/location
state: latest
# install bower locally and run from there
- npm:
path: /app/location
name: bower
global: no
- bower:
path: /app/location
relative_execpath: node_modules/.bin
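# A minimal sketch: install from the local bower cache with --production,
# assuming a bower.json already exists at /app/location.
- bower:
    path: /app/location
    offline: yes
    production: yes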
'''
import json
import os
from ansible.module_utils.basic import AnsibleModule
class Bower(object):
def __init__(self, module, **kwargs):
self.module = module
self.name = kwargs['name']
self.offline = kwargs['offline']
self.production = kwargs['production']
self.path = kwargs['path']
self.relative_execpath = kwargs['relative_execpath']
self.version = kwargs['version']
if kwargs['version']:
self.name_version = self.name + '#' + self.version
else:
self.name_version = self.name
def _exec(self, args, run_in_check_mode=False, check_rc=True):
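# Runs bower only outside check mode, unless run_in_check_mode is set (list() uses
# this so state can still be queried); returns the command's stdout, or '' when skipped.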
if not self.module.check_mode or (self.module.check_mode and run_in_check_mode):
cmd = []
if self.relative_execpath:
cmd.append(os.path.join(self.path, self.relative_execpath, "bower"))
if not os.path.isfile(cmd[-1]):
self.module.fail_json(msg="bower not found at relative path %s" % self.relative_execpath)
else:
cmd.append("bower")
cmd.extend(args)
cmd.extend(['--config.interactive=false', '--allow-root'])
if self.name:
cmd.append(self.name_version)
if self.offline:
cmd.append('--offline')
if self.production:
cmd.append('--production')
# If path is specified, cd into that path and run the command.
cwd = None
if self.path:
if not os.path.exists(self.path):
os.makedirs(self.path)
if not os.path.isdir(self.path):
self.module.fail_json(msg="path %s is not a directory" % self.path)
cwd = self.path
rc, out, err = self.module.run_command(cmd, check_rc=check_rc, cwd=cwd)
return out
return ''
def list(self):
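# Classify each entry of `bower list --json` as installed, missing (flagged missing
# by bower) or outdated (installed version behind latest, or flagged incompatible).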
cmd = ['list', '--json']
installed = list()
missing = list()
outdated = list()
data = json.loads(self._exec(cmd, True, False))
if 'dependencies' in data:
for dep in data['dependencies']:
dep_data = data['dependencies'][dep]
if dep_data.get('missing', False):
missing.append(dep)
elif ('version' in dep_data['pkgMeta'] and
'update' in dep_data and
dep_data['pkgMeta']['version'] != dep_data['update']['latest']):
outdated.append(dep)
elif dep_data.get('incompatible', False):
outdated.append(dep)
else:
installed.append(dep)
# Named dependency not installed
else:
missing.append(self.name)
return installed, missing, outdated
def install(self):
return self._exec(['install'])
def update(self):
return self._exec(['update'])
def uninstall(self):
return self._exec(['uninstall'])
def main():
arg_spec = dict(
name=dict(default=None),
offline=dict(default='no', type='bool'),
production=dict(default='no', type='bool'),
path=dict(required=True, type='path'),
relative_execpath=dict(default=None, required=False, type='path'),
state=dict(default='present', choices=['present', 'absent', 'latest', ]),
version=dict(default=None),
)
module = AnsibleModule(
argument_spec=arg_spec
)
name = module.params['name']
offline = module.params['offline']
production = module.params['production']
path = module.params['path']
relative_execpath = module.params['relative_execpath']
state = module.params['state']
version = module.params['version']
if state == 'absent' and not name:
module.fail_json(msg='uninstalling a package is only available for named packages')
bower = Bower(module, name=name, offline=offline, production=production, path=path, relative_execpath=relative_execpath, version=version)
changed = False
if state == 'present':
installed, missing, outdated = bower.list()
if missing:
changed = True
bower.install()
elif state == 'latest':
installed, missing, outdated = bower.list()
if missing or outdated:
changed = True
bower.update()
else: # Absent
installed, missing, outdated = bower.list()
if name in installed:
changed = True
bower.uninstall()
module.exit_json(changed=changed)
if __name__ == '__main__':
main()


@@ -0,0 +1,199 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Tim Hoiberg <tim.hoiberg@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: bundler
short_description: Manage Ruby Gem dependencies with Bundler
description:
- Manage installation and Gem version dependencies for Ruby using the Bundler gem
options:
executable:
description:
- The path to the bundler executable
state:
description:
- The desired state of the Gem bundle. C(latest) updates gems to the most recent, acceptable version
choices: [present, latest]
default: present
chdir:
description:
- The directory to execute the bundler commands from. This directory
needs to contain a valid Gemfile or .bundle/ directory
default: temporary working directory
exclude_groups:
description:
- A list of Gemfile groups to exclude during operations. This only
applies when state is C(present). Bundler considers this
a 'remembered' property for the Gemfile and will automatically exclude
groups in future operations even if C(exclude_groups) is not set
clean:
description:
- Only applies if state is C(present). If set, removes any gems on the
target host that are not in the gemfile
type: bool
default: 'no'
gemfile:
description:
- Only applies if state is C(present). The path to the gemfile to use to install gems.
default: Gemfile in current directory
local:
description:
- If set, only installs gems from the cache on the target host
type: bool
default: 'no'
deployment_mode:
description:
- Only applies if state is C(present). If set, it will install gems in
./vendor/bundle instead of the default location. Requires a Gemfile.lock
file to have been created beforehand
type: bool
default: 'no'
user_install:
description:
- Only applies if state is C(present). Installs gems in the local user's cache or for all users
type: bool
default: 'yes'
gem_path:
description:
- Only applies if state is C(present). Specifies the directory to
install the gems into. If C(chdir) is set then this path is relative to
C(chdir)
default: RubyGems gem paths
binstub_directory:
description:
- Only applies if state is C(present). Specifies the directory to
install any gem bins files to. When executed the bin files will run
within the context of the Gemfile and fail if any required gem
dependencies are not installed. If C(chdir) is set then this path is
relative to C(chdir)
extra_args:
description:
- A space separated string of additional commands that can be applied to
the Bundler command. Refer to the Bundler documentation for more
information
author: "Tim Hoiberg (@thoiberg)"
'''
EXAMPLES = '''
# Installs gems from a Gemfile in the current directory
- bundler:
state: present
executable: ~/.rvm/gems/2.1.5/bin/bundle
# Excludes the production group from installing
- bundler:
state: present
exclude_groups: production
# Install gems into ./vendor/bundle
- bundler:
state: present
deployment_mode: yes
# Installs gems using a Gemfile in another directory
- bundler:
state: present
gemfile: ../rails_project/Gemfile
# Updates Gemfile in another directory
- bundler:
state: latest
chdir: ~/rails_project
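# A minimal sketch (paths assumed): deployment-style install that also writes
# binstubs, expecting a Gemfile.lock to exist in /srv/app.
- bundler:
    state: present
    chdir: /srv/app
    deployment_mode: yes
    binstub_directory: /srv/app/bin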
'''
from ansible.module_utils.basic import AnsibleModule
def get_bundler_executable(module):
if module.params.get('executable'):
result = module.params.get('executable').split(' ')
else:
result = [module.get_bin_path('bundle', True)]
return result
def main():
module = AnsibleModule(
argument_spec=dict(
executable=dict(default=None, required=False),
state=dict(default='present', required=False, choices=['present', 'latest']),
chdir=dict(default=None, required=False, type='path'),
exclude_groups=dict(default=None, required=False, type='list'),
clean=dict(default=False, required=False, type='bool'),
gemfile=dict(default=None, required=False, type='path'),
local=dict(default=False, required=False, type='bool'),
deployment_mode=dict(default=False, required=False, type='bool'),
user_install=dict(default=True, required=False, type='bool'),
gem_path=dict(default=None, required=False, type='path'),
binstub_directory=dict(default=None, required=False, type='path'),
extra_args=dict(default=None, required=False),
),
supports_check_mode=True
)
state = module.params.get('state')
chdir = module.params.get('chdir')
exclude_groups = module.params.get('exclude_groups')
clean = module.params.get('clean')
gemfile = module.params.get('gemfile')
local = module.params.get('local')
deployment_mode = module.params.get('deployment_mode')
user_install = module.params.get('user_install')
gem_path = module.params.get('gem_path')
binstub_directory = module.params.get('binstub_directory')
extra_args = module.params.get('extra_args')
cmd = get_bundler_executable(module)
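# In check mode only `bundle check` is run; it exits non-zero when the Gemfile's
# dependencies are not satisfied, which is reported as changed=True.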
if module.check_mode:
cmd.append('check')
rc, out, err = module.run_command(cmd, cwd=chdir, check_rc=False)
module.exit_json(changed=rc != 0, state=state, stdout=out, stderr=err)
if state == 'present':
cmd.append('install')
if exclude_groups:
cmd.extend(['--without', ':'.join(exclude_groups)])
if clean:
cmd.append('--clean')
if gemfile:
cmd.extend(['--gemfile', gemfile])
if local:
cmd.append('--local')
if deployment_mode:
cmd.append('--deployment')
if not user_install:
cmd.append('--system')
if gem_path:
cmd.extend(['--path', gem_path])
if binstub_directory:
cmd.extend(['--binstubs', binstub_directory])
else:
cmd.append('update')
if local:
cmd.append('--local')
if extra_args:
cmd.extend(extra_args.split(' '))
rc, out, err = module.run_command(cmd, cwd=chdir, check_rc=True)
module.exit_json(changed='Installing' in out, state=state, stdout=out, stderr=err)
if __name__ == '__main__':
main()


@@ -0,0 +1,263 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Dimitrios Tydeas Mengidis <tydeas.dr@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: composer
author:
- "Dimitrios Tydeas Mengidis (@dmtrs)"
- "René Moser (@resmo)"
short_description: Dependency Manager for PHP
description:
- >
Composer is a tool for dependency management in PHP. It allows you to
declare the dependent libraries your project needs and it will install
them in your project for you.
options:
command:
description:
- Composer command like "install", "update" and so on.
default: install
arguments:
description:
- Composer arguments like required package, version and so on.
executable:
description:
- Path to PHP Executable on the remote host, if PHP is not in PATH.
aliases: [ php_path ]
working_dir:
description:
- Directory of your project (see --working-dir). This is required when
the command is not run globally.
- Will be ignored if C(global_command=true).
aliases: [ working-dir ]
global_command:
description:
- Runs the specified command globally.
type: bool
default: false
aliases: [ global-command ]
prefer_source:
description:
- Forces installation from package sources when possible (see --prefer-source).
default: false
type: bool
aliases: [ prefer-source ]
prefer_dist:
description:
- Forces installation from package dist even for dev versions (see --prefer-dist).
default: false
type: bool
aliases: [ prefer-dist ]
no_dev:
description:
- Disables installation of require-dev packages (see --no-dev).
default: true
type: bool
aliases: [ no-dev ]
no_scripts:
description:
- Skips the execution of all scripts defined in composer.json (see --no-scripts).
default: false
type: bool
aliases: [ no-scripts ]
no_plugins:
description:
- Disables all plugins (see --no-plugins).
default: false
type: bool
aliases: [ no-plugins ]
optimize_autoloader:
description:
- Optimize autoloader during autoloader dump (see --optimize-autoloader).
- Convert PSR-0/4 autoloading to classmap to get a faster autoloader.
- Recommended especially for production, but can take a bit of time to run.
default: true
type: bool
aliases: [ optimize-autoloader ]
classmap_authoritative:
description:
- Autoload classes from classmap only.
- Implicitly enables optimize_autoloader.
- Recommended especially for production, but can take a bit of time to run.
default: false
type: bool
aliases: [ classmap-authoritative ]
apcu_autoloader:
description:
- Uses APCu to cache found/not-found classes
default: false
type: bool
aliases: [ apcu-autoloader ]
ignore_platform_reqs:
description:
- Ignore php, hhvm, lib-* and ext-* requirements and force the installation even if the local machine does not fulfill these.
default: false
type: bool
aliases: [ ignore-platform-reqs ]
requirements:
- php
- composer installed in bin path (recommended /usr/local/bin)
notes:
- Default options that are always appended in each execution are --no-ansi, --no-interaction and --no-progress if available.
- We received reports about issues on macOS if composer was installed by Homebrew. Please use the official install method to avoid issues.
'''
EXAMPLES = '''
# Downloads and installs all the libs and dependencies outlined in the /path/to/project/composer.lock
- composer:
command: install
working_dir: /path/to/project
- composer:
command: require
arguments: my/package
working_dir: /path/to/project
# Clone project and install with all dependencies
- composer:
command: create-project
arguments: package/package /path/to/project ~1.0
working_dir: /path/to/project
prefer_dist: yes
# Installs package globally
- composer:
command: require
global_command: yes
arguments: my/package
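# A minimal sketch (project path assumed): production-style install that skips
# require-dev packages and builds an authoritative classmap.
- composer:
    command: install
    working_dir: /path/to/project
    no_dev: yes
    classmap_authoritative: yes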
'''
import re
from ansible.module_utils.basic import AnsibleModule
def parse_out(string):
return re.sub(r"\s+", " ", string).strip()
def has_changed(string):
return "Nothing to install or update" not in string
def get_available_options(module, command='install'):
# get all available options from a composer command using composer help to json
rc, out, err = composer_command(module, "help %s --format=json" % command)
if rc != 0:
output = parse_out(err)
module.fail_json(msg=output)
command_help_json = module.from_json(out)
return command_help_json['definition']['options']
def composer_command(module, command, arguments="", options=None, global_command=False):
if options is None:
options = []
if module.params['executable'] is None:
php_path = module.get_bin_path("php", True, ["/usr/local/bin"])
else:
php_path = module.params['executable']
composer_path = module.get_bin_path("composer", True, ["/usr/local/bin"])
cmd = "%s %s %s %s %s %s" % (php_path, composer_path, "global" if global_command else "", command, " ".join(options), arguments)
return module.run_command(cmd)
def main():
module = AnsibleModule(
argument_spec=dict(
command=dict(default="install", type="str"),
arguments=dict(default="", type="str"),
executable=dict(type="path", aliases=["php_path"]),
working_dir=dict(type="path", aliases=["working-dir"]),
global_command=dict(default=False, type="bool", aliases=["global-command"]),
prefer_source=dict(default=False, type="bool", aliases=["prefer-source"]),
prefer_dist=dict(default=False, type="bool", aliases=["prefer-dist"]),
no_dev=dict(default=True, type="bool", aliases=["no-dev"]),
no_scripts=dict(default=False, type="bool", aliases=["no-scripts"]),
no_plugins=dict(default=False, type="bool", aliases=["no-plugins"]),
apcu_autoloader=dict(default=False, type="bool", aliases=["apcu-autoloader"]),
optimize_autoloader=dict(default=True, type="bool", aliases=["optimize-autoloader"]),
classmap_authoritative=dict(default=False, type="bool", aliases=["classmap-authoritative"]),
ignore_platform_reqs=dict(default=False, type="bool", aliases=["ignore-platform-reqs"]),
),
required_if=[('global_command', False, ['working_dir'])],
supports_check_mode=True
)
# Get composer command with fallback to default
command = module.params['command']
if re.search(r"\s", command):
module.fail_json(msg="Use the 'arguments' param for passing arguments with the 'command'")
arguments = module.params['arguments']
global_command = module.params['global_command']
available_options = get_available_options(module=module, command=command)
options = []
# Default options
default_options = [
'no-ansi',
'no-interaction',
'no-progress',
]
for option in default_options:
if option in available_options:
option = "--%s" % option
options.append(option)
if not global_command:
options.extend(['--working-dir', "'%s'" % module.params['working_dir']])
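# Map module boolean parameters to composer CLI flags; a flag is only appended when the
# chosen composer command actually advertises it in `composer help <command>`.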
option_params = {
'prefer_source': 'prefer-source',
'prefer_dist': 'prefer-dist',
'no_dev': 'no-dev',
'no_scripts': 'no-scripts',
'no_plugins': 'no-plugins',
'apcu_autoloader': 'apcu-autoloader',
'optimize_autoloader': 'optimize-autoloader',
'classmap_authoritative': 'classmap-authoritative',
'ignore_platform_reqs': 'ignore-platform-reqs',
}
for param, option in option_params.items():
if module.params.get(param) and option in available_options:
option = "--%s" % option
options.append(option)
if module.check_mode:
if 'dry-run' in available_options:
options.append('--dry-run')
else:
module.exit_json(skipped=True, msg="command '%s' does not support check mode, skipping" % command)
rc, out, err = composer_command(module, command, arguments, options, global_command)
if rc != 0:
output = parse_out(err)
module.fail_json(msg=output, stdout=err)
else:
# Composer version > 1.0.0-alpha9 now use stderr for standard notification messages
output = parse_out(out + err)
module.exit_json(changed=has_changed(output), msg=output, stdout=out + err)
if __name__ == '__main__':
main()


@@ -0,0 +1,218 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Franck Cuny <franck@lumberjaph.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cpanm
short_description: Manages Perl library dependencies.
description:
- Manage Perl library dependencies.
options:
name:
description:
- The name of the Perl library to install. You may use the "full distribution path", e.g. MIYAGAWA/Plack-0.99_05.tar.gz
aliases: ["pkg"]
from_path:
description:
- The local directory from where to install
notest:
description:
- Do not run unit tests
type: bool
default: no
locallib:
description:
- Specify the install base to install modules
type: path
mirror:
description:
- Specifies the base URL for the CPAN mirror to use
type: str
mirror_only:
description:
- Use the mirror's index file instead of the CPAN Meta DB
type: bool
default: no
installdeps:
description:
- Only install dependencies
type: bool
default: no
version:
description:
- The minimum version of the Perl module to consider acceptable
type: str
system_lib:
description:
- Use this if you want to install modules to the system perl include path. You must be root or have "passwordless" sudo for this to work.
- This uses the cpanm commandline option '--sudo', which has nothing to do with ansible privilege escalation.
type: bool
default: no
aliases: ['use_sudo']
executable:
description:
- Override the path to the cpanm executable
type: path
notes:
- Please note that L(cpanm, http://search.cpan.org/dist/App-cpanminus/bin/cpanm) must be installed on the remote host.
author: "Franck Cuny (@fcuny)"
'''
EXAMPLES = '''
# install Dancer perl package
- cpanm:
name: Dancer
# install version 0.99_05 of the Plack perl package
- cpanm:
name: MIYAGAWA/Plack-0.99_05.tar.gz
# install Dancer into the specified locallib
- cpanm:
name: Dancer
locallib: /srv/webapps/my_app/extlib
# install perl dependencies from local directory
- cpanm:
from_path: /srv/webapps/my_app/src/
# install Dancer perl package without running the unit tests in indicated locallib
- cpanm:
name: Dancer
notest: True
locallib: /srv/webapps/my_app/extlib
# install Dancer perl package from a specific mirror
- cpanm:
name: Dancer
mirror: 'http://cpan.cpantesters.org/'
# install Dancer perl package into the system root path
- cpanm:
name: Dancer
system_lib: yes
# install Dancer if it's not already installed
# OR the installed version is older than version 1.0
- cpanm:
name: Dancer
version: '1.0'
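# A minimal sketch: install only the dependencies declared by a local
# distribution, skipping its test suite (source path assumed).
- cpanm:
    from_path: /srv/webapps/my_app/src/
    installdeps: yes
    notest: yes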
'''
import os
from ansible.module_utils.basic import AnsibleModule
def _is_package_installed(module, name, locallib, cpanm, version):
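# The check runs `perl -e 'use <module> <version>'`: `use` dies when the module is
# missing or older than the requested version, so rc == 0 means it is already satisfied.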
cmd = ""
if locallib:
os.environ["PERL5LIB"] = "%s/lib/perl5" % locallib
cmd = "%s perl -e ' use %s" % (cmd, name)
if version:
cmd = "%s %s;'" % (cmd, version)
else:
cmd = "%s;'" % cmd
res, stdout, stderr = module.run_command(cmd, check_rc=False)
return res == 0
def _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo):
# this code should use "%s" like everything else and just return early but not fixing all of it now.
# don't copy stuff like this
if from_path:
cmd = cpanm + " " + from_path
else:
cmd = cpanm + " " + name
if notest is True:
cmd = cmd + " -n"
if locallib is not None:
cmd = cmd + " -l " + locallib
if mirror is not None:
cmd = cmd + " --mirror " + mirror
if mirror_only is True:
cmd = cmd + " --mirror-only"
if installdeps is True:
cmd = cmd + " --installdeps"
if use_sudo is True:
cmd = cmd + " --sudo"
return cmd
def _get_cpanm_path(module):
if module.params['executable']:
result = module.params['executable']
else:
result = module.get_bin_path('cpanm', True)
return result
def main():
arg_spec = dict(
name=dict(default=None, required=False, aliases=['pkg']),
from_path=dict(default=None, required=False, type='path'),
notest=dict(default=False, type='bool'),
locallib=dict(default=None, required=False, type='path'),
mirror=dict(default=None, required=False),
mirror_only=dict(default=False, type='bool'),
installdeps=dict(default=False, type='bool'),
system_lib=dict(default=False, type='bool', aliases=['use_sudo']),
version=dict(default=None, required=False),
executable=dict(required=False, type='path'),
)
module = AnsibleModule(
argument_spec=arg_spec,
required_one_of=[['name', 'from_path']],
)
cpanm = _get_cpanm_path(module)
name = module.params['name']
from_path = module.params['from_path']
notest = module.boolean(module.params.get('notest', False))
locallib = module.params['locallib']
mirror = module.params['mirror']
mirror_only = module.params['mirror_only']
installdeps = module.params['installdeps']
use_sudo = module.params['system_lib']
version = module.params['version']
changed = False
installed = _is_package_installed(module, name, locallib, cpanm, version)
if not installed:
cmd = _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo)
rc_cpanm, out_cpanm, err_cpanm = module.run_command(cmd, check_rc=False)
if rc_cpanm != 0:
module.fail_json(msg=err_cpanm, cmd=cmd)
if (err_cpanm.find('is up to date') == -1 and out_cpanm.find('is up to date') == -1):
changed = True
module.exit_json(changed=changed, binary=cpanm, name=name)
if __name__ == '__main__':
main()


@@ -0,0 +1,197 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Matt Wright <matt@nobien.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: easy_install
short_description: Installs Python libraries
description:
- Installs Python libraries, optionally in a I(virtualenv)
options:
name:
description:
- A Python library name
required: true
virtualenv:
description:
- an optional I(virtualenv) directory path to install into. If the
I(virtualenv) does not exist, it is created automatically
virtualenv_site_packages:
description:
- Whether the virtual environment will inherit packages from the
global site-packages directory. Note that if this setting is
changed on an already existing virtual environment it will not
have any effect, the environment must be deleted and newly
created.
type: bool
default: 'no'
virtualenv_command:
description:
- The command to create the virtual environment with. For example
C(pyvenv), C(virtualenv), C(virtualenv2).
default: virtualenv
executable:
description:
- The explicit executable or a pathname to the executable to be used to
run easy_install for a specific version of Python installed in the
system. For example C(easy_install-3.3), if there are both Python 2.7
and 3.3 installations in the system and you want to run easy_install
for the Python 3.3 installation.
state:
description:
- The desired state of the library. C(latest) ensures that the latest version is installed.
choices: [present, latest]
default: present
notes:
- Please note that the C(easy_install) module can only install Python
libraries. Thus this module is not able to remove libraries. It is
generally recommended to use the M(pip) module which you can first install
using M(easy_install).
- Also note that I(virtualenv) must be installed on the remote host if the
C(virtualenv) parameter is specified.
requirements: [ "virtualenv" ]
author: "Matt Wright (@mattupstate)"
'''
EXAMPLES = '''
# Examples from Ansible Playbooks
- easy_install:
name: pip
state: latest
# Install Bottle into the specified virtualenv.
- easy_install:
name: bottle
virtualenv: /webapps/myapp/venv
'''
import os
import os.path
import tempfile
from ansible.module_utils.basic import AnsibleModule
def install_package(module, name, easy_install, executable_arguments):
cmd = '%s %s %s' % (easy_install, ' '.join(executable_arguments), name)
rc, out, err = module.run_command(cmd)
return rc, out, err
def _is_package_installed(module, name, easy_install, executable_arguments):
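# Heuristic: a --dry-run install prints 'Downloading' only when the package would
# actually be fetched, i.e. when it is not already installed.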
# Copy and add to the arguments
executable_arguments = executable_arguments[:]
executable_arguments.append('--dry-run')
rc, out, err = install_package(module, name, easy_install, executable_arguments)
if rc:
module.fail_json(msg=err)
return 'Downloading' not in out
def _get_easy_install(module, env=None, executable=None):
candidate_easy_inst_basenames = ['easy_install']
easy_install = None
if executable is not None:
if os.path.isabs(executable):
easy_install = executable
else:
candidate_easy_inst_basenames.insert(0, executable)
if easy_install is None:
if env is None:
opt_dirs = []
else:
# Try easy_install with the virtualenv directory first.
opt_dirs = ['%s/bin' % env]
for basename in candidate_easy_inst_basenames:
easy_install = module.get_bin_path(basename, False, opt_dirs)
if easy_install is not None:
break
# easy_install should have been found by now. The final call to
# get_bin_path will trigger fail_json.
if easy_install is None:
basename = candidate_easy_inst_basenames[0]
easy_install = module.get_bin_path(basename, True, opt_dirs)
return easy_install
def main():
arg_spec = dict(
name=dict(required=True),
state=dict(required=False,
default='present',
choices=['present', 'latest'],
type='str'),
virtualenv=dict(default=None, required=False),
virtualenv_site_packages=dict(default='no', type='bool'),
virtualenv_command=dict(default='virtualenv', required=False),
executable=dict(default='easy_install', required=False),
)
module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)
name = module.params['name']
env = module.params['virtualenv']
executable = module.params['executable']
site_packages = module.params['virtualenv_site_packages']
virtualenv_command = module.params['virtualenv_command']
executable_arguments = []
if module.params['state'] == 'latest':
executable_arguments.append('--upgrade')
rc = 0
err = ''
out = ''
if env:
virtualenv = module.get_bin_path(virtualenv_command, True)
if not os.path.exists(os.path.join(env, 'bin', 'activate')):
if module.check_mode:
module.exit_json(changed=True)
command = '%s %s' % (virtualenv, env)
if site_packages:
command += ' --system-site-packages'
cwd = tempfile.gettempdir()
rc_venv, out_venv, err_venv = module.run_command(command, cwd=cwd)
rc += rc_venv
out += out_venv
err += err_venv
easy_install = _get_easy_install(module, env, executable)
cmd = None
changed = False
installed = _is_package_installed(module, name, easy_install, executable_arguments)
if not installed:
if module.check_mode:
module.exit_json(changed=True)
rc_easy_inst, out_easy_inst, err_easy_inst = install_package(module, name, easy_install, executable_arguments)
rc += rc_easy_inst
out += out_easy_inst
err += err_easy_inst
changed = True
if rc != 0:
module.fail_json(msg=err, cmd=cmd)
module.exit_json(changed=changed, binary=easy_install,
name=name, virtualenv=env)
if __name__ == '__main__':
main()


@@ -0,0 +1,308 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Johan Wiren <johan.wiren.se@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gem
short_description: Manage Ruby gems
description:
- Manage installation and uninstallation of Ruby gems.
options:
name:
description:
- The name of the gem to be managed.
required: true
state:
description:
- The desired state of the gem. C(latest) ensures that the latest version is installed.
required: false
choices: [present, absent, latest]
default: present
gem_source:
description:
- The path to a local gem used as installation source.
required: false
include_dependencies:
description:
- Whether to include dependencies or not.
required: false
type: bool
default: "yes"
repository:
description:
- The repository from which the gem will be installed
required: false
aliases: [source]
user_install:
description:
- Install gem in user's local gems cache or for all users
required: false
type: bool
default: "yes"
executable:
description:
- Override the path to the gem executable
required: false
install_dir:
description:
- Install the gems into a specific directory.
These gems will be independent of the globally installed ones.
Specifying this requires user_install to be false.
required: false
env_shebang:
description:
- Rewrite the shebang line on installed scripts to use /usr/bin/env.
required: false
default: "no"
type: bool
version:
description:
- Version of the gem to be installed/removed.
required: false
pre_release:
description:
- Allow installation of pre-release versions of the gem.
required: false
default: "no"
type: bool
include_doc:
description:
- Install with or without docs.
required: false
default: "no"
type: bool
build_flags:
description:
- Allow adding build flags for gem compilation
required: false
force:
description:
- Force gem to install, bypassing dependency checks.
required: false
default: "no"
type: bool
author:
- "Ansible Core Team"
- "Johan Wiren (@johanwiren)"
'''
EXAMPLES = '''
# Installs version 1.0 of vagrant.
- gem:
name: vagrant
version: 1.0
state: present
# Installs latest available version of rake.
- gem:
name: rake
state: latest
# Installs rake version 1.0 from a local gem on disk.
- gem:
name: rake
gem_source: /path/to/gems/rake-1.0.gem
state: present
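# A minimal sketch (directory assumed): install rake into a dedicated gem
# directory, which requires user_install to be disabled.
- gem:
    name: rake
    state: present
    user_install: no
    install_dir: /opt/app/gems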
'''
import re
from ansible.module_utils.basic import AnsibleModule
def get_rubygems_path(module):
if module.params['executable']:
result = module.params['executable'].split(' ')
else:
result = [module.get_bin_path('gem', True)]
return result
def get_rubygems_version(module):
cmd = get_rubygems_path(module) + ['--version']
(rc, out, err) = module.run_command(cmd, check_rc=True)
match = re.match(r'^(\d+)\.(\d+)\.(\d+)', out)
if not match:
return None
return tuple(int(x) for x in match.groups())
def get_rubygems_environ(module):
if module.params['install_dir']:
return {'GEM_HOME': module.params['install_dir']}
return None
def get_installed_versions(module, remote=False):
cmd = get_rubygems_path(module)
cmd.append('query')
if remote:
cmd.append('--remote')
if module.params['repository']:
cmd.extend(['--source', module.params['repository']])
cmd.append('-n')
cmd.append('^%s$' % module.params['name'])
environ = get_rubygems_environ(module)
(rc, out, err) = module.run_command(cmd, environ_update=environ, check_rc=True)
installed_versions = []
for line in out.splitlines():
match = re.match(r"\S+\s+\((.+)\)", line)
if match:
versions = match.group(1)
for version in versions.split(', '):
installed_versions.append(version.split()[0])
return installed_versions
def exists(module):
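# A gem counts as present when the requested version is installed, or when any version
# is installed and no version was given; for state=latest the newest remote version is
# looked up first and compared against what is installed.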
if module.params['state'] == 'latest':
remoteversions = get_installed_versions(module, remote=True)
if remoteversions:
module.params['version'] = remoteversions[0]
installed_versions = get_installed_versions(module)
if module.params['version']:
if module.params['version'] in installed_versions:
return True
else:
if installed_versions:
return True
return False
def uninstall(module):
if module.check_mode:
return
cmd = get_rubygems_path(module)
environ = get_rubygems_environ(module)
cmd.append('uninstall')
if module.params['install_dir']:
cmd.extend(['--install-dir', module.params['install_dir']])
if module.params['version']:
cmd.extend(['--version', module.params['version']])
else:
cmd.append('--all')
cmd.append('--executable')
cmd.append(module.params['name'])
module.run_command(cmd, environ_update=environ, check_rc=True)
def install(module):
if module.check_mode:
return
ver = get_rubygems_version(module)
if ver:
major = ver[0]
else:
major = None
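# RubyGems < 2 still understands the legacy --include-dependencies and --no-rdoc/--no-ri
# flags; newer releases get --no-document instead.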
cmd = get_rubygems_path(module)
cmd.append('install')
if module.params['version']:
cmd.extend(['--version', module.params['version']])
if module.params['repository']:
cmd.extend(['--source', module.params['repository']])
if not module.params['include_dependencies']:
cmd.append('--ignore-dependencies')
else:
if major and major < 2:
cmd.append('--include-dependencies')
if module.params['user_install']:
cmd.append('--user-install')
else:
cmd.append('--no-user-install')
if module.params['install_dir']:
cmd.extend(['--install-dir', module.params['install_dir']])
if module.params['pre_release']:
cmd.append('--pre')
if not module.params['include_doc']:
if major and major < 2:
cmd.append('--no-rdoc')
cmd.append('--no-ri')
else:
cmd.append('--no-document')
if module.params['env_shebang']:
cmd.append('--env-shebang')
# --force must come before the gem name and any '--' build flags, otherwise it would
# be passed to the extension build instead of to `gem install`.
if module.params['force']:
cmd.append('--force')
cmd.append(module.params['gem_source'])
if module.params['build_flags']:
cmd.extend(['--', module.params['build_flags']])
module.run_command(cmd, check_rc=True)
def main():
module = AnsibleModule(
argument_spec=dict(
executable=dict(required=False, type='path'),
gem_source=dict(required=False, type='path'),
include_dependencies=dict(required=False, default=True, type='bool'),
name=dict(required=True, type='str'),
repository=dict(required=False, aliases=['source'], type='str'),
state=dict(required=False, default='present', choices=['present', 'absent', 'latest'], type='str'),
user_install=dict(required=False, default=True, type='bool'),
install_dir=dict(required=False, type='path'),
pre_release=dict(required=False, default=False, type='bool'),
include_doc=dict(required=False, default=False, type='bool'),
env_shebang=dict(required=False, default=False, type='bool'),
version=dict(required=False, type='str'),
build_flags=dict(required=False, type='str'),
force=dict(required=False, default=False, type='bool'),
),
supports_check_mode=True,
mutually_exclusive=[['gem_source', 'repository'], ['gem_source', 'version']],
)
if module.params['version'] and module.params['state'] == 'latest':
module.fail_json(msg="Cannot specify version when state=latest")
if module.params['gem_source'] and module.params['state'] == 'latest':
module.fail_json(msg="Cannot maintain state=latest when installing from local source")
if module.params['user_install'] and module.params['install_dir']:
module.fail_json(msg="install_dir requires user_install=false")
if not module.params['gem_source']:
module.params['gem_source'] = module.params['name']
changed = False
if module.params['state'] in ['present', 'latest']:
if not exists(module):
install(module)
changed = True
elif module.params['state'] == 'absent':
if exists(module):
uninstall(module)
changed = True
result = {}
result['name'] = module.params['name']
result['state'] = module.params['state']
if module.params['version']:
result['version'] = module.params['version']
result['changed'] = changed
module.exit_json(**result)
if __name__ == '__main__':
main()


@@ -0,0 +1,667 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Chris Schmidt <chris.schmidt () contrastsecurity.com>
#
# Built using https://github.com/hamnis/useful-scripts/blob/master/python/download-maven-artifact
# as a reference and starting point.
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: maven_artifact
short_description: Downloads an Artifact from a Maven Repository
description:
- Downloads an artifact from a maven repository given the maven coordinates provided to the module.
- Can retrieve snapshots or release versions of the artifact, and will resolve the latest available
version if one is not specified.
author: "Chris Schmidt (@chrisisbeef)"
requirements:
- lxml
- boto3 if using an S3 repository (s3://...)
options:
group_id:
description:
- The Maven groupId coordinate
required: true
artifact_id:
description:
- The maven artifactId coordinate
required: true
version:
description:
- The maven version coordinate
- Mutually exclusive with I(version_by_spec).
version_by_spec:
description:
- The maven dependency version ranges.
- See supported version ranges on U(https://cwiki.apache.org/confluence/display/MAVENOLD/Dependency+Mediation+and+Conflict+Resolution)
- The range types "(,1.0],[1.2,)" and "(,1.1),(1.1,)" are not supported.
- Mutually exclusive with I(version).
classifier:
description:
- The maven classifier coordinate
extension:
description:
- The maven type/extension coordinate
default: jar
repository_url:
description:
- The URL of the Maven Repository to download from.
- Use s3://... if the repository is hosted on Amazon S3, added in version 2.2.
- Use file://... if the repository is local, added in version 2.6
default: https://repo1.maven.org/maven2
username:
description:
- The username to authenticate with on the Maven Repository. Use the AWS secret key if the repository is hosted on S3
aliases: [ "aws_secret_key" ]
password:
description:
- The password to authenticate with on the Maven Repository. Use the AWS secret access key if the repository is hosted on S3
aliases: [ "aws_secret_access_key" ]
headers:
description:
- Add custom HTTP headers to a request in hash/dict format.
type: dict
force_basic_auth:
description:
- httplib2, the library used by the uri module, only sends authentication information when a webservice
responds to an initial request with a 401 status. Since some basic auth services do not properly
send a 401, logins will fail. This option forces the sending of the Basic authentication header
upon the initial request.
default: 'no'
type: bool
dest:
description:
- The path where the artifact should be written to
- If file mode or ownership are specified and the destination path already exists, they affect the downloaded file
required: true
state:
description:
- The desired state of the artifact
default: present
choices: [present,absent]
timeout:
description:
- Specifies a timeout in seconds for the connection attempt
default: 10
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be set to C(no) when no other option exists.
type: bool
default: 'yes'
keep_name:
description:
- If C(yes), the downloaded artifact's name is preserved, i.e. the version number remains part of it.
- This option only has effect when C(dest) is a directory and C(version) is set to C(latest) or C(version_by_spec)
is defined.
type: bool
default: 'no'
verify_checksum:
description:
- If C(never), the md5 checksum will never be downloaded and verified.
- If C(download), the md5 checksum will be downloaded and verified only after artifact download. This is the default.
- If C(change), the md5 checksum will be downloaded and verified if the destination already exists,
to verify if they are identical. This was the behaviour before 2.6. Since it downloads the md5 before (maybe)
downloading the artifact, and since some repository software, when acting as a proxy/cache, return a 404 error
if the artifact has not been cached yet, it may fail unexpectedly.
If you still need it, you should consider using C(always) instead - if you deal with a checksum, it is better to
use it to verify integrity after download.
- C(always) combines C(download) and C(change).
required: false
default: 'download'
choices: ['never', 'download', 'change', 'always']
extends_documentation_fragment:
- files
'''
EXAMPLES = '''
# Download the latest version of the JUnit framework artifact from Maven Central
- maven_artifact:
group_id: junit
artifact_id: junit
dest: /tmp/junit-latest.jar
# Download JUnit 4.11 from Maven Central
- maven_artifact:
group_id: junit
artifact_id: junit
version: 4.11
dest: /tmp/junit-4.11.jar
# Download an artifact from a private repository requiring authentication
- maven_artifact:
group_id: com.company
artifact_id: library-name
repository_url: 'https://repo.company.com/maven'
username: user
password: pass
dest: /tmp/library-name-latest.jar
# Download a WAR File to the Tomcat webapps directory to be deployed
- maven_artifact:
group_id: com.company
artifact_id: web-app
extension: war
repository_url: 'https://repo.company.com/maven'
dest: /var/lib/tomcat7/webapps/web-app.war
# Keep a downloaded artifact's name, i.e. retain the version
- maven_artifact:
version: latest
artifact_id: spring-core
group_id: org.springframework
dest: /tmp/
keep_name: yes
# Download the latest version of the JUnit framework artifact from Maven local
- maven_artifact:
group_id: junit
artifact_id: junit
dest: /tmp/junit-latest.jar
repository_url: "file://{{ lookup('env','HOME') }}/.m2/repository"
# Download the latest version between 3.8 and 4.0 (exclusive) of the JUnit framework artifact from Maven Central
- maven_artifact:
group_id: junit
artifact_id: junit
version_by_spec: "[3.8,4.0)"
dest: /tmp/
'''
import hashlib
import os
import posixpath
import shutil
import io
import tempfile
import traceback
from ansible.module_utils.ansible_release import __version__ as ansible_version
from re import match
LXML_ETREE_IMP_ERR = None
try:
from lxml import etree
HAS_LXML_ETREE = True
except ImportError:
LXML_ETREE_IMP_ERR = traceback.format_exc()
HAS_LXML_ETREE = False
BOTO_IMP_ERR = None
try:
import boto3
HAS_BOTO = True
except ImportError:
BOTO_IMP_ERR = traceback.format_exc()
HAS_BOTO = False
SEMANTIC_VERSION_IMP_ERR = None
try:
from semantic_version import Version, Spec
HAS_SEMANTIC_VERSION = True
except ImportError:
SEMANTIC_VERSION_IMP_ERR = traceback.format_exc()
HAS_SEMANTIC_VERSION = False
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.six.moves.urllib.parse import urlparse
from ansible.module_utils.urls import fetch_url
from ansible.module_utils._text import to_bytes, to_native, to_text
def split_pre_existing_dir(dirname):
'''
Return the first pre-existing directory and a list of the new directories that will be created.
'''
head, tail = os.path.split(dirname)
b_head = to_bytes(head, errors='surrogate_or_strict')
if not os.path.exists(b_head):
if head == dirname:
return None, [head]
else:
(pre_existing_dir, new_directory_list) = split_pre_existing_dir(head)
else:
return head, [tail]
new_directory_list.append(tail)
return pre_existing_dir, new_directory_list
def adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed):
'''
Walk the new directories list and make sure that permissions are as we would expect
'''
if new_directory_list:
first_sub_dir = new_directory_list.pop(0)
if not pre_existing_dir:
working_dir = first_sub_dir
else:
working_dir = os.path.join(pre_existing_dir, first_sub_dir)
directory_args['path'] = working_dir
changed = module.set_fs_attributes_if_different(directory_args, changed)
changed = adjust_recursive_directory_permissions(working_dir, new_directory_list, module, directory_args, changed)
return changed
class Artifact(object):
def __init__(self, group_id, artifact_id, version, version_by_spec, classifier='', extension='jar'):
if not group_id:
raise ValueError("group_id must be set")
if not artifact_id:
raise ValueError("artifact_id must be set")
self.group_id = group_id
self.artifact_id = artifact_id
self.version = version
self.version_by_spec = version_by_spec
self.classifier = classifier
if not extension:
self.extension = "jar"
else:
self.extension = extension
def is_snapshot(self):
return self.version and self.version.endswith("SNAPSHOT")
def path(self, with_version=True):
base = posixpath.join(self.group_id.replace(".", "/"), self.artifact_id)
if with_version and self.version:
base = posixpath.join(base, self.version)
return base
def _generate_filename(self):
filename = self.artifact_id + "-" + self.classifier + "." + self.extension
if not self.classifier:
filename = self.artifact_id + "." + self.extension
return filename
def get_filename(self, filename=None):
if not filename:
filename = self._generate_filename()
elif os.path.isdir(filename):
filename = os.path.join(filename, self._generate_filename())
return filename
def __str__(self):
result = "%s:%s:%s" % (self.group_id, self.artifact_id, self.version)
if self.classifier:
result = "%s:%s:%s:%s:%s" % (self.group_id, self.artifact_id, self.extension, self.classifier, self.version)
elif self.extension != "jar":
result = "%s:%s:%s:%s" % (self.group_id, self.artifact_id, self.extension, self.version)
return result
@staticmethod
def parse(input):
parts = input.split(":")
if len(parts) >= 3:
g = parts[0]
a = parts[1]
v = parts[len(parts) - 1]
t = None
c = None
if len(parts) == 4:
t = parts[2]
if len(parts) == 5:
t = parts[2]
c = parts[3]
return Artifact(g, a, v, None, c, t)
else:
return None
class MavenDownloader:
def __init__(self, module, base, local=False, headers=None):
self.module = module
if base.endswith("/"):
base = base.rstrip("/")
self.base = base
self.local = local
self.headers = headers
self.user_agent = "Ansible {0} maven_artifact".format(ansible_version)
self.latest_version_found = None
self.metadata_file_name = "maven-metadata-local.xml" if local else "maven-metadata.xml"
def find_version_by_spec(self, artifact):
path = "/%s/%s" % (artifact.path(False), self.metadata_file_name)
content = self._getContent(self.base + path, "Failed to retrieve the maven metadata file: " + path)
xml = etree.fromstring(content)
original_versions = xml.xpath("/metadata/versioning/versions/version/text()")
versions = []
for version in original_versions:
try:
versions.append(Version.coerce(version))
except ValueError:
# This means that version string is not a valid semantic versioning
pass
parse_versions_syntax = {
# example -> (,1.0]
r"^\(,(?P<upper_bound>[0-9.]*)]$": "<={upper_bound}",
# example -> 1.0
r"^(?P<version>[0-9.]*)$": "~={version}",
# example -> [1.0]
r"^\[(?P<version>[0-9.]*)\]$": "=={version}",
# example -> [1.2, 1.3]
r"^\[(?P<lower_bound>[0-9.]*),\s*(?P<upper_bound>[0-9.]*)\]$": ">={lower_bound},<={upper_bound}",
# example -> [1.2, 1.3)
r"^\[(?P<lower_bound>[0-9.]*),\s*(?P<upper_bound>[0-9.]+)\)$": ">={lower_bound},<{upper_bound}",
# example -> [1.5,)
r"^\[(?P<lower_bound>[0-9.]*),\)$": ">={lower_bound}",
}
for regex, spec_format in parse_versions_syntax.items():
regex_result = match(regex, artifact.version_by_spec)
if regex_result:
spec = Spec(spec_format.format(**regex_result.groupdict()))
selected_version = spec.select(versions)
if not selected_version:
raise ValueError("No version found with this spec version: {0}".format(artifact.version_by_spec))
# To deal when repos on maven don't have patch number on first build (e.g. 3.8 instead of 3.8.0)
if str(selected_version) not in original_versions:
selected_version.patch = None
return str(selected_version)
raise ValueError("The spec version {0} is not supported! ".format(artifact.version_by_spec))
def find_latest_version_available(self, artifact):
if self.latest_version_found:
return self.latest_version_found
path = "/%s/%s" % (artifact.path(False), self.metadata_file_name)
content = self._getContent(self.base + path, "Failed to retrieve the maven metadata file: " + path)
xml = etree.fromstring(content)
v = xml.xpath("/metadata/versioning/versions/version[last()]/text()")
if v:
self.latest_version_found = v[0]
return v[0]
def find_uri_for_artifact(self, artifact):
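# Resolve the concrete download URI: expand version_by_spec / "latest" first, then for
# remote SNAPSHOT versions replace the SNAPSHOT suffix with the timestamped build number
# advertised in maven-metadata.xml.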
if artifact.version_by_spec:
artifact.version = self.find_version_by_spec(artifact)
if artifact.version == "latest":
artifact.version = self.find_latest_version_available(artifact)
if artifact.is_snapshot():
if self.local:
return self._uri_for_artifact(artifact, artifact.version)
path = "/%s/%s" % (artifact.path(), self.metadata_file_name)
content = self._getContent(self.base + path, "Failed to retrieve the maven metadata file: " + path)
xml = etree.fromstring(content)
for snapshotArtifact in xml.xpath("/metadata/versioning/snapshotVersions/snapshotVersion"):
classifier = snapshotArtifact.xpath("classifier/text()")
artifact_classifier = classifier[0] if classifier else ''
extension = snapshotArtifact.xpath("extension/text()")
artifact_extension = extension[0] if extension else ''
if artifact_classifier == artifact.classifier and artifact_extension == artifact.extension:
return self._uri_for_artifact(artifact, snapshotArtifact.xpath("value/text()")[0])
timestamp_xmlpath = xml.xpath("/metadata/versioning/snapshot/timestamp/text()")
if timestamp_xmlpath:
timestamp = timestamp_xmlpath[0]
build_number = xml.xpath("/metadata/versioning/snapshot/buildNumber/text()")[0]
return self._uri_for_artifact(artifact, artifact.version.replace("SNAPSHOT", timestamp + "-" + build_number))
return self._uri_for_artifact(artifact, artifact.version)
def _uri_for_artifact(self, artifact, version=None):
if artifact.is_snapshot() and not version:
raise ValueError("Expected uniqueversion for snapshot artifact " + str(artifact))
elif not artifact.is_snapshot():
version = artifact.version
if artifact.classifier:
return posixpath.join(self.base, artifact.path(), artifact.artifact_id + "-" + version + "-" + artifact.classifier + "." + artifact.extension)
return posixpath.join(self.base, artifact.path(), artifact.artifact_id + "-" + version + "." + artifact.extension)
# for small files, directly get the full content
def _getContent(self, url, failmsg, force=True):
if self.local:
parsed_url = urlparse(url)
if os.path.isfile(parsed_url.path):
with io.open(parsed_url.path, 'rb') as f:
return f.read()
if force:
raise ValueError(failmsg + " because can not find file: " + url)
return None
response = self._request(url, failmsg, force)
if response:
return response.read()
return None
# only for HTTP request
def _request(self, url, failmsg, force=True):
url_to_use = url
parsed_url = urlparse(url)
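# s3:// URLs are converted into a short-lived presigned HTTPS URL so fetch_url can
# download them; the module's username/password double as the AWS key pair.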
if parsed_url.scheme == 's3':
parsed_url = urlparse(url)
bucket_name = parsed_url.netloc
key_name = parsed_url.path[1:]
client = boto3.client('s3', aws_access_key_id=self.module.params.get('username', ''), aws_secret_access_key=self.module.params.get('password', ''))
url_to_use = client.generate_presigned_url('get_object', Params={'Bucket': bucket_name, 'Key': key_name}, ExpiresIn=10)
req_timeout = self.module.params.get('timeout')
# Hack to add parameters in the way that fetch_url expects
self.module.params['url_username'] = self.module.params.get('username', '')
self.module.params['url_password'] = self.module.params.get('password', '')
self.module.params['http_agent'] = self.user_agent
response, info = fetch_url(self.module, url_to_use, timeout=req_timeout, headers=self.headers)
if info['status'] == 200:
return response
if force:
raise ValueError(failmsg + " because of " + info['msg'] + "for URL " + url_to_use)
return None
def download(self, tmpdir, artifact, verify_download, filename=None):
if (not artifact.version and not artifact.version_by_spec) or artifact.version == "latest":
artifact = Artifact(artifact.group_id, artifact.artifact_id, self.find_latest_version_available(artifact), None,
artifact.classifier, artifact.extension)
url = self.find_uri_for_artifact(artifact)
tempfd, tempname = tempfile.mkstemp(dir=tmpdir)
try:
# copy to temp file
if self.local:
parsed_url = urlparse(url)
if os.path.isfile(parsed_url.path):
shutil.copy2(parsed_url.path, tempname)
else:
return "Can not find local file: " + parsed_url.path
else:
response = self._request(url, "Failed to download artifact " + str(artifact))
with os.fdopen(tempfd, 'wb') as f:
shutil.copyfileobj(response, f)
if verify_download:
invalid_md5 = self.is_invalid_md5(tempname, url)
if invalid_md5:
# if verify_change was set, the previous file would be deleted
os.remove(tempname)
return invalid_md5
except Exception as e:
os.remove(tempname)
raise e
# all good, now copy temp file to target
shutil.move(tempname, artifact.get_filename(filename))
return None
def is_invalid_md5(self, file, remote_url):
if os.path.exists(file):
local_md5 = self._local_md5(file)
if self.local:
parsed_url = urlparse(remote_url)
remote_md5 = self._local_md5(parsed_url.path)
else:
try:
remote_md5 = to_text(self._getContent(remote_url + '.md5', "Failed to retrieve MD5", False), errors='strict')
except UnicodeError as e:
return "Cannot retrieve a valid md5 from %s: %s" % (remote_url, to_native(e))
if not remote_md5:
return "Cannot find md5 from " + remote_url
try:
# Check if remote md5 only contains md5 or md5 + filename
_remote_md5 = remote_md5.split(None)[0]
remote_md5 = _remote_md5
# remote_md5 is empty so we continue and keep original md5 string
# This should not happen since we check for remote_md5 before
except IndexError as e:
pass
if local_md5 == remote_md5:
return None
else:
return "Checksum does not match: we computed " + local_md5 + "but the repository states " + remote_md5
return "Path does not exist: " + file
def _local_md5(self, file):
md5 = hashlib.md5()
with io.open(file, 'rb') as f:
for chunk in iter(lambda: f.read(8192), b''):
md5.update(chunk)
return md5.hexdigest()
def main():
module = AnsibleModule(
argument_spec=dict(
group_id=dict(required=True),
artifact_id=dict(required=True),
version=dict(default=None),
version_by_spec=dict(default=None),
classifier=dict(default=''),
extension=dict(default='jar'),
repository_url=dict(default='https://repo1.maven.org/maven2'),
username=dict(default=None, aliases=['aws_secret_key']),
password=dict(default=None, no_log=True, aliases=['aws_secret_access_key']),
headers=dict(type='dict'),
force_basic_auth=dict(default=False, type='bool'),
state=dict(default="present", choices=["present", "absent"]), # TODO - Implement a "latest" state
timeout=dict(default=10, type='int'),
dest=dict(type="path", required=True),
validate_certs=dict(required=False, default=True, type='bool'),
keep_name=dict(required=False, default=False, type='bool'),
verify_checksum=dict(required=False, default='download', choices=['never', 'download', 'change', 'always']),
directory_mode=dict(type='str'), # Used since https://github.com/ansible/ansible/pull/24965, not sure
# if this should really be here.
),
add_file_common_args=True,
        mutually_exclusive=[('version', 'version_by_spec')]
)
if not HAS_LXML_ETREE:
module.fail_json(msg=missing_required_lib('lxml'), exception=LXML_ETREE_IMP_ERR)
if module.params['version_by_spec'] and not HAS_SEMANTIC_VERSION:
module.fail_json(msg=missing_required_lib('semantic_version'), exception=SEMANTIC_VERSION_IMP_ERR)
repository_url = module.params["repository_url"]
if not repository_url:
repository_url = "https://repo1.maven.org/maven2"
try:
parsed_url = urlparse(repository_url)
except AttributeError as e:
module.fail_json(msg='url parsing went wrong %s' % e)
local = parsed_url.scheme == "file"
if parsed_url.scheme == 's3' and not HAS_BOTO:
module.fail_json(msg=missing_required_lib('boto3', reason='when using s3:// repository URLs'),
exception=BOTO_IMP_ERR)
group_id = module.params["group_id"]
artifact_id = module.params["artifact_id"]
version = module.params["version"]
version_by_spec = module.params["version_by_spec"]
classifier = module.params["classifier"]
extension = module.params["extension"]
headers = module.params['headers']
state = module.params["state"]
dest = module.params["dest"]
b_dest = to_bytes(dest, errors='surrogate_or_strict')
keep_name = module.params["keep_name"]
verify_checksum = module.params["verify_checksum"]
verify_download = verify_checksum in ['download', 'always']
verify_change = verify_checksum in ['change', 'always']
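    # verify_checksum semantics: 'download' checks files as they are downloaded, 'change'
    # compares an existing destination file against the repository to decide whether to
    # re-download, 'always' does both, and 'never' skips checksum handling entirely.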
downloader = MavenDownloader(module, repository_url, local, headers)
if not version_by_spec and not version:
version = "latest"
try:
artifact = Artifact(group_id, artifact_id, version, version_by_spec, classifier, extension)
except ValueError as e:
module.fail_json(msg=e.args[0])
changed = False
prev_state = "absent"
if dest.endswith(os.sep):
b_dest = to_bytes(dest, errors='surrogate_or_strict')
if not os.path.exists(b_dest):
(pre_existing_dir, new_directory_list) = split_pre_existing_dir(dest)
os.makedirs(b_dest)
directory_args = module.load_file_common_arguments(module.params)
directory_mode = module.params["directory_mode"]
if directory_mode is not None:
directory_args['mode'] = directory_mode
else:
directory_args['mode'] = None
changed = adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed)
if os.path.isdir(b_dest):
version_part = version
if version == 'latest':
version_part = downloader.find_latest_version_available(artifact)
elif version_by_spec:
version_part = downloader.find_version_by_spec(artifact)
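        # keep_name=True preserves the upstream file name (artifact-version[-classifier].extension);
        # otherwise the version is omitted from the destination file name.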
filename = "{artifact_id}{version_part}{classifier}.{extension}".format(
artifact_id=artifact_id,
version_part="-{0}".format(version_part) if keep_name else "",
classifier="-{0}".format(classifier) if classifier else "",
extension=extension
)
dest = posixpath.join(dest, filename)
b_dest = to_bytes(dest, errors='surrogate_or_strict')
if os.path.lexists(b_dest) and ((not verify_change) or not downloader.is_invalid_md5(dest, downloader.find_uri_for_artifact(artifact))):
prev_state = "present"
if prev_state == "absent":
try:
download_error = downloader.download(module.tmpdir, artifact, verify_download, b_dest)
if download_error is None:
changed = True
else:
module.fail_json(msg="Cannot retrieve the artifact to destination: " + download_error)
except ValueError as e:
module.fail_json(msg=e.args[0])
file_args = module.load_file_common_arguments(module.params, path=dest)
changed = module.set_fs_attributes_if_different(file_args, changed)
if changed:
module.exit_json(state=state, dest=dest, group_id=group_id, artifact_id=artifact_id, version=version, classifier=classifier,
extension=extension, repository_url=repository_url, changed=changed)
else:
module.exit_json(state=state, dest=dest, changed=changed)
if __name__ == '__main__':
main()

View file

@ -0,0 +1,311 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Chris Hoffman <christopher.hoffman@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: npm
short_description: Manage node.js packages with npm
description:
- Manage node.js packages with Node Package Manager (npm)
author: "Chris Hoffman (@chrishoffman)"
options:
name:
description:
- The name of a node.js library to install
type: str
required: false
path:
description:
- The base path where to install the node.js libraries
type: path
required: false
version:
description:
- The version to be installed
type: str
required: false
global:
description:
- Install the node.js library globally
required: false
default: no
type: bool
executable:
description:
- The executable location for npm.
- This is useful if you are using a version manager, such as nvm
type: path
required: false
ignore_scripts:
description:
- Use the C(--ignore-scripts) flag when installing.
required: false
type: bool
default: no
unsafe_perm:
description:
- Use the C(--unsafe-perm) flag when installing.
type: bool
default: no
ci:
description:
      - Install packages based on the package-lock file, the same as running C(npm ci).
type: bool
default: no
production:
description:
- Install dependencies in production mode, excluding devDependencies
required: false
type: bool
default: no
registry:
description:
- The registry to install modules from.
required: false
type: str
state:
description:
- The state of the node.js library
required: false
type: str
default: present
choices: [ "present", "absent", "latest" ]
requirements:
- npm installed in bin path (recommended /usr/local/bin)
'''
EXAMPLES = '''
- name: Install "coffee-script" node.js package.
npm:
name: coffee-script
path: /app/location
- name: Install "coffee-script" node.js package on version 1.6.1.
npm:
name: coffee-script
version: '1.6.1'
path: /app/location
- name: Install "coffee-script" node.js package globally.
npm:
name: coffee-script
global: yes
- name: Remove the globally installed package "coffee-script".
npm:
name: coffee-script
global: yes
state: absent
- name: Install "coffee-script" node.js package from custom registry.
npm:
name: coffee-script
registry: 'http://registry.mysite.com'
- name: Install packages based on package.json.
npm:
path: /app/location
- name: Update packages based on package.json to their latest version.
npm:
path: /app/location
state: latest
- name: Install packages based on package.json using the npm installed with nvm v0.10.1.
npm:
path: /app/location
executable: /opt/nvm/v0.10.1/bin/npm
state: present
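# Example for the documented "ci" option (equivalent to running "npm ci");
# the path reuses /app/location from the examples above.
- name: Install packages from package-lock.json, same as running npm ci.
  npm:
    path: /app/location
    ci: yes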
'''
import os
import re
from ansible.module_utils.basic import AnsibleModule
import json
class Npm(object):
def __init__(self, module, **kwargs):
self.module = module
self.glbl = kwargs['glbl']
self.name = kwargs['name']
self.version = kwargs['version']
self.path = kwargs['path']
self.registry = kwargs['registry']
self.production = kwargs['production']
self.ignore_scripts = kwargs['ignore_scripts']
self.unsafe_perm = kwargs['unsafe_perm']
self.state = kwargs['state']
if kwargs['executable']:
self.executable = kwargs['executable'].split(' ')
else:
self.executable = [module.get_bin_path('npm', True)]
if kwargs['version'] and self.state != 'absent':
self.name_version = self.name + '@' + str(self.version)
else:
self.name_version = self.name
def _exec(self, args, run_in_check_mode=False, check_rc=True):
if not self.module.check_mode or (self.module.check_mode and run_in_check_mode):
cmd = self.executable + args
if self.glbl:
cmd.append('--global')
if self.production and ('install' in cmd or 'update' in cmd):
cmd.append('--production')
if self.ignore_scripts:
cmd.append('--ignore-scripts')
if self.unsafe_perm:
cmd.append('--unsafe-perm')
if self.name:
cmd.append(self.name_version)
if self.registry:
cmd.append('--registry')
cmd.append(self.registry)
# If path is specified, cd into that path and run the command.
cwd = None
if self.path:
if not os.path.exists(self.path):
os.makedirs(self.path)
if not os.path.isdir(self.path):
self.module.fail_json(msg="path %s is not a directory" % self.path)
cwd = self.path
rc, out, err = self.module.run_command(cmd, check_rc=check_rc, cwd=cwd)
return out
return ''
def list(self):
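        # Read-only query: run "npm list --json --long" (also in check mode) and classify
        # each dependency as installed or missing/invalid.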
cmd = ['list', '--json', '--long']
installed = list()
missing = list()
data = json.loads(self._exec(cmd, True, False))
if 'dependencies' in data:
for dep in data['dependencies']:
if 'missing' in data['dependencies'][dep] and data['dependencies'][dep]['missing']:
missing.append(dep)
elif 'invalid' in data['dependencies'][dep] and data['dependencies'][dep]['invalid']:
missing.append(dep)
else:
installed.append(dep)
if self.name and self.name not in installed:
missing.append(self.name)
# Named dependency not installed
else:
missing.append(self.name)
return installed, missing
def install(self):
return self._exec(['install'])
def ci_install(self):
return self._exec(['ci'])
def update(self):
return self._exec(['update'])
def uninstall(self):
return self._exec(['uninstall'])
def list_outdated(self):
outdated = list()
data = self._exec(['outdated'], True, False)
for dep in data.splitlines():
if dep:
# node.js v0.10.22 changed the `npm outdated` module separator
# from "@" to " ". Split on both for backwards compatibility.
pkg, other = re.split(r'\s|@', dep, 1)
outdated.append(pkg)
return outdated
def main():
arg_spec = dict(
name=dict(default=None, type='str'),
path=dict(default=None, type='path'),
version=dict(default=None, type='str'),
production=dict(default='no', type='bool'),
executable=dict(default=None, type='path'),
registry=dict(default=None, type='str'),
state=dict(default='present', choices=['present', 'absent', 'latest']),
ignore_scripts=dict(default=False, type='bool'),
unsafe_perm=dict(default=False, type='bool'),
ci=dict(default=False, type='bool'),
)
arg_spec['global'] = dict(default='no', type='bool')
module = AnsibleModule(
argument_spec=arg_spec,
supports_check_mode=True
)
name = module.params['name']
path = module.params['path']
version = module.params['version']
glbl = module.params['global']
production = module.params['production']
executable = module.params['executable']
registry = module.params['registry']
state = module.params['state']
ignore_scripts = module.params['ignore_scripts']
unsafe_perm = module.params['unsafe_perm']
ci = module.params['ci']
if not path and not glbl:
module.fail_json(msg='path must be specified when not using global')
if state == 'absent' and not name:
module.fail_json(msg='uninstalling a package is only available for named packages')
npm = Npm(module, name=name, path=path, version=version, glbl=glbl, production=production,
executable=executable, registry=registry, ignore_scripts=ignore_scripts,
unsafe_perm=unsafe_perm, state=state)
changed = False
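    # "npm ci" always rebuilds node_modules from the lock file, so it is reported as a
    # change unconditionally.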
if ci:
npm.ci_install()
changed = True
elif state == 'present':
installed, missing = npm.list()
if missing:
changed = True
npm.install()
elif state == 'latest':
installed, missing = npm.list()
outdated = npm.list_outdated()
if missing:
changed = True
npm.install()
if outdated:
changed = True
npm.update()
else: # absent
installed, missing = npm.list()
if name in installed:
changed = True
npm.uninstall()
module.exit_json(changed=changed)
if __name__ == '__main__':
main()

View file

@ -0,0 +1,233 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Afterburn <https://github.com/afterburn>
# (c) 2013, Aaron Bull Schaefer <aaron@elasticdog.com>
# (c) 2015, Jonathan Lestrelin <jonathan.lestrelin@gmail.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: pear
short_description: Manage pear/pecl packages
description:
- Manage PHP packages with the pear package manager.
author:
- Jonathan Lestrelin (@jle64) <jonathan.lestrelin@gmail.com>
options:
name:
description:
- Name of the package to install, upgrade, or remove.
required: true
state:
description:
- Desired state of the package.
default: "present"
choices: ["present", "absent", "latest"]
executable:
description:
- Path to the pear executable
'''
EXAMPLES = '''
# Install pear package
- pear:
name: Net_URL2
state: present
# Install pecl package
- pear:
name: pecl/json_post
state: present
# Upgrade package
- pear:
name: Net_URL2
state: latest
# Remove packages
- pear:
name: Net_URL2,pecl/json_post
state: absent
'''
import os
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
def get_local_version(pear_output):
"""Take pear remoteinfo output and get the installed version"""
lines = pear_output.split('\n')
for line in lines:
if 'Installed ' in line:
installed = line.rsplit(None, 1)[-1].strip()
if installed == '-':
continue
return installed
return None
def _get_pear_path(module):
if module.params['executable'] and os.path.isfile(module.params['executable']):
result = module.params['executable']
else:
result = module.get_bin_path('pear', True, [module.params['executable']])
return result
def get_repository_version(pear_output):
"""Take pear remote-info output and get the latest version"""
lines = pear_output.split('\n')
for line in lines:
if 'Latest ' in line:
return line.rsplit(None, 1)[-1].strip()
return None
def query_package(module, name, state="present"):
"""Query the package status in both the local system and the repository.
Returns a boolean to indicate if the package is installed,
and a second boolean to indicate if the package is up-to-date."""
if state == "present":
lcmd = "%s info %s" % (_get_pear_path(module), name)
lrc, lstdout, lstderr = module.run_command(lcmd, check_rc=False)
if lrc != 0:
# package is not installed locally
return False, False
rcmd = "%s remote-info %s" % (_get_pear_path(module), name)
rrc, rstdout, rstderr = module.run_command(rcmd, check_rc=False)
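        # "pear remote-info" prints both the locally installed version and the latest version
        # available in the channel, so one call feeds both parsers below.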
# get the version installed locally (if any)
lversion = get_local_version(rstdout)
# get the version in the repository
rversion = get_repository_version(rstdout)
if rrc == 0:
# Return True to indicate that the package is installed locally,
# and the result of the version number comparison
# to determine if the package is up-to-date.
return True, (lversion == rversion)
return False, False
def remove_packages(module, packages):
remove_c = 0
# Using a for loop in case of error, we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
installed, updated = query_package(module, package)
if not installed:
continue
cmd = "%s uninstall %s" % (_get_pear_path(module), package)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to remove %s: %s" % (package, to_text(stdout + stderr)))
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, state, packages):
install_c = 0
for i, package in enumerate(packages):
# if the package is installed and state == present
# or state == latest and is up-to-date then skip
installed, updated = query_package(module, package)
if installed and (state == 'present' or (state == 'latest' and updated)):
continue
if state == 'present':
command = 'install'
if state == 'latest':
command = 'upgrade'
cmd = "%s %s %s" % (_get_pear_path(module), command, package)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to install %s: %s" % (package, to_text(stdout + stderr)))
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg="installed %s package(s)" % (install_c))
module.exit_json(changed=False, msg="package(s) already installed")
def check_packages(module, packages, state):
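    # Check mode: report what would change without modifying the system, then exit.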
would_be_changed = []
for package in packages:
installed, updated = query_package(module, package)
if ((state in ["present", "latest"] and not installed) or
(state == "absent" and installed) or
(state == "latest" and not updated)):
would_be_changed.append(package)
if would_be_changed:
if state == "absent":
state = "removed"
module.exit_json(changed=True, msg="%s package(s) would be %s" % (
len(would_be_changed), state))
else:
        module.exit_json(changed=False, msg="package(s) already %s" % state)
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(aliases=['pkg'], required=True),
state=dict(default='present', choices=['present', 'installed', "latest", 'absent', 'removed']),
executable=dict(default=None, required=False, type='path')),
supports_check_mode=True)
p = module.params
# normalize the state parameter
if p['state'] in ['present', 'installed']:
p['state'] = 'present'
elif p['state'] in ['absent', 'removed']:
p['state'] = 'absent'
if p['name']:
pkgs = p['name'].split(',')
pkg_files = []
for i, pkg in enumerate(pkgs):
pkg_files.append(None)
if module.check_mode:
check_packages(module, pkgs, p['state'])
if p['state'] in ['present', 'latest']:
install_packages(module, p['state'], pkgs)
elif p['state'] == 'absent':
remove_packages(module, pkgs)
if __name__ == '__main__':
main()

View file

@ -0,0 +1,151 @@
#!/usr/bin/python
# (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# started out with AWX's scan_packages module
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: pip_package_info
short_description: pip package information
description:
- Return information about installed pip packages
options:
clients:
description:
- A list of the pip executables that will be used to get the packages.
       They can be supplied with the full path or just the executable name, for example C(pip3.7).
default: ['pip']
required: False
type: list
requirements:
- The requested pip executables must be installed on the target.
author:
- Matthew Jones (@matburt)
- Brian Coca (@bcoca)
- Adam Miller (@maxamillion)
'''
EXAMPLES = '''
- name: Just get the list from default pip
pip_package_info:
- name: get the facts for default pip, pip2 and pip3.6
pip_package_info:
clients: ['pip', 'pip2', 'pip3.6']
- name: Get packages from a specific path (for example, a virtualenv)
pip_package_info:
clients: '/home/me/projec42/python/pip3.5'
'''
RETURN = '''
packages:
description: a dictionary of installed package data
returned: always
type: dict
contains:
python:
description: A dictionary with each pip client which then contains a list of dicts with python package information
returned: always
type: dict
sample:
"packages": {
"pip": {
"Babel": [
{
"name": "Babel",
"source": "pip",
"version": "2.6.0"
}
],
"Flask": [
{
"name": "Flask",
"source": "pip",
"version": "1.0.2"
}
],
"Flask-SQLAlchemy": [
{
"name": "Flask-SQLAlchemy",
"source": "pip",
"version": "2.3.2"
}
],
"Jinja2": [
{
"name": "Jinja2",
"source": "pip",
"version": "2.10"
}
],
},
}
'''
import json
import os
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.facts.packages import CLIMgr
class PIP(CLIMgr):
def __init__(self, pip):
self.CLI = pip
def list_installed(self):
global module
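        # "pip list -l --format=json" emits a JSON array of {"name", "version"} entries;
        # "-l" limits the listing to the local environment when run inside a virtualenv.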
rc, out, err = module.run_command([self._cli, 'list', '-l', '--format=json'])
if rc != 0:
raise Exception("Unable to list packages rc=%s : %s" % (rc, err))
return json.loads(out)
def get_package_details(self, package):
package['source'] = self.CLI
return package
def main():
# start work
global module
module = AnsibleModule(argument_spec=dict(clients={'type': 'list', 'default': ['pip']},), supports_check_mode=True)
packages = {}
results = {'packages': {}}
clients = module.params['clients']
found = 0
for pip in clients:
if not os.path.basename(pip).startswith('pip'):
module.warn('Skipping invalid pip client: %s' % (pip))
continue
try:
pip_mgr = PIP(pip)
if pip_mgr.is_available():
found += 1
packages[pip] = pip_mgr.get_packages()
except Exception as e:
module.warn('Failed to retrieve packages with %s: %s' % (pip, to_text(e)))
continue
if found == 0:
module.fail_json(msg='Unable to use any of the supplied pip clients: %s' % clients)
# return info
results['packages'] = packages
module.exit_json(**results)
if __name__ == '__main__':
main()

View file

@ -0,0 +1,389 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017 David Gunter <david.gunter@tivix.com>
# Copyright (c) 2017 Chris Hoffman <christopher.hoffman@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: yarn
short_description: Manage node.js packages with Yarn
description:
- Manage node.js packages with the Yarn package manager (https://yarnpkg.com/)
author:
- "David Gunter (@verkaufer)"
- "Chris Hoffman (@chrishoffman), creator of NPM Ansible module)"
options:
name:
description:
- The name of a node.js library to install
- If omitted all packages in package.json are installed.
      - To globally install from a local node.js library, prepend "file:" to the path of the node.js library.
required: false
path:
description:
- The base path where Node.js libraries will be installed.
- This is where the node_modules folder lives.
required: false
version:
description:
- The version of the library to be installed.
- Must be in semver format. If "latest" is desired, use "state" arg instead
required: false
global:
description:
- Install the node.js library globally
required: false
default: no
type: bool
executable:
description:
- The executable location for yarn.
required: false
ignore_scripts:
description:
- Use the --ignore-scripts flag when installing.
required: false
type: bool
default: no
production:
description:
- Install dependencies in production mode.
- Yarn will ignore any dependencies under devDependencies in package.json
required: false
type: bool
default: no
registry:
description:
- The registry to install modules from.
required: false
state:
description:
- Installation state of the named node.js library
- If absent is selected, a name option must be provided
required: false
default: present
choices: [ "present", "absent", "latest" ]
requirements:
- Yarn installed in bin path (typically /usr/local/bin)
'''
EXAMPLES = '''
- name: Install "imagemin" node.js package.
yarn:
name: imagemin
path: /app/location
- name: Install "imagemin" node.js package on version 5.3.1
yarn:
name: imagemin
version: '5.3.1'
path: /app/location
- name: Install "imagemin" node.js package globally.
yarn:
name: imagemin
global: yes
- name: Remove the globally-installed package "imagemin".
yarn:
name: imagemin
global: yes
state: absent
- name: Install "imagemin" node.js package from custom registry.
yarn:
name: imagemin
registry: 'http://registry.mysite.com'
- name: Install packages based on package.json.
yarn:
path: /app/location
- name: Update all packages in package.json to their latest version.
yarn:
path: /app/location
state: latest
'''
RETURN = '''
changed:
description: Whether Yarn changed any package data
returned: always
type: bool
sample: true
msg:
description: Provides an error message if Yarn syntax was incorrect
returned: failure
type: str
sample: "Package must be explicitly named when uninstalling."
invocation:
description: Parameters and values used during execution
returned: success
type: dict
sample: {
"module_args": {
"executable": null,
"globally": false,
"ignore_scripts": false,
"name": null,
"path": "/some/path/folder",
"production": false,
"registry": null,
"state": "present",
"version": null
}
}
out:
description: Output generated from Yarn with emojis removed.
returned: always
type: str
sample: "yarn add v0.16.1[1/4] Resolving packages...[2/4] Fetching packages...[3/4] Linking dependencies...[4/4]
Building fresh packages...success Saved lockfile.success Saved 1 new dependency..left-pad@1.1.3 Done in 0.59s."
'''
import os
import re
import json
from ansible.module_utils.basic import AnsibleModule
class Yarn(object):
DEFAULT_GLOBAL_INSTALLATION_PATH = '~/.config/yarn/global'
def __init__(self, module, **kwargs):
self.module = module
self.globally = kwargs['globally']
self.name = kwargs['name']
self.version = kwargs['version']
self.path = kwargs['path']
self.registry = kwargs['registry']
self.production = kwargs['production']
self.ignore_scripts = kwargs['ignore_scripts']
# Specify a version of package if version arg passed in
self.name_version = None
if kwargs['executable']:
self.executable = kwargs['executable'].split(' ')
else:
self.executable = [module.get_bin_path('yarn', True)]
if kwargs['version'] and self.name is not None:
self.name_version = self.name + '@' + str(self.version)
elif self.name is not None:
self.name_version = self.name
def _exec(self, args, run_in_check_mode=False, check_rc=True):
if not self.module.check_mode or (self.module.check_mode and run_in_check_mode):
if self.globally:
# Yarn global arg is inserted before the command (e.g. `yarn global {some-command}`)
args.insert(0, 'global')
cmd = self.executable + args
if self.production:
cmd.append('--production')
if self.ignore_scripts:
cmd.append('--ignore-scripts')
if self.registry:
cmd.append('--registry')
cmd.append(self.registry)
# always run Yarn without emojis when called via Ansible
cmd.append('--no-emoji')
# If path is specified, cd into that path and run the command.
cwd = None
if self.path and not self.globally:
if not os.path.exists(self.path):
# Module will make directory if not exists.
os.makedirs(self.path)
if not os.path.isdir(self.path):
self.module.fail_json(msg="Path provided %s is not a directory" % self.path)
cwd = self.path
if not os.path.isfile(os.path.join(self.path, 'package.json')):
self.module.fail_json(msg="Package.json does not exist in provided path.")
rc, out, err = self.module.run_command(cmd, check_rc=check_rc, cwd=cwd)
return out, err
        # In check mode return an empty (out, err) pair so callers can still unpack the result.
        return '', ''
def list(self):
cmd = ['list', '--depth=0', '--json']
installed = list()
missing = list()
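        # Without a yarn.lock the project has never been installed, so report the requested
        # package as missing straight away.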
if not os.path.isfile(os.path.join(self.path, 'yarn.lock')):
missing.append(self.name)
return installed, missing
result, error = self._exec(cmd, True, False)
if error:
self.module.fail_json(msg=error)
data = json.loads(result)
try:
dependencies = data['data']['trees']
except KeyError:
missing.append(self.name)
return installed, missing
for dep in dependencies:
            # rsplit so scoped packages such as "@scope/name" keep their full name
            name, version = dep['name'].rsplit('@', 1)
installed.append(name)
if self.name not in installed:
missing.append(self.name)
return installed, missing
def install(self):
if self.name_version:
# Yarn has a separate command for installing packages by name...
return self._exec(['add', self.name_version])
# And one for installing all packages in package.json
return self._exec(['install', '--non-interactive'])
def update(self):
return self._exec(['upgrade', '--latest'])
def uninstall(self):
return self._exec(['remove', self.name])
def list_outdated(self):
outdated = list()
if not os.path.isfile(os.path.join(self.path, 'yarn.lock')):
return outdated
cmd_result, err = self._exec(['outdated', '--json'], True, False)
if err:
self.module.fail_json(msg=err)
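        # "yarn outdated --json" prints one JSON document per line; the table of outdated
        # packages is expected on the second line.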
outdated_packages_data = cmd_result.splitlines()[1]
data = json.loads(outdated_packages_data)
try:
outdated_dependencies = data['data']['body']
except KeyError:
return outdated
for dep in outdated_dependencies:
# Outdated dependencies returned as a list of lists, where
# item at index 0 is the name of the dependency
outdated.append(dep[0])
return outdated
def main():
arg_spec = dict(
name=dict(default=None),
path=dict(default=None, type='path'),
version=dict(default=None),
production=dict(default='no', type='bool'),
executable=dict(default=None, type='path'),
registry=dict(default=None),
state=dict(default='present', choices=['present', 'absent', 'latest']),
ignore_scripts=dict(default=False, type='bool'),
)
arg_spec['global'] = dict(default='no', type='bool')
module = AnsibleModule(
argument_spec=arg_spec,
supports_check_mode=True
)
name = module.params['name']
path = module.params['path']
version = module.params['version']
globally = module.params['global']
production = module.params['production']
executable = module.params['executable']
registry = module.params['registry']
state = module.params['state']
ignore_scripts = module.params['ignore_scripts']
# When installing globally, users should not be able to define a path for installation.
# Require a path if global is False, though!
if path is None and globally is False:
module.fail_json(msg='Path must be specified when not using global arg')
elif path and globally is True:
module.fail_json(msg='Cannot specify path if doing global installation')
if state == 'absent' and not name:
module.fail_json(msg='Package must be explicitly named when uninstalling.')
if state == 'latest':
version = 'latest'
# When installing globally, use the defined path for global node_modules
if globally:
path = Yarn.DEFAULT_GLOBAL_INSTALLATION_PATH
yarn = Yarn(module,
name=name,
path=path,
version=version,
globally=globally,
production=production,
executable=executable,
registry=registry,
ignore_scripts=ignore_scripts)
changed = False
out = ''
err = ''
if state == 'present':
if not name:
changed = True
out, err = yarn.install()
else:
installed, missing = yarn.list()
if len(missing):
changed = True
out, err = yarn.install()
elif state == 'latest':
if not name:
changed = True
out, err = yarn.install()
else:
installed, missing = yarn.list()
outdated = yarn.list_outdated()
if len(missing):
changed = True
out, err = yarn.install()
if len(outdated):
changed = True
out, err = yarn.update()
else:
# state == absent
installed, missing = yarn.list()
if name in installed:
changed = True
out, err = yarn.uninstall()
module.exit_json(changed=changed, out=out, err=err)
if __name__ == '__main__':
main()