Initial commit

Ansible Core Team 2020-03-09 09:11:07 +00:00
commit aebc1b03fd
4861 changed files with 812621 additions and 0 deletions

plugins/cache/__init__.py (new, empty file)

plugins/cache/jsonfile.py (new file, 62 additions)

@@ -0,0 +1,62 @@
# (c) 2014, Brian Coca, Josh Drake, et al
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
cache: jsonfile
short_description: JSON formatted files.
description:
- This cache uses JSON formatted, per host, files saved to the filesystem.
author: Ansible Core (@ansible-core)
options:
_uri:
required: True
description:
- Path in which the cache plugin will save the JSON files
env:
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
ini:
- key: fact_caching_connection
section: defaults
_prefix:
description: User defined prefix to use when creating the JSON files
env:
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
ini:
- key: fact_caching_prefix
section: defaults
_timeout:
default: 86400
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
env:
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
ini:
- key: fact_caching_timeout
section: defaults
type: integer
'''
import codecs
import json
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
from ansible.plugins.cache import BaseFileCacheModule
class CacheModule(BaseFileCacheModule):
"""
A caching module backed by json files.
"""
def _load(self, filepath):
# Valid JSON is always UTF-8 encoded.
with codecs.open(filepath, 'r', encoding='utf-8') as f:
return json.load(f, cls=AnsibleJSONDecoder)
def _dump(self, value, filepath):
with codecs.open(filepath, 'w', encoding='utf-8') as f:
f.write(json.dumps(value, cls=AnsibleJSONEncoder, sort_keys=True, indent=4))
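Not part of the commit: a minimal sketch of exercising this plugin through Ansible's plugin loader rather than importing CacheModule directly. The path is a placeholder, and passing options as keyword arguments assumes the 2.9-era cache_loader API:

from ansible.plugins.loader import cache_loader

cache = cache_loader.get('jsonfile', **{'_uri': '/tmp/ansible_json_cache'})
cache.set('host1', {'ansible_os_family': 'Debian'})  # writes /tmp/ansible_json_cache/host1
assert cache.get('host1')['ansible_os_family'] == 'Debian'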

plugins/cache/memcached.py (new file, 242 additions)

@@ -0,0 +1,242 @@
# (c) 2014, Brian Coca, Josh Drake, et al
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
cache: memcached
short_description: Use memcached DB for cache
description:
- This cache uses JSON formatted, per host records saved in memcached.
requirements:
- memcache (python lib)
options:
_uri:
description:
- List of connection information for the memcached DBs
default: ['127.0.0.1:11211']
type: list
env:
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
ini:
- key: fact_caching_connection
section: defaults
_prefix:
description: User defined prefix to use when creating the DB entries
default: ansible_facts
env:
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
ini:
- key: fact_caching_prefix
section: defaults
_timeout:
default: 86400
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
env:
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
ini:
- key: fact_caching_timeout
section: defaults
type: integer
'''
import collections
import os
import time
from multiprocessing import Lock
from itertools import chain
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.common._collections_compat import MutableSet
from ansible.plugins.cache import BaseCacheModule
from ansible.utils.display import Display
try:
import memcache
except ImportError:
raise AnsibleError("python-memcached is required for the memcached fact cache")
display = Display()
class ProxyClientPool(object):
"""
Memcached connection pooling for thread/fork safety. Inspired by py-redis
connection pool.
Available connections are maintained in a deque and released in a FIFO manner.
"""
def __init__(self, *args, **kwargs):
self.max_connections = kwargs.pop('max_connections', 1024)
self.connection_args = args
self.connection_kwargs = kwargs
self.reset()
def reset(self):
self.pid = os.getpid()
self._num_connections = 0
self._available_connections = collections.deque(maxlen=self.max_connections)
self._locked_connections = set()
self._lock = Lock()
def _check_safe(self):
if self.pid != os.getpid():
with self._lock:
if self.pid == os.getpid():
# bail out - another thread already acquired the lock
return
self.disconnect_all()
self.reset()
def get_connection(self):
self._check_safe()
try:
connection = self._available_connections.popleft()
except IndexError:
connection = self.create_connection()
self._locked_connections.add(connection)
return connection
def create_connection(self):
if self._num_connections >= self.max_connections:
raise RuntimeError("Too many memcached connections")
self._num_connections += 1
return memcache.Client(*self.connection_args, **self.connection_kwargs)
def release_connection(self, connection):
self._check_safe()
self._locked_connections.remove(connection)
self._available_connections.append(connection)
def disconnect_all(self):
for conn in chain(self._available_connections, self._locked_connections):
conn.disconnect_all()
def __getattr__(self, name):
def wrapped(*args, **kwargs):
return self._proxy_client(name, *args, **kwargs)
return wrapped
def _proxy_client(self, name, *args, **kwargs):
conn = self.get_connection()
try:
return getattr(conn, name)(*args, **kwargs)
finally:
self.release_connection(conn)
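# Illustration, not in the original file: __getattr__ above makes the pool a
# drop-in stand-in for memcache.Client. Any method name resolves to
# _proxy_client, which checks a connection out of the deque, forwards the
# call, and releases the connection in a finally block. Hypothetical usage,
# assuming a reachable memcached at the given address:
#
#   pool = ProxyClientPool(['127.0.0.1:11211'], debug=0)
#   pool.set('k', 'v')  # borrow a connection, call Client.set, release
#   pool.get('k')       # same round trip, possibly on another connection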
class CacheModuleKeys(MutableSet):
"""
A set subclass that keeps track of insertion time and persists
the set in memcached.
"""
PREFIX = 'ansible_cache_keys'
def __init__(self, cache, *args, **kwargs):
self._cache = cache
self._keyset = dict(*args, **kwargs)
def __contains__(self, key):
return key in self._keyset
def __iter__(self):
return iter(self._keyset)
def __len__(self):
return len(self._keyset)
def add(self, key):
self._keyset[key] = time.time()
self._cache.set(self.PREFIX, self._keyset)
def discard(self, key):
del self._keyset[key]
self._cache.set(self.PREFIX, self._keyset)
def remove_by_timerange(self, s_min, s_max):
        for k in list(self._keyset.keys()):  # copy the keys; entries are deleted while iterating
t = self._keyset[k]
if s_min < t < s_max:
del self._keyset[k]
self._cache.set(self.PREFIX, self._keyset)
class CacheModule(BaseCacheModule):
def __init__(self, *args, **kwargs):
connection = ['127.0.0.1:11211']
try:
super(CacheModule, self).__init__(*args, **kwargs)
if self.get_option('_uri'):
connection = self.get_option('_uri')
self._timeout = self.get_option('_timeout')
self._prefix = self.get_option('_prefix')
except KeyError:
display.deprecated('Rather than importing CacheModules directly, '
'use ansible.plugins.loader.cache_loader', version='2.12')
if C.CACHE_PLUGIN_CONNECTION:
connection = C.CACHE_PLUGIN_CONNECTION.split(',')
self._timeout = C.CACHE_PLUGIN_TIMEOUT
self._prefix = C.CACHE_PLUGIN_PREFIX
self._cache = {}
self._db = ProxyClientPool(connection, debug=0)
self._keys = CacheModuleKeys(self._db, self._db.get(CacheModuleKeys.PREFIX) or [])
def _make_key(self, key):
return "{0}{1}".format(self._prefix, key)
def _expire_keys(self):
if self._timeout > 0:
expiry_age = time.time() - self._timeout
self._keys.remove_by_timerange(0, expiry_age)
def get(self, key):
if key not in self._cache:
value = self._db.get(self._make_key(key))
# guard against the key not being removed from the keyset;
# this could happen in cases where the timeout value is changed
# between invocations
if value is None:
self.delete(key)
raise KeyError
self._cache[key] = value
return self._cache.get(key)
def set(self, key, value):
self._db.set(self._make_key(key), value, time=self._timeout, min_compress_len=1)
self._cache[key] = value
self._keys.add(key)
def keys(self):
self._expire_keys()
return list(iter(self._keys))
def contains(self, key):
self._expire_keys()
return key in self._keys
    def delete(self, key):
        if key in self._cache:  # the local dict may not hold the key (see get() above)
            del self._cache[key]
        self._db.delete(self._make_key(key))
        self._keys.discard(key)
def flush(self):
for key in self.keys():
self.delete(key)
    def copy(self):
        # CacheModuleKeys has no copy(); snapshot key/value pairs like the redis backend
        return dict((k, self.get(k)) for k in self.keys())
def __getstate__(self):
return dict()
def __setstate__(self, data):
self.__init__()
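The keyset expiry above is easiest to see in isolation. A hedged, self-contained sketch follows, assuming CacheModuleKeys from the file above is in scope; _DictCache is a hypothetical stand-in for the memcached client (anything exposing a memcached-style set() works):

import time

class _DictCache(dict):
    """Hypothetical in-memory stand-in for the memcached client."""
    def set(self, key, value):
        self[key] = value

keys = CacheModuleKeys(_DictCache())
keys.add('stale_host')                        # scored with the current time.time()
keys.remove_by_timerange(0, time.time() + 1)  # a cut-off later than 'now' expires it
assert 'stale_host' not in keys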

plugins/cache/pickle.py (new file, 67 additions)

@@ -0,0 +1,67 @@
# (c) 2017, Brian Coca
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
cache: pickle
short_description: Pickle formatted files.
description:
- This cache uses Python's pickle serialization format, in per host files, saved to the filesystem.
author: Brian Coca (@bcoca)
options:
_uri:
required: True
description:
- Path in which the cache plugin will save the files
env:
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
ini:
- key: fact_caching_connection
section: defaults
_prefix:
description: User defined prefix to use when creating the files
env:
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
ini:
- key: fact_caching_prefix
section: defaults
_timeout:
default: 86400
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
env:
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
ini:
- key: fact_caching_timeout
          section: defaults
      type: integer
'''
try:
import cPickle as pickle
except ImportError:
import pickle
from ansible.module_utils.six import PY3
from ansible.plugins.cache import BaseFileCacheModule
class CacheModule(BaseFileCacheModule):
"""
A caching module backed by pickle files.
"""
def _load(self, filepath):
# Pickle is a binary format
with open(filepath, 'rb') as f:
if PY3:
return pickle.load(f, encoding='bytes')
else:
return pickle.load(f)
def _dump(self, value, filepath):
with open(filepath, 'wb') as f:
# Use pickle protocol 2 which is compatible with Python 2.3+.
pickle.dump(value, f, protocol=2)
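The protocol choice above is the compatibility pivot: protocol 2 is the newest pickle protocol Python 2 understands, so files written by either interpreter stay readable by the other. A small self-contained sketch, not from the commit:

import pickle

data = {'ansible_distribution': 'Fedora'}
blob = pickle.dumps(data, protocol=2)  # highest protocol Python 2.3+ can read
assert pickle.loads(blob) == data

Since pickle executes code on load, the cache path should be writable only by trusted users.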

plugins/cache/redis.py (new file, 155 additions)

@@ -0,0 +1,155 @@
# (c) 2014, Brian Coca, Josh Drake, et al
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
cache: redis
short_description: Use Redis DB for cache
description:
- This cache uses JSON formatted, per host records saved in Redis.
requirements:
- redis>=2.4.5 (python lib)
options:
_uri:
description:
- A colon separated string of connection information for Redis.
required: True
env:
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
ini:
- key: fact_caching_connection
section: defaults
_prefix:
description: User defined prefix to use when creating the DB entries
default: ansible_facts
env:
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
ini:
- key: fact_caching_prefix
section: defaults
_timeout:
default: 86400
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
env:
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
ini:
- key: fact_caching_timeout
section: defaults
type: integer
'''
import time
import json
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_text
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
from ansible.plugins.cache import BaseCacheModule
from ansible.utils.display import Display
try:
from redis import StrictRedis, VERSION
except ImportError:
raise AnsibleError("The 'redis' python module (version 2.4.5 or newer) is required for the redis fact cache, 'pip install redis'")
display = Display()
class CacheModule(BaseCacheModule):
"""
A caching module backed by redis.
Keys are maintained in a zset with their score being the timestamp
when they are inserted. This allows for the usage of 'zremrangebyscore'
    to expire keys. This mechanism is used instead of a pattern-matched
    'scan', for performance.
"""
def __init__(self, *args, **kwargs):
connection = []
try:
super(CacheModule, self).__init__(*args, **kwargs)
if self.get_option('_uri'):
connection = self.get_option('_uri').split(':')
self._timeout = float(self.get_option('_timeout'))
self._prefix = self.get_option('_prefix')
except KeyError:
display.deprecated('Rather than importing CacheModules directly, '
'use ansible.plugins.loader.cache_loader', version='2.12')
if C.CACHE_PLUGIN_CONNECTION:
connection = C.CACHE_PLUGIN_CONNECTION.split(':')
self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
self._prefix = C.CACHE_PLUGIN_PREFIX
self._cache = {}
self._db = StrictRedis(*connection)
self._keys_set = 'ansible_cache_keys'
def _make_key(self, key):
return self._prefix + key
def get(self, key):
if key not in self._cache:
value = self._db.get(self._make_key(key))
# guard against the key not being removed from the zset;
# this could happen in cases where the timeout value is changed
# between invocations
if value is None:
self.delete(key)
raise KeyError
self._cache[key] = json.loads(value, cls=AnsibleJSONDecoder)
return self._cache.get(key)
def set(self, key, value):
value2 = json.dumps(value, cls=AnsibleJSONEncoder, sort_keys=True, indent=4)
if self._timeout > 0: # a timeout of 0 is handled as meaning 'never expire'
self._db.setex(self._make_key(key), int(self._timeout), value2)
else:
self._db.set(self._make_key(key), value2)
if VERSION[0] == 2:
self._db.zadd(self._keys_set, time.time(), key)
else:
self._db.zadd(self._keys_set, {key: time.time()})
self._cache[key] = value
def _expire_keys(self):
if self._timeout > 0:
expiry_age = time.time() - self._timeout
self._db.zremrangebyscore(self._keys_set, 0, expiry_age)
    def keys(self):
        self._expire_keys()
        # zrange returns bytes under Python 3; decode so _make_key can concatenate
        return [to_text(k, errors='surrogate_or_strict') for k in self._db.zrange(self._keys_set, 0, -1)]
def contains(self, key):
self._expire_keys()
return (self._db.zrank(self._keys_set, key) is not None)
def delete(self, key):
if key in self._cache:
del self._cache[key]
self._db.delete(self._make_key(key))
self._db.zrem(self._keys_set, key)
def flush(self):
for key in self.keys():
self.delete(key)
def copy(self):
# TODO: there is probably a better way to do this in redis
ret = dict()
for key in self.keys():
ret[key] = self.get(key)
return ret
def __getstate__(self):
return dict()
def __setstate__(self, data):
self.__init__()
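Not part of the commit: a hedged sketch of the loader path recommended by the deprecation message above. The host:port:db string is an example value that __init__ splits into positional StrictRedis arguments; it assumes a reachable Redis server and the 2.9-era cache_loader API:

from ansible.plugins.loader import cache_loader

cache = cache_loader.get('redis', **{'_uri': 'localhost:6379:0'})  # -> StrictRedis('localhost', '6379', '0')
cache.set('host1', {'ansible_os_family': 'RedHat'})
assert cache.contains('host1')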

plugins/cache/yaml.py (new file, 64 additions)

@@ -0,0 +1,64 @@
# (c) 2017, Brian Coca
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
cache: yaml
short_description: YAML formatted files.
description:
- This cache uses YAML formatted, per host, files saved to the filesystem.
author: Brian Coca (@bcoca)
options:
_uri:
required: True
description:
- Path in which the cache plugin will save the files
env:
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
ini:
- key: fact_caching_connection
section: defaults
_prefix:
description: User defined prefix to use when creating the files
env:
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
ini:
- key: fact_caching_prefix
section: defaults
_timeout:
default: 86400
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
env:
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
ini:
- key: fact_caching_timeout
section: defaults
type: integer
'''
import codecs
import yaml
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.plugins.cache import BaseFileCacheModule
class CacheModule(BaseFileCacheModule):
"""
A caching module backed by yaml files.
"""
def _load(self, filepath):
with codecs.open(filepath, 'r', encoding='utf-8') as f:
return AnsibleLoader(f).get_single_data()
def _dump(self, value, filepath):
with codecs.open(filepath, 'w', encoding='utf-8') as f:
yaml.dump(value, f, Dumper=AnsibleDumper, default_flow_style=False)
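As with the jsonfile plugin earlier, a hedged loader round-trip (placeholder path, 2.9-era cache_loader API assumed); the payoff of this backend is that each per-host file is plain YAML and can be inspected by hand:

from ansible.plugins.loader import cache_loader

cache = cache_loader.get('yaml', **{'_uri': '/tmp/ansible_yaml_cache'})
cache.set('host1', {'ansible_hostname': 'web01'})
# with no _prefix configured, the entry lands in /tmp/ansible_yaml_cache/host1
print(open('/tmp/ansible_yaml_cache/host1').read())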