Mirror of https://github.com/ansible-collections/community.general.git, synced 2025-04-24 11:21:25 -07:00
added pickle and yaml cache plugins
Added a new base class for file-based cache plugins, since 99% of the code was common. The base class now also catches unexpected decoding exceptions, allows per-module file modes and encoding, and the jsonfile code has been moved into the base.
This commit is contained in:
parent
3812c76168
commit
374af06cbf
6 changed files with 250 additions and 149 deletions
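The refactor turns each file-based cache backend into a thin serialization layer: judging by the jsonfile diff below, a subclass only supplies a plugin_name plus _load/_dump hooks, while the base class owns directory setup, expiry, and the in-memory cache. As a rough sketch of what another backend looks like under this scheme (a sketch only; the actual yaml plugin added by this commit may differ, and the 'yamlfile' name below is hypothetical):

import yaml

from ansible.plugins.cache.base import BaseFileCacheModule


class CacheModule(BaseFileCacheModule):
    """
    A caching module backed by yaml files (illustrative sketch).
    """
    plugin_name = 'yamlfile'  # hypothetical name, for illustration only

    def _load(self, f):
        # Deserialize one cache file; the base class opens the file and
        # handles read errors and corrupt-file cleanup.
        return yaml.safe_load(f)

    def _dump(self, value, f):
        # Serialize the value back out; the base class owns the file
        # mode and encoding used for the write.
        f.write(yaml.dump(value, default_flow_style=False))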
154 lib/ansible/plugins/cache/jsonfile.py (vendored)
@@ -19,162 +19,22 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type
 
-import os
-import time
-import errno
-import codecs
-
 try:
     import simplejson as json
 except ImportError:
     import json
 
-from ansible import constants as C
-from ansible.errors import AnsibleError
-from ansible.module_utils._text import to_bytes
 from ansible.parsing.utils.jsonify import jsonify
-from ansible.plugins.cache.base import BaseCacheModule
-
-try:
-    from __main__ import display
-except ImportError:
-    from ansible.utils.display import Display
-    display = Display()
+from ansible.plugins.cache.base import BaseFileCacheModule
 
 
-class CacheModule(BaseCacheModule):
+class CacheModule(BaseFileCacheModule):
     """
     A caching module backed by json files.
     """
-    def __init__(self, *args, **kwargs):
-
-        self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
-        self._cache = {}
-        self._cache_dir = None
-
-        if C.CACHE_PLUGIN_CONNECTION:
-            # expects a dir path
-            self._cache_dir = os.path.expanduser(os.path.expandvars(C.CACHE_PLUGIN_CONNECTION))
-
-        if not self._cache_dir:
-            raise AnsibleError("error, 'jsonfile' cache plugin requires the 'fact_caching_connection' config option to be set (to a writeable directory path)")
-
-        if not os.path.exists(self._cache_dir):
-            try:
-                os.makedirs(self._cache_dir)
-            except (OSError,IOError) as e:
-                display.warning("error in 'jsonfile' cache plugin while trying to create cache dir %s : %s" % (self._cache_dir, to_bytes(e)))
-                return None
-
-    def get(self, key):
-        """ This checks the in memory cache first as the fact was not expired at 'gather time'
-        and it would be problematic if the key did expire after some long running tasks and
-        user gets 'undefined' error in the same play """
-
-        if key in self._cache:
-            return self._cache.get(key)
-
-        if self.has_expired(key) or key == "":
-            raise KeyError
-
-        cachefile = "%s/%s" % (self._cache_dir, key)
-        try:
-            with codecs.open(cachefile, 'r', encoding='utf-8') as f:
-                try:
-                    value = json.load(f)
-                    self._cache[key] = value
-                    return value
-                except ValueError as e:
-                    display.warning("error in 'jsonfile' cache plugin while trying to read %s : %s. Most likely a corrupt file, so erasing and failing." % (cachefile, to_bytes(e)))
-                    self.delete(key)
-                    raise AnsibleError("The JSON cache file %s was corrupt, or did not otherwise contain valid JSON data."
-                                       " It has been removed, so you can re-run your command now." % cachefile)
-        except (OSError,IOError) as e:
-            display.warning("error in 'jsonfile' cache plugin while trying to read %s : %s" % (cachefile, to_bytes(e)))
-            raise KeyError
-
-    def set(self, key, value):
-
-        self._cache[key] = value
-
-        cachefile = "%s/%s" % (self._cache_dir, key)
-        try:
-            f = codecs.open(cachefile, 'w', encoding='utf-8')
-        except (OSError,IOError) as e:
-            display.warning("error in 'jsonfile' cache plugin while trying to write to %s : %s" % (cachefile, to_bytes(e)))
-            pass
-        else:
-            f.write(jsonify(value, format=True))
-        finally:
-            try:
-                f.close()
-            except UnboundLocalError:
-                pass
-
-    def has_expired(self, key):
-
-        if self._timeout == 0:
-            return False
-
-        cachefile = "%s/%s" % (self._cache_dir, key)
-        try:
-            st = os.stat(cachefile)
-        except (OSError,IOError) as e:
-            if e.errno == errno.ENOENT:
-                return False
-            else:
-                display.warning("error in 'jsonfile' cache plugin while trying to stat %s : %s" % (cachefile, to_bytes(e)))
-                pass
-
-        if time.time() - st.st_mtime <= self._timeout:
-            return False
-
-        if key in self._cache:
-            del self._cache[key]
-        return True
-
-    def keys(self):
-        keys = []
-        for k in os.listdir(self._cache_dir):
-            if not (k.startswith('.') or self.has_expired(k)):
-                keys.append(k)
-        return keys
-
-    def contains(self, key):
-        cachefile = "%s/%s" % (self._cache_dir, key)
-
-        if key in self._cache:
-            return True
-
-        if self.has_expired(key):
-            return False
-        try:
-            os.stat(cachefile)
-            return True
-        except (OSError,IOError) as e:
-            if e.errno == errno.ENOENT:
-                return False
-            else:
-                display.warning("error in 'jsonfile' cache plugin while trying to stat %s : %s" % (cachefile, to_bytes(e)))
-                pass
-
-    def delete(self, key):
-        try:
-            del self._cache[key]
-        except KeyError:
-            pass
-        try:
-            os.remove("%s/%s" % (self._cache_dir, key))
-        except (OSError, IOError):
-            pass #TODO: only pass on non existing?
-
-    def flush(self):
-        self._cache = {}
-        for key in self.keys():
-            self.delete(key)
-
-    def copy(self):
-        ret = dict()
-        for key in self.keys():
-            ret[key] = self.get(key)
-        return ret
+    plugin_name = 'jsonfile'
+
+    def _load(self, f):
+        return json.load(f)
+
+    def _dump(self, value, f):
+        f.write(jsonify(value, format=True))
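For context, the logic deleted above is what moved into BaseFileCacheModule. In rough template-method form it looks something like the following (a sketch inferred from the removed jsonfile code and the commit message, not the actual base class source in lib/ansible/plugins/cache/base.py):

import codecs


class BaseFileCacheModule(object):

    read_mode = 'r'      # per-module file mode, per the commit message
    write_mode = 'w'
    encoding = 'utf-8'   # per-module encoding, per the commit message

    def __init__(self):
        self._cache = {}
        self._cache_dir = None  # set from fact_caching_connection in the real code

    def has_expired(self, key):
        return False  # the real implementation compares file mtime to the timeout

    def get(self, key):
        # The in-memory check and expiry handling stay common to all
        # backends, exactly as in the removed jsonfile get() above...
        if key in self._cache:
            return self._cache[key]
        if self.has_expired(key) or key == "":
            raise KeyError
        cachefile = "%s/%s" % (self._cache_dir, key)
        with codecs.open(cachefile, self.read_mode, encoding=self.encoding) as f:
            # ...while only the deserialization step is delegated, so
            # unexpected decoding exceptions can be caught in one place.
            value = self._load(f)
        self._cache[key] = value
        return value

    def _load(self, f):
        raise NotImplementedError  # supplied by the jsonfile / pickle / yaml subclasses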