Fix the mapping of module_name to Locks
The mapping was being reinitialized every time we forked, so the worker processes were not sharing the same Locks. It also did not account for modules that are invoked directly by an action plugin instead of going through the strategy plugins.
This commit is contained in:
parent 1d6608e84f
commit 98feafb411
2 changed files with 55 additions and 15 deletions
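To make the fork-sharing point in the commit message concrete, here is a minimal sketch (not Ansible code): Lock objects created once in the parent are inherited by forked workers, while a mapping rebuilt inside each worker yields unrelated Locks that no longer exclude each other. The names shared_locks and worker are illustrative only, and the example assumes the fork start method (e.g. Linux).

    import multiprocessing
    import os

    # Built once in the parent, before any workers fork: every child inherits
    # these exact Lock objects across fork().
    shared_locks = {'ping': multiprocessing.Lock(), None: multiprocessing.Lock()}

    def worker(module_name):
        # Correct: look the Lock up in the mapping inherited from the parent.
        lock = shared_locks.get(module_name, shared_locks[None])
        with lock:
            print('pid %s holds the shared lock for %r' % (os.getpid(), module_name))
        # Broken variant: rebuilding the mapping here (after the fork) would
        # hand every process its own private Lock, so they would no longer
        # serialize against each other.

    if __name__ == '__main__':
        procs = [multiprocessing.Process(target=worker, args=('ping',)) for _ in range(2)]
        for p in procs:
            p.start()
        for p in procs:
            p.join()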
@@ -34,7 +34,10 @@ from ansible.release import __version__, __author__
 from ansible import constants as C
 from ansible.errors import AnsibleError
 from ansible.utils.unicode import to_bytes, to_unicode
-from ansible.plugins.strategy import action_write_locks
+# Must import strategy and use write_locks from there
+# If we import write_locks directly then we end up binding a
+# variable to the object and then it never gets updated.
+from ansible.plugins import strategy

 try:
     from __main__ import display
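The comment in the hunk above is about Python name binding. A toy illustration (not Ansible code, using a stand-in module built with types.ModuleType): "from module import name" copies the current binding, so if the source module later rebinds that name, the copy goes stale; attribute access through the module always sees the latest object.

    import types

    strategy = types.ModuleType('strategy')           # stand-in for ansible.plugins.strategy
    strategy.action_write_locks = {'old': 'locks'}

    action_write_locks = strategy.action_write_locks  # what "from ... import ..." effectively does

    strategy.action_write_locks = {'new': 'locks'}    # the module later rebinds the name

    print(action_write_locks)            # {'old': 'locks'}  -- stale copy
    print(strategy.action_write_locks)   # {'new': 'locks'}  -- always current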
@@ -552,14 +555,29 @@ def _find_snippet_imports(module_name, module_data, module_path, module_args, ta
         zipdata = None
         # Optimization -- don't lock if the module has already been cached
         if os.path.exists(cached_module_filename):
             display.debug('ZIPLOADER: using cached module: %s' % cached_module_filename)
             zipdata = open(cached_module_filename, 'rb').read()
             # Fool the check later... I think we should just remove the check
             py_module_names.add(('basic',))
         else:
-            with action_write_locks[module_name]:
+            if module_name in strategy.action_write_locks:
+                display.debug('ZIPLOADER: Using lock for %s' % module_name)
+                lock = strategy.action_write_locks[module_name]
+            else:
+                # If the action plugin directly invokes the module (instead of
+                # going through a strategy) then we don't have a cross-process
+                # Lock specifically for this module. Use the "unexpected
+                # module" lock instead
+                display.debug('ZIPLOADER: Using generic lock for %s' % module_name)
+                lock = strategy.action_write_locks[None]
+
+            display.debug('ZIPLOADER: Acquiring lock')
+            with lock:
+                display.debug('ZIPLOADER: Lock acquired: %s' % id(lock))
+                # Check that no other process has created this while we were
+                # waiting for the lock
                 if not os.path.exists(cached_module_filename):
                     display.debug('ZIPLOADER: Creating module')
                     # Create the module zip data
                     zipoutput = BytesIO()
                     zf = zipfile.ZipFile(zipoutput, mode='w', compression=compression_method)
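The hunk above follows a double-checked locking pattern: a fast unlocked path for cache hits, then a per-module lock (falling back to a generic lock for modules the strategy did not anticipate) with a second existence check so only one writer builds the cached artifact. A hedged sketch of that flow, not the Ansible implementation; cache_locks, get_cached and build are illustrative names, and threading.Lock stands in for the multiprocessing Locks that Ansible shares across forked workers:

    import os
    import threading

    cache_locks = {'ping': threading.Lock(), None: threading.Lock()}

    def get_cached(module_name, cache_path, build):
        # Fast path: no locking needed if another writer already published the file.
        if os.path.exists(cache_path):
            return open(cache_path, 'rb').read()

        # Use the per-module lock if we have one, otherwise the generic lock.
        lock = cache_locks.get(module_name, cache_locks[None])
        with lock:
            # Re-check under the lock: another process may have created the
            # file while we were waiting.
            if not os.path.exists(cache_path):
                build(cache_path)
            return open(cache_path, 'rb').read()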
@@ -580,15 +598,19 @@ def _find_snippet_imports(module_name, module_data, module_path, module_args, ta
                         # Note -- if we have a global function to setup, that would
                         # be a better place to run this
                         os.mkdir(lookup_path)
+                    display.debug('ZIPLOADER: Writing module')
                     with open(cached_module_filename + '-part', 'w') as f:
                         f.write(zipdata)

                     # Rename the file into its final position in the cache so
                     # future users of this module can read it off the
                     # filesystem instead of constructing from scratch.
+                    display.debug('ZIPLOADER: Renaming module')
                     os.rename(cached_module_filename + '-part', cached_module_filename)
+                    display.debug('ZIPLOADER: Done creating module')

             if zipdata is None:
+                display.debug('ZIPLOADER: Reading module after lock')
                 # Another process wrote the file while we were waiting for
                 # the write lock. Go ahead and read the data from disk
                 # instead of re-creating it.
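The write-then-rename step in this hunk is what makes the unlocked fast path safe: readers either see no cache file or a complete one, never a partial write. A hedged sketch of that idea, not the Ansible implementation; publish_cache is an illustrative name:

    import os

    def publish_cache(cache_path, data):
        # Write to a temporary "-part" file first so no reader can open a
        # half-written cache entry.
        tmp_path = cache_path + '-part'
        with open(tmp_path, 'wb') as f:
            f.write(data)
        # os.rename() is atomic on POSIX when source and destination live on
        # the same filesystem, so the cache file appears all at once.
        os.rename(tmp_path, cache_path)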