Mirror of https://github.com/ansible-collections/community.general.git
monit: fix module detection of monitored process state (#1107)
* refactor and test
* require version >= 5.21.0
Prior to this version the status output was different
* python version compatibility
* use exception classes from utils
* modify monit to use 'status' output instead of 'summary' output
The summary output is a fixed-width table that truncates its contents
and prevents us from parsing the actual status of the program
(see the parsing sketch after this commit message).
* add integration tests + fixes
* remove unused handlers in monit integration test
* fix lint
* add '__metaclass__ = type' to integration python files
* raise AttributeError
* simplify status
* lint: add type to parameter docs
* remove lint ignore
* move monit process config into main file
* specify path to monit PID file
* set config location based on os_family
* create required directories
* update aliases to set group and skips
* add changelog
* add author
* add types to docs
* add EPEL repo
* custom vars for centos-6
* uninstall EPEL
* support older versions
* wait for status to change before exiting
* use 'validate' to force status updates
* handle 'execution failed'
* better status output for errors
* add more context to failure + standardize
* don't check rc for validate
* legacy string format support
* add integration test for 'reloaded' and 'present'
* don't wait after reload
* lint
* Revert "uninstall EPEL"
This reverts commit 4d548718d0.
* make 'present' more robust
* Apply suggestions from code review
Co-authored-by: Andrew Klychkov <aaklychkov@mail.ru>
* add license header
* drop daemon.py and use python-daemon instead
* skip python2.6 which is not supported by python-daemon
* refactor test tasks for reuse
* cleanup files after test
* lint
* start process before enabling monit
This shouldn't be necessary but I'm adding it in the hopes
it will make tests more robust.
* retry task
* attempt to rescue the task on failure
* fix indentation
* ignore check if rescue ran
* restart monit instead of reload
Co-authored-by: Andrew Klychkov <aaklychkov@mail.ru>
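As a rough illustration of why the 'status' output is easier to work with: each field sits on its own line, so a program's state can be read without the truncation the fixed-width 'summary' table suffers from. The sketch below is hypothetical and is NOT the module's actual implementation (that lives in plugins/modules/monitoring/monit.py and is exercised by the unit tests at the end of this diff); the function name and regexes are illustrative only.

import re


def parse_process_status(status_output, process_name):
    """Return the raw 'status' field for process_name from `monit status` output, or None."""
    # `monit status` prints one block per service, e.g.:
    #   Process 'processX'
    #     status                       Running
    #     monitoring status            Not monitored
    block_re = re.compile(r"Process '%s'(.*?)(?:^Process |\Z)" % re.escape(process_name),
                          re.MULTILINE | re.DOTALL)
    match = block_re.search(status_output)
    if match is None:
        return None  # the process is not configured in monit at all
    status_match = re.search(r"^\s+status\s+(.+)$", match.group(1), re.MULTILINE)
    return status_match.group(1).strip() if status_match else None

The TEST_OUTPUT template in the unit tests further below shows the real shape of such a block, including combined states like 'Execution failed | Does not exist' and pending transitions like 'Monitored - stop pending' that the module has to map onto its Status values.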
Parent: 6d960e9e10
Commit: 8de1c0c205
21 changed files with 698 additions and 127 deletions
tests/integration/targets/monit/aliases (new file)
@@ -0,0 +1,8 @@
destructive
needs/target/setup_epel
shippable/posix/group2
skip/osx
skip/macos
skip/freebsd
skip/aix
skip/python2.6 # python-daemon package used in integration tests requires >=2.7
tests/integration/targets/monit/defaults/main.yml (new file)
@@ -0,0 +1,4 @@
process_root: /opt/httpd_echo
process_file: "{{ process_root }}/httpd_echo.py"
process_venv: "{{ process_root }}/venv"
process_run_cmd: "{{ process_venv }}/bin/python {{ process_file }}"
tests/integration/targets/monit/files/httpd_echo.py (new file)
@@ -0,0 +1,50 @@
# (c) 2020, Simon Kelly <simongdkelly@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import daemon

try:
    from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer

    def write_to_output(stream, content):
        stream.write(content)
except ImportError:
    from http.server import BaseHTTPRequestHandler, HTTPServer

    def write_to_output(stream, content):
        stream.write(bytes(content, "utf-8"))


hostname = "localhost"
server_port = 8082


class EchoServer(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        self.end_headers()
        write_to_output(self.wfile, self.path)


def run_webserver():
    webServer = HTTPServer((hostname, server_port), EchoServer)
    print("Server started http://%s:%s" % (hostname, server_port))

    try:
        webServer.serve_forever()
    except KeyboardInterrupt:
        pass

    webServer.server_close()
    print("Server stopped.")


if __name__ == "__main__":
    context = daemon.DaemonContext()

    with context:
        run_webserver()
tests/integration/targets/monit/meta/main.yml (new file)
@@ -0,0 +1,2 @@
dependencies:
  - setup_pkg_mgr
tests/integration/targets/monit/tasks/check_state.yml (new file)
@@ -0,0 +1,20 @@
- name: "{{ reason }} ('up')"
  command: "curl -sf http://localhost:8082/hello"
  args:
    warn: false
  when: service_state == 'up'
  register: curl_result
  until: not curl_result.failed
  retries: 5
  delay: 1

- name: "{{ reason }} ('down')"
  command: "curl -sf http://localhost:8082/hello"
  args:
    warn: false
  register: curl_result
  failed_when: curl_result == 0
  when: service_state == 'down'
  until: not curl_result.failed
  retries: 5
  delay: 1
tests/integration/targets/monit/tasks/main.yml (new file)
@@ -0,0 +1,78 @@
####################################################################
# WARNING: These are designed specifically for Ansible tests       #
# and should not be used as examples of how to write Ansible roles #
####################################################################

- block:
    - name: Install EPEL repository (RHEL only)
      include_role:
        name: setup_epel

    - name: create required directories
      become: yes
      file:
        path: "{{ item }}"
        state: directory
      loop:
        - /var/lib/monit
        - /var/run/monit
        - "{{ process_root }}"

    - name: install monit
      become: yes
      package:
        name: monit
        state: present

    - include_vars: '{{ item }}'
      with_first_found:
        - files:
            - "{{ ansible_facts.distribution }}-{{ ansible_facts.distribution_major_version }}.yml"
            - '{{ ansible_os_family }}.yml'
            - 'defaults.yml'

    - name: monit config
      become: yes
      template:
        src: "monitrc.j2"
        dest: "{{ monitrc }}"

    - name: copy process file
      become: yes
      copy:
        src: httpd_echo.py
        dest: "{{ process_file }}"

    - name: install dependencies
      pip:
        name: "{{ item }}"
        virtualenv: "{{ process_venv }}"
      loop:
        - setuptools==44
        - python-daemon

    - name: restart monit
      become: yes
      service:
        name: monit
        state: restarted

    - include_tasks: test.yml

  always:
    - name: stop monit
      become: yes
      service:
        name: monit
        state: stopped

    - name: uninstall monit
      become: yes
      package:
        name: monit
        state: absent

    - name: remove process files
      file:
        path: "{{ process_root }}"
        state: absent
tests/integration/targets/monit/tasks/test.yml (new file)
@@ -0,0 +1,28 @@
# order is important
- import_tasks: test_reload_present.yml

- import_tasks: test_state.yml
  vars:
    state: stopped
    initial_state: up
    expected_state: down

- import_tasks: test_state.yml
  vars:
    state: started
    initial_state: down
    expected_state: up

- import_tasks: test_state.yml
  vars:
    state: unmonitored
    initial_state: up
    expected_state: down

- import_tasks: test_state.yml
  vars:
    state: monitored
    initial_state: down
    expected_state: up

- import_tasks: test_errors.yml
tests/integration/targets/monit/tasks/test_errors.yml (new file)
@@ -0,0 +1,6 @@
- name: Check an error occurs when wrong process name is used
  monit:
    name: missing
    state: started
  register: result
  failed_when: result is not skip and (result is success or result is not failed)
tests/integration/targets/monit/tasks/test_reload_present.yml (new file)
@@ -0,0 +1,60 @@
- name: reload monit when process is missing
  monit:
    name: httpd_echo
    state: reloaded
  register: result

- name: check that state is changed
  assert:
    that:
      - result is success
      - result is changed

- name: test process not present
  monit:
    name: httpd_echo
    state: present
    timeout: 5
  register: result
  failed_when: result is not skip and result is success

- name: test monitor missing process
  monit:
    name: httpd_echo
    state: monitored
  register: result
  failed_when: result is not skip and result is success

- name: start process
  shell: "{{ process_run_cmd }}"

- import_tasks: check_state.yml
  vars:
    reason: verify service running
    service_state: "up"

- name: add process config
  blockinfile:
    path: "{{ monitrc }}"
    block: |
      check process httpd_echo with matching "httpd_echo"
        start program = "{{ process_run_cmd }}"
        stop program = "/bin/sh -c 'kill `pgrep -f httpd_echo`'"
        if failed host localhost port 8082 then restart

- name: restart monit
  service:
    name: monit
    state: restarted

- name: test process present again
  monit:
    name: httpd_echo
    state: present
  register: result

- name: check that state is unchanged
  assert:
    that:
      - result is success
      - result is not changed
tests/integration/targets/monit/tasks/test_state.yml (new file)
@@ -0,0 +1,33 @@
- import_tasks: check_state.yml
  vars:
    reason: verify initial service state
    service_state: "{{ initial_state }}"

- name: change httpd_echo process state to {{ state }}
  monit:
    name: httpd_echo
    state: "{{ state }}"
  register: result

- name: check that state changed
  assert:
    that:
      - result is success
      - result is changed

- import_tasks: check_state.yml
  vars:
    reason: check service state after action
    service_state: "{{ expected_state }}"

- name: try change state again to {{ state }}
  monit:
    name: httpd_echo
    state: "{{ state }}"
  register: result

- name: check that state is not changed
  assert:
    that:
      - result is success
      - result is not changed
tests/integration/targets/monit/templates/monitrc.j2 (new file)
@@ -0,0 +1,13 @@
set daemon 2
set logfile /var/log/monit.log
set idfile /var/lib/monit/id
set statefile /var/lib/monit/state
set pidfile /var/run/monit.pid

set eventqueue
    basedir /var/lib/monit/events
    slots 100

set httpd port 2812 and
    use address localhost
    allow localhost
tests/integration/targets/monit/vars/CentOS-6.yml (new file)
@@ -0,0 +1 @@
monitrc: "/etc/monit.conf"
tests/integration/targets/monit/vars/RedHat.yml (new file)
@@ -0,0 +1 @@
monitrc: "/etc/monitrc"
tests/integration/targets/monit/vars/Suse.yml (new file)
@@ -0,0 +1 @@
monitrc: "/etc/monitrc"
tests/integration/targets/monit/vars/defaults.yml (new file)
@@ -0,0 +1 @@
monitrc: "/etc/monit/monitrc"
Three hunks from the sanity-test ignore lists (the file names are not shown in this view); in each hunk the two monit.py entries are the lines this commit removes ("remove lint ignore"):

@@ -675,8 +675,6 @@ plugins/modules/monitoring/logentries.py validate-modules:undocumented-parameter
plugins/modules/monitoring/logstash_plugin.py validate-modules:doc-missing-type
plugins/modules/monitoring/logstash_plugin.py validate-modules:invalid-ansiblemodule-schema
plugins/modules/monitoring/logstash_plugin.py validate-modules:parameter-type-not-in-doc
plugins/modules/monitoring/monit.py validate-modules:doc-missing-type
plugins/modules/monitoring/monit.py validate-modules:parameter-type-not-in-doc
plugins/modules/monitoring/newrelic_deployment.py validate-modules:doc-missing-type
plugins/modules/monitoring/pagerduty.py validate-modules:doc-default-does-not-match-spec
plugins/modules/monitoring/pagerduty.py validate-modules:doc-missing-type

@@ -675,8 +675,6 @@ plugins/modules/monitoring/logentries.py validate-modules:undocumented-parameter
plugins/modules/monitoring/logstash_plugin.py validate-modules:doc-missing-type
plugins/modules/monitoring/logstash_plugin.py validate-modules:invalid-ansiblemodule-schema
plugins/modules/monitoring/logstash_plugin.py validate-modules:parameter-type-not-in-doc
plugins/modules/monitoring/monit.py validate-modules:doc-missing-type
plugins/modules/monitoring/monit.py validate-modules:parameter-type-not-in-doc
plugins/modules/monitoring/newrelic_deployment.py validate-modules:doc-missing-type
plugins/modules/monitoring/pagerduty.py validate-modules:doc-default-does-not-match-spec
plugins/modules/monitoring/pagerduty.py validate-modules:doc-missing-type

@@ -543,8 +543,6 @@ plugins/modules/monitoring/logentries.py validate-modules:parameter-type-not-in-doc
plugins/modules/monitoring/logentries.py validate-modules:undocumented-parameter
plugins/modules/monitoring/logstash_plugin.py validate-modules:doc-missing-type
plugins/modules/monitoring/logstash_plugin.py validate-modules:parameter-type-not-in-doc
plugins/modules/monitoring/monit.py validate-modules:doc-missing-type
plugins/modules/monitoring/monit.py validate-modules:parameter-type-not-in-doc
plugins/modules/monitoring/newrelic_deployment.py validate-modules:doc-missing-type
plugins/modules/monitoring/pagerduty.py validate-modules:doc-default-does-not-match-spec
plugins/modules/monitoring/pagerduty.py validate-modules:doc-missing-type
tests/unit/plugins/modules/monitoring/test_monit.py (new file)
@@ -0,0 +1,140 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import mock
import pytest

from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.modules.monitoring import monit
from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleExitJson, AnsibleFailJson


TEST_OUTPUT = """
Process '%s'
  status                       %s
  monitoring status            Not monitored
  monitoring mode              active
"""


class MonitTest(unittest.TestCase):
    def setUp(self):
        self.module = mock.MagicMock()
        self.module.exit_json.side_effect = AnsibleExitJson
        self.module.fail_json.side_effect = AnsibleFailJson
        self.monit = monit.Monit(self.module, 'monit', 'processX', 1)
        self.monit._status_change_retry_count = 1
        mock_sleep = mock.patch('time.sleep')
        mock_sleep.start()
        self.addCleanup(mock_sleep.stop)

    def patch_status(self, side_effect):
        if not isinstance(side_effect, list):
            side_effect = [side_effect]
        return mock.patch.object(self.monit, 'get_status', side_effect=side_effect)

    def test_change_state_success(self):
        with self.patch_status([monit.Status.OK, monit.Status.NOT_MONITORED]):
            with self.assertRaises(AnsibleExitJson):
                self.monit.stop()
        self.module.fail_json.assert_not_called()
        self.module.run_command.assert_called_with('monit stop processX', check_rc=True)

    def test_change_state_fail(self):
        with self.patch_status([monit.Status.OK] * 3):
            with self.assertRaises(AnsibleFailJson):
                self.monit.stop()

    def test_reload_fail(self):
        self.module.run_command.return_value = (1, 'stdout', 'stderr')
        with self.assertRaises(AnsibleFailJson):
            self.monit.reload()

    def test_reload(self):
        self.module.run_command.return_value = (0, '', '')
        with self.patch_status(monit.Status.OK):
            with self.assertRaises(AnsibleExitJson):
                self.monit.reload()

    def test_wait_for_status_to_stop_pending(self):
        status = [
            monit.Status.MISSING,
            monit.Status.DOES_NOT_EXIST,
            monit.Status.INITIALIZING,
            monit.Status.OK.pending(),
            monit.Status.OK
        ]
        with self.patch_status(status) as get_status:
            self.monit.wait_for_monit_to_stop_pending()
            self.assertEqual(get_status.call_count, len(status))

    def test_wait_for_status_change(self):
        with self.patch_status([monit.Status.NOT_MONITORED, monit.Status.OK]) as get_status:
            self.monit.wait_for_status_change(monit.Status.NOT_MONITORED)
            self.assertEqual(get_status.call_count, 2)

    def test_wait_for_status_change_fail(self):
        with self.patch_status([monit.Status.OK] * 3):
            with self.assertRaises(AnsibleFailJson):
                self.monit.wait_for_status_change(monit.Status.OK)

    def test_monitor(self):
        with self.patch_status([monit.Status.NOT_MONITORED, monit.Status.OK.pending(), monit.Status.OK]):
            with self.assertRaises(AnsibleExitJson):
                self.monit.monitor()

    def test_monitor_fail(self):
        with self.patch_status([monit.Status.NOT_MONITORED] * 3):
            with self.assertRaises(AnsibleFailJson):
                self.monit.monitor()

    def test_timeout(self):
        self.monit.timeout = 0
        with self.patch_status(monit.Status.NOT_MONITORED.pending()):
            with self.assertRaises(AnsibleFailJson):
                self.monit.wait_for_monit_to_stop_pending()


@pytest.mark.parametrize('status_name', [name for name in monit.StatusValue.ALL_STATUS])
def test_status_value(status_name):
    value = getattr(monit.StatusValue, status_name.upper())
    status = monit.StatusValue(value)
    assert getattr(status, 'is_%s' % status_name)
    assert not all(getattr(status, 'is_%s' % name) for name in monit.StatusValue.ALL_STATUS if name != status_name)


BASIC_OUTPUT_CASES = [
    (TEST_OUTPUT % ('processX', name), getattr(monit.Status, name.upper()))
    for name in monit.StatusValue.ALL_STATUS
]


@pytest.mark.parametrize('output, expected', BASIC_OUTPUT_CASES + [
    ('', monit.Status.MISSING),
    (TEST_OUTPUT % ('processY', 'OK'), monit.Status.MISSING),
    (TEST_OUTPUT % ('processX', 'Not Monitored - start pending'), monit.Status.OK),
    (TEST_OUTPUT % ('processX', 'Monitored - stop pending'), monit.Status.NOT_MONITORED),
    (TEST_OUTPUT % ('processX', 'Monitored - restart pending'), monit.Status.OK),
    (TEST_OUTPUT % ('processX', 'Not Monitored - monitor pending'), monit.Status.OK),
    (TEST_OUTPUT % ('processX', 'Does not exist'), monit.Status.DOES_NOT_EXIST),
    (TEST_OUTPUT % ('processX', 'Not monitored'), monit.Status.NOT_MONITORED),
    (TEST_OUTPUT % ('processX', 'Running'), monit.Status.OK),
    (TEST_OUTPUT % ('processX', 'Execution failed | Does not exist'), monit.Status.EXECUTION_FAILED),
])
def test_parse_status(output, expected):
    status = monit.Monit(None, '', 'processX', 0)._parse_status(output, '')
    assert status == expected


@pytest.mark.parametrize('output, expected', [
    ('This is monit version 5.18.1', '5.18.1'),
    ('This is monit version 12.18', '12.18'),
    ('This is monit version 5.1.12', '5.1.12'),
])
def test_parse_version(output, expected):
    module = mock.MagicMock()
    module.run_command.return_value = (0, output, '')
    raw_version, version_tuple = monit.Monit(module, '', 'processX', 0)._get_monit_version()
    assert raw_version == expected
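The wait/retry behaviour exercised above by test_wait_for_status_to_stop_pending, test_wait_for_status_change and test_timeout can be pictured with a small, hypothetical polling loop. This is not the module's code (the real methods report failures through the Ansible module's fail_json, and their names and timings differ); it only illustrates the "wait for status to change before exiting" bullet from the commit message.

import time


def wait_for_status_change(get_status, old_status, timeout, sleep_interval=0.5):
    """Poll get_status() until it returns something other than old_status, or give up at timeout."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        current = get_status()
        if current != old_status:
            return current
        time.sleep(sleep_interval)  # the unit tests mock time.sleep, so the polling is instant there
    raise RuntimeError('status did not change from %r within %s seconds' % (old_status, timeout))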