Add option to disable streaming to stdout file (#937)

This addresses #926.
My current intention is to use the new setting (set to True) unconditionally in AWX, since we don't want the additional disk writes that streaming to the stdout file incurs. If the executor team is okay with how this looks, I may ask @amolgautam25 to make that PR.
This use case only concerns the pexpect runner mode; I touched the subprocess mode here just for completeness. The subprocess mode also appears to be connected with how this behavior was originally introduced, so maybe @ganeshrn has more history to share about it.

Reviewed-by: John Westcott IV <None>
Reviewed-by: Alan Rominger <arominge@redhat.com>
Reviewed-by: David Shrewsbury <None>
AlanCoding committed Dec 15, 2021
1 parent 7f9de03 commit cb7a971
Showing 6 changed files with 60 additions and 22 deletions.
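Before the per-file diffs, a minimal sketch of how the new setting is meant to be consumed: enabling suppress_output_file through the env/settings file that the docs change below documents. The directory layout, playbook name, and paths here are placeholders and are not part of this commit.

# Sketch: enable the new setting via the env/settings file of a private data dir.
# Paths and the demo playbook are placeholders.
import os
import ansible_runner

private_data_dir = '/tmp/demo-private-data'
os.makedirs(os.path.join(private_data_dir, 'env'), exist_ok=True)
os.makedirs(os.path.join(private_data_dir, 'project'), exist_ok=True)

# env/settings is YAML; suppress_output_file skips writing the stdout/stderr
# artifact files for the run.
with open(os.path.join(private_data_dir, 'env', 'settings'), 'w') as f:
    f.write('suppress_output_file: True\n')

with open(os.path.join(private_data_dir, 'project', 'demo.yml'), 'w') as f:
    f.write('- hosts: localhost\n'
            '  gather_facts: false\n'
            '  tasks:\n'
            '    - debug: msg="hello"\n')

r = ansible_runner.run(private_data_dir=private_data_dir, playbook='demo.yml')
print(r.status)  # the stdout and stderr artifact files are not created for this run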
1 change: 1 addition & 0 deletions ansible_runner/config/_base.py
@@ -244,6 +244,7 @@ def _prepare_env(self, runner_mode='pexpect'):
self.ssh_key_path = os.path.join(self.artifact_dir, 'ssh_key_data')
open_fifo_write(self.ssh_key_path, self.ssh_key_data)

self.suppress_output_file = self.settings.get('suppress_output_file', False)
self.suppress_ansible_output = self.settings.get('suppress_ansible_output', self.quiet)

if 'fact_cache' in self.settings:
3 changes: 2 additions & 1 deletion ansible_runner/config/runner.py
@@ -65,7 +65,7 @@ class RunnerConfig(BaseConfig):
def __init__(self,
private_data_dir, playbook=None, inventory=None, roles_path=None, limit=None,
module=None, module_args=None, verbosity=None, host_pattern=None, binary=None,
extravars=None, suppress_ansible_output=False, process_isolation_path=None,
extravars=None, suppress_output_file=False, suppress_ansible_output=False, process_isolation_path=None,
process_isolation_hide_paths=None, process_isolation_show_paths=None,
process_isolation_ro_paths=None, resource_profiling=False,
resource_profiling_base_cgroup='ansible-runner', resource_profiling_cpu_poll_interval=0.25,
@@ -101,6 +101,7 @@ def __init__(self,

self.directory_isolation_path = directory_isolation_base_path
self.verbosity = verbosity
self.suppress_output_file = suppress_output_file
self.suppress_ansible_output = suppress_ansible_output
self.tags = tags
self.skip_tags = skip_tags
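RunnerConfig now accepts the flag as a keyword argument and stores it on the config object. Below is a small sketch of driving it through the lower-level Runner API in the same spirit as the new unit tests; the private data dir and playbook are placeholders, and setting the attribute after prepare() mirrors how the tests set it directly on the config.

# Sketch of the lower-level Runner API with output-file suppression.
# The private data dir is assumed to already contain project/demo.yml.
from ansible_runner import Runner
from ansible_runner.config.runner import RunnerConfig

rc = RunnerConfig(private_data_dir='/tmp/demo-private-data', playbook='demo.yml')
rc.prepare()
rc.suppress_output_file = True  # the new unit tests set this attribute directly

status, exitcode = Runner(config=rc).run()
# job_events are still written under the artifact dir, but no stdout or
# stderr file is created for this run.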
19 changes: 13 additions & 6 deletions ansible_runner/runner.py
@@ -10,6 +10,7 @@
import collections
import datetime
import logging
from io import StringIO

import six
import pexpect
@@ -117,9 +118,7 @@ def run(self):
password_values = []

self.status_callback('starting')
stdout_filename = os.path.join(self.config.artifact_dir, 'stdout')
command_filename = os.path.join(self.config.artifact_dir, 'command')
stderr_filename = os.path.join(self.config.artifact_dir, 'stderr')

try:
os.makedirs(self.config.artifact_dir, mode=0o700)
@@ -128,7 +127,6 @@
pass
else:
raise
os.close(os.open(stdout_filename, os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR))

job_events_path = os.path.join(self.config.artifact_dir, 'job_events')
if not os.path.exists(job_events_path):
@@ -151,9 +149,17 @@
else:
suppress_ansible_output = False

stdout_handle = codecs.open(stdout_filename, 'w', encoding='utf-8')
if not self.config.suppress_output_file:
stdout_filename = os.path.join(self.config.artifact_dir, 'stdout')
stderr_filename = os.path.join(self.config.artifact_dir, 'stderr')
os.close(os.open(stdout_filename, os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR))
stdout_handle = codecs.open(stdout_filename, 'w', encoding='utf-8')
stderr_handle = codecs.open(stderr_filename, 'w', encoding='utf-8')
else:
stdout_handle = StringIO()
stderr_handle = StringIO()

stdout_handle = OutputEventFilter(stdout_handle, self.event_callback, suppress_ansible_output, output_json=self.config.json_mode)
stderr_handle = codecs.open(stderr_filename, 'w', encoding='utf-8')
stderr_handle = OutputEventFilter(stderr_handle, self.event_callback, suppress_ansible_output, output_json=self.config.json_mode)

if self.runner_mode == 'pexpect' and not isinstance(self.config.expect_passwords, collections.OrderedDict):
@@ -307,7 +313,8 @@ def run(self):
echo=False,
use_poll=self.config.pexpect_use_poll,
)
child.logfile_read = stdout_handle
if not self.config.suppress_output_file:
child.logfile_read = stdout_handle
except pexpect.exceptions.ExceptionPexpect as e:
child = collections.namedtuple(
'MissingProcess', 'exitstatus isalive close'
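The heart of the change is the handle selection above: the stdout/stderr artifact files are only created when suppression is off, and an in-memory buffer is used otherwise, with OutputEventFilter wrapping the handles either way. A stripped-down sketch of that pattern (not ansible-runner's actual code):

# Stripped-down sketch of the handle selection introduced in runner.py.
import codecs
import os
import stat
from io import StringIO


def open_output_handles(artifact_dir, suppress_output_file):
    """Return (stdout_handle, stderr_handle) backing streams for a run."""
    if not suppress_output_file:
        stdout_filename = os.path.join(artifact_dir, 'stdout')
        stderr_filename = os.path.join(artifact_dir, 'stderr')
        os.close(os.open(stdout_filename, os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR))
        stdout_handle = codecs.open(stdout_filename, 'w', encoding='utf-8')
        stderr_handle = codecs.open(stderr_filename, 'w', encoding='utf-8')
    else:
        # No disk writes for raw output; callers still get file-like objects.
        stdout_handle = StringIO()
        stderr_handle = StringIO()
    return stdout_handle, stderr_handle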
3 changes: 2 additions & 1 deletion docs/intro.rst
@@ -139,7 +139,8 @@ The **settings** file is a little different than the other files provided in this
* ``pexpect_timeout``: ``10`` Number of seconds for the internal pexpect command to wait to block on input before continuing
* ``pexpect_use_poll``: ``True`` Use ``poll()`` function for communication with child processes instead of ``select()``. ``select()`` is used when the value is set to ``False``. ``select()`` has a known limitation of using only up to 1024 file descriptors.

* ``suppress_ansible_output``: ``False`` Allow output from ansible to not be printed to the screen
* ``suppress_output_file``: ``False`` Allow output from ansible to not be streamed to the ``stdout`` or ``stderr`` files inside of the artifacts directory.
* ``suppress_ansible_output``: ``False`` Allow output from ansible to not be printed to the screen.
* ``fact_cache``: ``'fact_cache'`` The directory relative to ``artifacts`` where ``jsonfile`` fact caching will be stored. Defaults to ``fact_cache``. This is ignored if ``fact_cache_type`` is different than ``jsonfile``.
* ``fact_cache_type``: ``'jsonfile'`` The type of fact cache to use. Defaults to ``jsonfile``.

16 changes: 12 additions & 4 deletions test/integration/test_runner.py
@@ -11,15 +11,20 @@

from ansible_runner.exceptions import AnsibleRunnerException

from test.utils.common import iterate_timeout


def test_password_prompt(rc):
rc.command = [sys.executable, '-c' 'import time; print(input("Password: "))']
rc.expect_passwords[re.compile(r'Password:\s*?$', re.M)] = '1234'
status, exitcode = Runner(config=rc).run()
assert status == 'successful'
assert exitcode == 0
with open(os.path.join(rc.artifact_dir, 'stdout')) as f:
assert '1234' in f.read()
# stdout file can be subject to a race condition
for _ in iterate_timeout(30.0, 'stdout file to be written with 1234 in it', interval=0.2):
with open(os.path.join(rc.artifact_dir, 'stdout')) as f:
if '1234' in f.read():
break


def test_run_command(rc):
@@ -270,5 +275,8 @@ def test_set_extra_vars(rc):
rc.prepare()
runner = Runner(config=rc)
status, exitcode = runner.run()
with open(os.path.join(rc.artifact_dir, 'stdout')) as f:
assert 'hello there' in f.read()
# stdout file can be subject to a race condition
for _ in iterate_timeout(30.0, 'stdout file to be written with "hello there" in it', interval=0.2):
with open(os.path.join(rc.artifact_dir, 'stdout')) as f:
if 'hello there' in f.read():
break
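Both test modules import iterate_timeout from test.utils.common, which is not part of this diff. A plausible sketch of such a polling helper, as an assumption about its shape rather than the project's actual implementation:

# Hypothetical sketch of a helper like test.utils.common.iterate_timeout;
# the real implementation is not shown in this commit.
import time


def iterate_timeout(max_seconds, purpose, interval=1.0):
    """Yield repeatedly until the caller breaks out; fail after max_seconds."""
    start = time.time()
    while time.time() - start < max_seconds:
        yield
        time.sleep(interval)
    raise Exception('Timed out waiting for: {0}'.format(purpose))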
40 changes: 30 additions & 10 deletions test/unit/test_runner.py
@@ -4,11 +4,14 @@
import os

import json
from pathlib import Path
import pexpect
import pytest
import six
import sys

from test.utils.common import iterate_timeout

from ansible_runner import Runner
from ansible_runner.exceptions import CallbackError, AnsibleRunnerException
from ansible_runner.config.runner import RunnerConfig
@@ -31,16 +34,6 @@ def rc(request, tmp_path):
return rc


@pytest.fixture(autouse=True)
def mock_sleep(request, mocker):
# the handle_termination process teardown mechanism uses `time.sleep` to
# wait on processes to respond to SIGTERM; these are tests and don't care
# about being nice
m = mocker.patch('time.sleep')
m.start()
request.addfinalizer(m.stop)


def test_simple_spawn(rc):
rc.command = ['ls', '-la']
status, exitcode = Runner(config=rc).run()
@@ -152,3 +145,30 @@ def test_status_callback_interface(rc, mocker):
assert runner.status_handler.call_count == 1
runner.status_handler.assert_called_with(dict(status='running', runner_ident=str(rc.ident)), runner_config=runner.config)
assert runner.status == 'running'


@pytest.mark.parametrize('runner_mode', ['pexpect', 'subprocess'])
def test_stdout_file_write(rc, runner_mode):
if runner_mode == 'pexpect':
pytest.skip('Writing to stdout can be flaky, probably due to some pexpect bug')
rc.command = ['echo', 'hello_world_marker']
rc.runner_mode = runner_mode
status, exitcode = Runner(config=rc).run()
assert status == 'successful'
stdout_path = Path(rc.artifact_dir) / 'stdout'
# this can be subject to a race condition so we will be patient with the check
for _ in iterate_timeout(30.0, 'stdout file to be written', interval=0.2):
if 'hello_world_marker' in stdout_path.read_text():
break


@pytest.mark.parametrize('runner_mode', ['pexpect', 'subprocess'])
def test_stdout_file_no_write(rc, runner_mode):
rc.command = ['echo', 'hello_world_marker']
rc.runner_mode = runner_mode
rc.suppress_output_file = True
status, exitcode = Runner(config=rc).run()
assert status == 'successful'
for filename in ('stdout', 'stderr'):
stdout_path = Path(rc.artifact_dir) / filename
assert not stdout_path.exists()
