nrunner: enable automatic status server
Users often complain about jobs with no test results.  There are of
course many possible causes, but a common one is a clash on the status
server port and the resulting failure to communicate with it.

Because it's currently dependent on a fixed TCP port, clashes are
quite easy to hit, especially when running multiple jobs at once (or
nested).

Also, a good number of tests have to go through the trouble of setting
up a custom status server to avoid clashes.  This removes those custom
setups and simply relies on the now-automatic status server.

This enables the automatic status server, based on a much more private
UNIX domain socket, so that clashes become virtually impossible.

Signed-off-by: Cleber Rosa <[email protected]>
clebergnu committed Aug 19, 2021
1 parent 2bd885c commit c470b8c
Showing 9 changed files with 20 additions and 103 deletions.
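
With the automatic status server in place, a job script no longer has to hunt for a free TCP port or set the status server keys at all. A minimal sketch of the simplified usage, closely following the updated examples/jobs/nrunner.py from this commit (the single test reference is only an illustration):

    import sys

    from avocado.core.job import Job

    # No 'nrunner.status_server_listen' / 'nrunner.status_server_uri' here:
    # with 'nrunner.status_server_auto' now defaulting to True, the runner
    # sets up a private UNIX domain socket status server by itself.
    config = {
        'run.test_runner': 'nrunner',
        'run.references': ['examples/tests/passtest.py'],
    }

    with Job.from_config(job_config=config) as job:
        sys.exit(job.run())
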
8 changes: 4 additions & 4 deletions avocado/plugins/runner_nrunner.py
@@ -55,11 +55,11 @@ def initialize(self):
 
         help_msg = ('If the status server should automatically choose '
                     'a "status_server_listen" and "status_server_uri" '
-                    'configuration. Default is not to auto configure a '
+                    'configuration. Default is to auto configure a '
                     'status server.')
         settings.register_option(section=section,
                                  key='status_server_auto',
-                                 default=False,
+                                 default=True,
                                  key_type=bool,
                                  help_msg=help_msg)
 
@@ -135,8 +135,8 @@ def configure(self, parser):
         settings.add_argparser_to_option(
             namespace='nrunner.status_server_auto',
             parser=parser,
-            long_arg='--nrunner-status-server-auto',
-            action='store_true')
+            long_arg='--nrunner-status-server-disable-auto',
+            action='store_false')
 
         settings.add_argparser_to_option(
             namespace='nrunner.status_server_listen',
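
Note that the command-line option is inverted along with the default: auto configuration is now opt-out, via --nrunner-status-server-disable-auto (action='store_false'). A job that still needs a fixed TCP status server can presumably disable the automatic one and provide the listen/URI pair explicitly; a hedged sketch using the configuration keys registered above, where the 127.0.0.1:8888 address is only an example value:

    from avocado.core.job import Job

    # Opt out of the automatic UNIX domain socket status server and pin a
    # TCP address instead (example address, pick any free port).
    config = {
        'run.test_runner': 'nrunner',
        'nrunner.status_server_auto': False,
        'nrunner.status_server_listen': '127.0.0.1:8888',
        'nrunner.status_server_uri': '127.0.0.1:8888',
        'run.references': ['/bin/true'],
    }

    with Job.from_config(job_config=config) as job:
        job.run()
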
5 changes: 0 additions & 5 deletions examples/jobs/nrunner.py
@@ -4,14 +4,9 @@
 
 from avocado.core.job import Job
 from avocado.core.suite import TestSuite
-from avocado.utils.network.ports import find_free_port
-
-status_server = '127.0.0.1:%u' % find_free_port()
 
 config = {
     'run.test_runner': 'nrunner',
-    'nrunner.status_server_listen': status_server,
-    'nrunner.status_server_uri': status_server,
     'run.references': [
         'selftests/unit/plugin/test_resolver.py',
         'selftests/functional/test_argument_parsing.py',
4 changes: 0 additions & 4 deletions selftests/check.py
@@ -11,7 +11,6 @@
 from avocado.core import exit_codes
 from avocado.core.job import Job
 from avocado.core.suite import TestSuite
-from avocado.utils.network.ports import find_free_port
 
 BOOLEAN_ENABLED = [True, 'true', 'on', 1]
 BOOLEAN_DISABLED = [False, 'false', 'off', 0]
@@ -542,12 +541,9 @@ def create_suites(args): # pylint: disable=W0621
     if args.functional:
         selftests.append('selftests/functional/')
 
-    status_server = '127.0.0.1:%u' % find_free_port()
     config_check = {
         'run.references': selftests,
         'run.test_runner': 'nrunner',
-        'nrunner.status_server_listen': status_server,
-        'nrunner.status_server_uri': status_server,
         'run.ignore_missing_references': True,
         'job.output.testlogs.statuses': ['FAIL']
     }
25 changes: 5 additions & 20 deletions selftests/functional/plugin/test_jsonresult.py
@@ -1,28 +1,17 @@
 import json
 from os import path
 
-from avocado.utils import process, script
-from avocado.utils.network import find_free_port
+from avocado.utils import process
 from selftests.utils import AVOCADO, TestCaseTmpDir
 
 
 class JsonResultTest(TestCaseTmpDir):
 
-    def setUp(self):
-        super(JsonResultTest, self).setUp()
-        status_server = '127.0.0.1:%u' % find_free_port()
-        self.config_file = script.TemporaryScript(
-            'avocado.conf',
-            ("[nrunner]\n"
-             "status_server_listen = %s\n"
-             "status_server_uri = %s\n") % (status_server, status_server))
-        self.config_file.save()
-
     def test_logfile(self):
-        cmd_line = ('%s --config %s run --test-runner=nrunner '
+        cmd_line = ('%s run --test-runner=nrunner '
                     'examples/tests/failtest.py '
                     '--job-results-dir %s --disable-sysinfo ' %
-                    (AVOCADO, self.config_file.path, self.tmpdir.name))
+                    (AVOCADO, self.tmpdir.name))
         process.run(cmd_line, ignore_status=True)
         json_path = path.join(self.tmpdir.name, 'latest', 'results.json')
 
@@ -33,18 +22,14 @@ def test_logfile(self):
         self.assertEqual(expected_logfile, test_data['logfile'])
 
     def test_fail_reason(self):
-        cmd_line = ('%s --config %s run --test-runner=nrunner '
+        cmd_line = ('%s run --test-runner=nrunner '
                     'examples/tests/failtest.py '
                     '--job-results-dir %s --disable-sysinfo ' %
-                    (AVOCADO, self.config_file.path, self.tmpdir.name))
+                    (AVOCADO, self.tmpdir.name))
         process.run(cmd_line, ignore_status=True)
         json_path = path.join(self.tmpdir.name, 'latest', 'results.json')
         with open(json_path, 'r') as json_file:
             data = json.load(json_file)
         test_data = data['tests'].pop()
         self.assertEqual('This test is supposed to fail',
                          test_data['fail_reason'])
-
-    def tearDown(self):
-        super(JsonResultTest, self).tearDown()
-        self.config_file.remove()
19 changes: 2 additions & 17 deletions selftests/functional/plugin/test_logs.py
@@ -2,7 +2,6 @@
 
 from avocado.core import exit_codes
 from avocado.utils import process, script
-from avocado.utils.network import find_free_port
 from selftests.utils import AVOCADO, BASEDIR, TestCaseTmpDir
 
 CONFIG = """[job.output.testlogs]
@@ -65,27 +64,13 @@ def tearDown(self):
 
 class TestLogging(TestCaseTmpDir):
 
-    def setUp(self):
-        super(TestLogging, self).setUp()
-        status_server = '127.0.0.1:%u' % find_free_port()
-        self.config_file = script.TemporaryScript(
-            'avocado.conf',
-            ("[nrunner]\n"
-             "status_server_listen = %s\n"
-             "status_server_uri = %s\n") % (status_server, status_server))
-        self.config_file.save()
-
     def test_job_log(self):
         pass_test = os.path.join(BASEDIR, 'examples', 'tests', 'passtest.py')
-        cmd_line = ('%s --config %s run --job-results-dir %s --test-runner=nrunner %s' %
-                    (AVOCADO, self.config_file.path, self.tmpdir.name, pass_test))
+        cmd_line = ('%s run --job-results-dir %s --test-runner=nrunner %s' %
+                    (AVOCADO, self.tmpdir.name, pass_test))
         process.run(cmd_line)
         log_file = os.path.join(self.tmpdir.name, 'latest', 'job.log')
         with open(log_file, 'r') as fp:
             log = fp.read()
         self.assertIn('passtest.py:PassTest.test: STARTED', log)
         self.assertIn('passtest.py:PassTest.test: PASS', log)
-
-    def tearDown(self):
-        super(TestLogging, self).tearDown()
-        self.config_file.remove()
18 changes: 1 addition & 17 deletions selftests/functional/test_basic.py
@@ -15,7 +15,6 @@
 from avocado.utils import astring, genio
 from avocado.utils import path as utils_path
 from avocado.utils import process, script
-from avocado.utils.network import find_free_port
 from selftests.utils import (AVOCADO, BASEDIR, TestCaseTmpDir,
                              python_module_available, skipOnLevelsInferiorThan,
                              skipUnlessPathExists, temp_dir_prefix)
@@ -588,24 +587,13 @@ def test_store_logging_stream(self):
 
 class DryRunTest(TestCaseTmpDir):
 
-    def setUp(self):
-        super(DryRunTest, self).setUp()
-        status_server = '127.0.0.1:%u' % find_free_port()
-        self.config_file = script.TemporaryScript(
-            'avocado.conf',
-            ("[nrunner]\n"
-             "status_server_listen = %s\n"
-             "status_server_uri = %s\n") % (status_server, status_server))
-        self.config_file.save()
-
     def test_dry_run(self):
         examples_path = os.path.join('examples', 'tests')
         passtest = os.path.join(examples_path, 'passtest.py')
         failtest = os.path.join(examples_path, 'failtest.py')
         gendata = os.path.join(examples_path, 'gendata.py')
-        cmd = ("%s --config %s run --test-runner=nrunner --disable-sysinfo --dry-run "
+        cmd = ("%s run --test-runner=nrunner --disable-sysinfo --dry-run "
               "--dry-run-no-cleanup --json - -- %s %s %s " % (AVOCADO,
-                                                              self.config_file.path,
                                                               passtest,
                                                               failtest,
                                                               gendata))
@@ -618,10 +606,6 @@ def test_dry_run(self):
             self.assertEqual(test['fail_reason'],
                              u'Test cancelled due to --dry-run')
 
-    def tearDown(self):
-        super(DryRunTest, self).tearDown()
-        self.config_file.remove()
-
 
 class RunnerHumanOutputTest(TestCaseTmpDir):
 
7 changes: 0 additions & 7 deletions selftests/functional/test_nrunner.py
@@ -4,7 +4,6 @@
 
 from avocado.core.job import Job
 from avocado.utils import process
-from avocado.utils.network.ports import find_free_port
 from selftests.utils import (AVOCADO, BASEDIR, TestCaseTmpDir,
                              skipUnlessPathExists)
 
@@ -14,29 +13,23 @@
 class NRunnerFeatures(unittest.TestCase):
     @skipUnlessPathExists('/bin/false')
     def test_custom_exit_codes(self):
-        status_server = "127.0.0.1:%u" % find_free_port()
         config = {'run.references': ['/bin/false'],
                   'run.test_runner': 'nrunner',
                   'runner.exectest.exitcodes.skip': [1],
-                  'nrunner.status_server_listen': status_server,
-                  'nrunner.status_server_uri': status_server,
                   'run.keep_tmp': True}
         with Job.from_config(job_config=config) as job:
             self.assertEqual(job.run(), 0)
 
     @skipUnlessPathExists('/bin/false')
     @skipUnlessPathExists('/bin/true')
     def test_failfast(self):
-        status_server = "127.0.0.1:%u" % find_free_port()
         config = {'run.references': ['/bin/true',
                                      '/bin/false',
                                      '/bin/true',
                                      '/bin/true'],
                   'run.test_runner': 'nrunner',
                   'run.failfast': True,
                   'nrunner.shuffle': False,
-                  'nrunner.status_server_listen': status_server,
-                  'nrunner.status_server_uri': status_server,
                   'nrunner.max_parallel_tasks': 1}
         with Job.from_config(job_config=config) as job:
             self.assertEqual(job.run(), 9)
33 changes: 8 additions & 25 deletions selftests/functional/test_output.py
@@ -168,38 +168,21 @@ def test3(self):
 from avocado.core.task.runtime import RuntimeTask
 from avocado.core.test_id import TestID
 from avocado.plugins import runner_nrunner
-from avocado.utils.network.ports import find_free_port
 
 
 class RunnerNRunnerWithFixedTasks(runner_nrunner.Runner):
-    @staticmethod
-    def _get_all_runtime_tasks(test_suite, job_id):
-        runtime_tasks = []
-        no_digits = len(str(len(test_suite)))
-        status_uris = [test_suite.config.get('nrunner.status_server_uri')]
-        for index, runnable in enumerate(test_suite.tests, start=1):
-            prefix = index
-            test_id = TestID(prefix, runnable.uri, None, no_digits)
-            if '/bin/true' in runnable.uri:
-                task = nrunner.Task(
-                    runnable, test_id, status_uris,
-                    nrunner.RUNNERS_REGISTRY_PYTHON_CLASS,
-                    job_id=job_id)
-            else:
-                task = nrunner.Task(
-                    runnable, test_id, status_uris,
-                    nrunner.RUNNERS_REGISTRY_PYTHON_CLASS,
-                    'non-test',
-                    job_id=job_id)
-            runtime_tasks.append(RuntimeTask(task))
-        return runtime_tasks
+    def _create_runtime_tasks_for_test(self, test_suite, runnable, no_digits,
+                                       index, variant, job_id):
+        result = super(RunnerNRunnerWithFixedTasks, self)._create_runtime_tasks_for_test(
+            test_suite, runnable, no_digits, index, variant, job_id)
+        for rt_task in result:
+            if rt_task.task.runnable.uri != '/bin/true':
+                rt_task.task.category = 'non-test'
+        return result
 
 
 if __name__ == '__main__':
-    status_server = '127.0.0.1:%u' % find_free_port()
     config = {'run.test_runner': 'nrunner',
-              'nrunner.status_server_listen': status_server,
-              'nrunner.status_server_uri': status_server,
               'run.references': ['/bin/true', '/bin/false']}
     job = Job.from_config(config)
     job.setup()
4 changes: 0 additions & 4 deletions selftests/functional/test_task_timeout.py
@@ -1,6 +1,5 @@
 from avocado.core.job import Job
 from avocado.utils import script
-from avocado.utils.network.ports import find_free_port
 from selftests.utils import TestCaseTmpDir, skipUnlessPathExists
 
 SCRIPT_CONTENT = """#!/bin/bash
@@ -20,10 +19,7 @@ def setUp(self):
 
     @skipUnlessPathExists('/bin/sleep')
     def test_sleep_longer_timeout(self):
-        status_server = '127.0.0.1:%u' % find_free_port()
         config = {'run.references': [self.script.path],
-                  'nrunner.status_server_listen': status_server,
-                  'nrunner.status_server_uri': status_server,
                   'run.results_dir': self.tmpdir.name,
                   'run.keep_tmp': True,
                   'task.timeout.running': 2,
