Skip to content

Commit 662c93e

Browse files
committed
Add a perflog entry also when sanity fails
1 parent 5688d9c commit 662c93e

File tree

7 files changed: +95 −36 lines changed

reframe/core/logging.py

-8
Original file line numberDiff line numberDiff line change
@@ -128,14 +128,6 @@ def ignore_brokenpipe(hdlr, l):
128128
logging.Handler.handleError = handleError(logging.Handler.handleError)
129129

130130

131-
def _expand_params(check):
132-
cls = type(check)
133-
return {
134-
name: getattr(check, name) for name, param in cls.param_space.items
135-
if param.is_loggable()
136-
}
137-
138-
139131
def _guess_delim(s):
140132
'''Guess the delimiter in the given logging format string'''
141133
delims = set()

reframe/core/pipeline.py

+23-23
Original file line numberDiff line numberDiff line change
@@ -2235,14 +2235,11 @@ def check_performance(self):
22352235
self.perf_variables[var] = sn.make_performance_function(expr,
22362236
unit)
22372237

2238-
if self.is_dry_run():
2239-
return
2240-
22412238
# Evaluate the performance function and retrieve the metrics
22422239
with osext.change_dir(self._stagedir):
22432240
for tag, expr in self.perf_variables.items():
22442241
try:
2245-
value = expr.evaluate()
2242+
value = expr.evaluate() if not self.is_dry_run() else None
22462243
unit = expr.unit
22472244
except Exception as e:
22482245
logging.getlogger().warning(
@@ -2282,27 +2279,30 @@ def check_performance(self):
22822279

22832280
self._perfvalues[key] = (value, *ref, unit)
22842281

2285-
# Check the performance variables against their references.
2286-
for key, values in self._perfvalues.items():
2287-
val, ref, low_thres, high_thres, *_ = values
2282+
if self.is_dry_run():
2283+
return
22882284

2289-
# Verify that val is a number
2290-
if not isinstance(val, numbers.Number):
2291-
raise SanityError(
2292-
f'the value extracted for performance variable '
2293-
f'{key!r} is not a number: {val}'
2294-
)
2285+
# Check the performance variables against their references.
2286+
for key, values in self._perfvalues.items():
2287+
val, ref, low_thres, high_thres, *_ = values
22952288

2296-
tag = key.split(':')[-1]
2297-
try:
2298-
sn.evaluate(
2299-
sn.assert_reference(
2300-
val, ref, low_thres, high_thres,
2301-
msg=('failed to meet reference: %s={0}, '
2302-
'expected {1} (l={2}, u={3})' % tag))
2303-
)
2304-
except SanityError as e:
2305-
raise PerformanceError(e) from None
2289+
# Verify that val is a number
2290+
if not isinstance(val, numbers.Number):
2291+
raise SanityError(
2292+
f'the value extracted for performance variable '
2293+
f'{key!r} is not a number: {val}'
2294+
)
2295+
2296+
tag = key.split(':')[-1]
2297+
try:
2298+
sn.evaluate(
2299+
sn.assert_reference(
2300+
val, ref, low_thres, high_thres,
2301+
msg=('failed to meet reference: %s={0}, '
2302+
'expected {1} (l={2}, u={3})' % tag))
2303+
)
2304+
except SanityError as e:
2305+
raise PerformanceError(e) from None
23062306

23072307
def _copy_job_files(self, job, dst):
23082308
if job is None:

reframe/frontend/cli.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1432,7 +1432,7 @@ def module_unuse(*paths):
14321432
printer, options.duration or options.reruns
14331433
)
14341434

1435-
if options.performance_report:
1435+
if options.performance_report and not options.dry_run:
14361436
printer.info(runner.stats.performance_report())
14371437

14381438
# Generate the report for this session

reframe/frontend/executors/__init__.py

+39-3
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
# SPDX-License-Identifier: BSD-3-Clause
55

66
import abc
7+
import contextlib
78
import copy
89
import os
910
import signal
@@ -140,6 +141,17 @@ def clone_testcases(cases):
140141
return dependencies.toposort(dependencies.build_deps(new_cases)[0])
141142

142143

144+
class _temp_dry_run:
145+
def __init__(self, check):
146+
self._check = check
147+
self._dry_run_save = check._rfm_dry_run
148+
149+
def __enter__(self):
150+
self._check._rfm_dry_run = True
151+
152+
def __exit__(self, exc_type, exc_val, exc_tb):
153+
self._check._rfm_dry_run = self._dry_run_save
154+
143155
class RegressionTask:
144156
'''A class representing a :class:`RegressionTest` through the regression
145157
pipeline.'''
@@ -328,6 +340,27 @@ def __exit__(this, exc_type, exc_value, traceback):
328340
self.fail()
329341
raise TaskExit from e
330342

343+
def _dry_run_call(self, fn, *args, **kwargs):
344+
'''Call check's fn method in dry-run mode.'''
345+
346+
@contextlib.contextmanager
347+
def temp_dry_run(check):
348+
dry_run_save = check._rfm_dry_run
349+
try:
350+
check._rfm_dry_run = True
351+
yield check
352+
except ABORT_REASONS:
353+
raise
354+
except BaseException:
355+
pass
356+
finally:
357+
check._rfm_dry_run = dry_run_save
358+
359+
with runtime.temp_config(self.testcase.partition.fullname):
360+
with temp_dry_run(self.check):
361+
return fn(*args, **kwargs)
362+
363+
331364
@logging.time_function
332365
def setup(self, *args, **kwargs):
333366
self.testcase.prepare()
@@ -372,12 +405,15 @@ def run_wait(self):
372405

373406
@logging.time_function
374407
def sanity(self):
408+
self._perflogger = logging.getperflogger(self.check)
375409
self._safe_call(self.check.sanity)
376410

377411
@logging.time_function
378-
def performance(self):
379-
self._perflogger = logging.getperflogger(self.check)
380-
self._safe_call(self.check.performance)
412+
def performance(self, dry_run=False):
413+
if dry_run:
414+
self._dry_run_call(self.check.performance)
415+
else:
416+
self._safe_call(self.check.performance)
381417

382418
@logging.time_function
383419
def finalize(self):

reframe/frontend/executors/policies.py

+8
Original file line numberDiff line numberDiff line change
@@ -199,6 +199,10 @@ def on_task_failure(self, task):
199199
self.printer.status('FAIL', msg, just='right')
200200

201201
_print_perf(task)
202+
if task.failed_stage == 'sanity':
203+
# Dry-run the performance stage to trigger performance logging
204+
task.performance(dry_run=True)
205+
202206
timings = task.pipeline_timings(['setup',
203207
'compile_complete',
204208
'run_complete',
@@ -617,6 +621,10 @@ def on_task_failure(self, task):
617621
self.printer.status('FAIL', msg, just='right')
618622

619623
_print_perf(task)
624+
if task.failed_stage == 'sanity':
625+
# Dry-run the performance stage to trigger performance logging
626+
task.performance(dry_run=True)
627+
620628
timings = task.pipeline_timings(['setup',
621629
'compile_complete',
622630
'run_complete',

unittests/test_cli.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -423,7 +423,7 @@ def test_perflogdir_from_env(run_reframe, tmp_path, monkeypatch):
423423
def test_performance_report(run_reframe, run_action):
424424
returncode, stdout, _ = run_reframe(
425425
checkpath=['unittests/resources/checks/frontend_checks.py'],
426-
more_options=['-n', 'PerformanceFailureCheck', '--performance-report'],
426+
more_options=['-n', '^PerformanceFailureCheck', '--performance-report'],
427427
action=run_action
428428
)
429429
if run_action == 'run':

unittests/test_policies.py

+23
Original file line numberDiff line numberDiff line change
@@ -1400,3 +1400,26 @@ def test_perf_logging_param_test(make_runner, make_exec_ctx, perf_param_tests,
14001400
'default' / '_MyPerfParamTest.log')
14011401
assert os.path.exists(logfile)
14021402
assert _count_lines(logfile) == 3
1403+
1404+
1405+
def test_perf_logging_sanity_failure(make_runner, make_exec_ctx,
1406+
config_perflog, tmp_path):
1407+
class _X(_MyPerfTest):
1408+
@sanity_function
1409+
def validate(self):
1410+
return False
1411+
1412+
make_exec_ctx(config_perflog(fmt='%(check_result)s|%(check_perfvalues)s',
1413+
perffmt='%(check_perf_value)s|'))
1414+
logging.configure_logging(rt.runtime().site_config)
1415+
runner = make_runner()
1416+
testcases = executors.generate_testcases([_X()])
1417+
_assert_no_logging_error(runner.runall, testcases)
1418+
1419+
logfile = (tmp_path / 'perflogs' / 'generic' / 'default' / '_X.log')
1420+
assert os.path.exists(logfile)
1421+
with open(logfile) as fp:
1422+
lines = fp.readlines()
1423+
1424+
assert len(lines) == 2
1425+
assert lines[1] == 'fail|None|None\n'

0 commit comments

Comments (0)