GitHub Actions / Repro Test Results
failed
Feb 19, 2025 in 0s
3 fail in 18m 18s
3 tests: 0 ✅ passed, 0 💤 skipped, 3 ❌ failed
1 suite, 1 file, 18m 18s ⏱️
Results for commit b494782.
Annotations
github-actions / Repro Test Results
test_bit_repro_historical (test-venv.lib.python3.10.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility) failed
/opt/testing/checksum/test_report.xml [took 3m 3s]
Raw output
AssertionError: Output file for the model does not exist. See the logs for more information on the experiment run
assert False
+ where False = output_exists()
+ where output_exists = <model_config_tests.models.accessom3.AccessOm3 object at 0x7fdcf20df7f0>.output_exists
+ where <model_config_tests.models.accessom3.AccessOm3 object at 0x7fdcf20df7f0> = <model_config_tests.exp_test_helper.ExpTestHelper object at 0x7fdcf2bf24d0>.model
self = <model_config_tests.test_bit_reproducibility.TestBitReproducibility object at 0x7fdce64aaa70>
output_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om3-configs/76dd7cb7698990314eb4b5225feaa71a90e9272e')
control_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om3-configs/76dd7cb7698990314eb4b5225feaa71a90e9272e/base-experiment')
checksum_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om3-configs/76dd7cb7698990314eb4b5225feaa71a90e9272e/compared/testing/checksum/historical-3hr-checksum.json')
keep_archive = False
@pytest.mark.checksum
def test_bit_repro_historical(
self,
output_path: Path,
control_path: Path,
checksum_path: Optional[Path],
keep_archive: Optional[bool],
):
"""
Test that a run reproduces historical checksums
Parameters (these are fixtures defined in conftest.py)
----------
output_path: Path
Output directory for test output and where the control and
lab directories are stored for the payu experiments. Default is
set in conftest.py
control_path: Path
Path to the model configuration to test. This is copied for
control directories in experiments. Default is set in
conftest.py
checksum_path: Optional[Path]
Path to checksums to compare model output against. Default is
the checksums saved with the model configuration.
keep_archive: Optional[bool]
Flag used in testing so the test code can reuse a previous test
archive and skip running the model with payu
"""
# Setup checksum output directory
checksum_output_dir = set_checksum_output_dir(output_path=output_path)
# Setup experiment
exp = setup_exp(
control_path, output_path, "test_bit_repro_historical", keep_archive
)
# Set model runtime using the configured default
exp.model.set_model_runtime()
# Run the experiment using payu
status, stdout, stderr, output_files = exp.setup_and_run()
if status != 0 or not exp.model.output_exists():
# Log the run information
exp.print_run_logs(status, stdout, stderr, output_files)
assert status == 0, (
"There was an error running the experiment. "
"See the logs for more infomation on the experiment run"
)
> assert exp.model.output_exists(), (
"Output file for the model does not exist. "
"See the logs for more information on the experiment run"
)
E AssertionError: Output file for the model does not exist. See the logs for more information on the experiment run
E assert False
E + where False = output_exists()
E + where output_exists = <model_config_tests.models.accessom3.AccessOm3 object at 0x7fdcf20df7f0>.output_exists
E + where <model_config_tests.models.accessom3.AccessOm3 object at 0x7fdcf20df7f0> = <model_config_tests.exp_test_helper.ExpTestHelper object at 0x7fdcf2bf24d0>.model
../test-venv/lib/python3.10/site-packages/model_config_tests/test_bit_reproducibility.py:101: AssertionError
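Why this failed: exp.setup_and_run() returned status 0, but output_exists() found no model output. The AccessOm3.output_exists implementation is not shown in this log; the sketch below only illustrates the kind of check involved, assuming (based on the FileNotFoundError in the test_restart_repro failure further down) that the output file is ocean.stats under the experiment's first archive output directory. The archive path used here is hypothetical.

# Illustrative sketch only -- not the actual AccessOm3.output_exists code.
# Assumption: the model's output file is ocean.stats in the first output
# directory of the experiment archive, as suggested by the FileNotFoundError
# in the test_restart_repro failure below.
from pathlib import Path

def output_exists(archive_path: Path) -> bool:
    """Return True if the model wrote its stats file to the first output dir."""
    return (archive_path / "output000" / "ocean.stats").exists()

# Hypothetical archive location for this run's historical experiment
archive = Path(
    "/scratch/tm70/repro-ci/experiments/access-om3-configs/"
    "76dd7cb7698990314eb4b5225feaa71a90e9272e/lab/archive/test_bit_repro_historical"
)
print(output_exists(archive))  # False in this run, hence the AssertionError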
github-actions / Repro Test Results
test_bit_repro_repeat (test-venv.lib.python3.10.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility) failed
/opt/testing/checksum/test_report.xml [took 6m 4s]
Raw output
assert False
+ where False = output_exists()
+ where output_exists = <model_config_tests.models.accessom3.AccessOm3 object at 0x7fdce650e8c0>.output_exists
+ where <model_config_tests.models.accessom3.AccessOm3 object at 0x7fdce650e8c0> = <model_config_tests.exp_test_helper.ExpTestHelper object at 0x7fdce650e920>.model
self = <model_config_tests.test_bit_reproducibility.TestBitReproducibility object at 0x7fdce64a9210>
output_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om3-configs/76dd7cb7698990314eb4b5225feaa71a90e9272e')
control_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om3-configs/76dd7cb7698990314eb4b5225feaa71a90e9272e/base-experiment')
@pytest.mark.checksum_slow
def test_bit_repro_repeat(self, output_path: Path, control_path: Path):
"""
Test that a run produces the same checksums when run twice
"""
exp_bit_repo1 = setup_exp(control_path, output_path, "test_bit_repro_repeat_1")
exp_bit_repo2 = setup_exp(control_path, output_path, "test_bit_repro_repeat_2")
# Reconfigure to the default model runtime and run
for exp in [exp_bit_repo1, exp_bit_repo2]:
exp.model.set_model_runtime()
exp.setup_and_run()
# Compare expected to produced.
> assert exp_bit_repo1.model.output_exists()
E assert False
E + where False = output_exists()
E + where output_exists = <model_config_tests.models.accessom3.AccessOm3 object at 0x7fdce650e8c0>.output_exists
E + where <model_config_tests.models.accessom3.AccessOm3 object at 0x7fdce650e8c0> = <model_config_tests.exp_test_helper.ExpTestHelper object at 0x7fdce650e920>.model
../test-venv/lib/python3.10/site-packages/model_config_tests/test_bit_reproducibility.py:149: AssertionError
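Both repeat runs stopped at the same point: no output file was produced, so the checksum comparison never ran. For reference, once both runs produce output the reproducibility check reduces to comparing the two extracted checksum mappings. A minimal sketch, assuming the dict-of-lists shape that extract_checksums returns for ocean.stats fields (the helper name and sample values are illustrative only):

from typing import Any

def checksums_match(run_1: dict[str, list[Any]], run_2: dict[str, list[Any]]) -> bool:
    """True when both runs produced identical value sequences for every field."""
    return run_1 == run_2

# Hypothetical checksum mappings from two identical runs
run_1 = {"En": ["3.0745627134675957E-23"], "CFL": ["0.00000"]}
run_2 = {"En": ["3.0745627134675957E-23"], "CFL": ["0.00000"]}
assert checksums_match(run_1, run_2)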
github-actions / Repro Test Results
test_restart_repro (test-venv.lib.python3.10.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility) failed
/opt/testing/checksum/test_report.xml [took 9m 10s]
Raw output
FileNotFoundError: [Errno 2] No such file or directory: '/scratch/tm70/repro-ci/experiments/access-om3-configs/76dd7cb7698990314eb4b5225feaa71a90e9272e/lab/archive/test_restart_repro_2x1day/output000/ocean.stats'
self = <model_config_tests.test_bit_reproducibility.TestBitReproducibility object at 0x7fdce64aac20>
output_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om3-configs/76dd7cb7698990314eb4b5225feaa71a90e9272e')
control_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om3-configs/76dd7cb7698990314eb4b5225feaa71a90e9272e/base-experiment')
@pytest.mark.checksum_slow
def test_restart_repro(self, output_path: Path, control_path: Path):
"""
Test that a run reproduces across restarts.
"""
# First do two short (1 day) runs.
exp_2x1day = setup_exp(control_path, output_path, "test_restart_repro_2x1day")
# Reconfigure to a 1 day run.
exp_2x1day.model.set_model_runtime(seconds=DAY_IN_SECONDS)
# Now run twice.
exp_2x1day.setup_and_run()
exp_2x1day.force_qsub_run()
# Now do a single 2 day run
exp_2day = setup_exp(control_path, output_path, "test_restart_repro_2day")
# Reconfigure
exp_2day.model.set_model_runtime(seconds=(2 * DAY_IN_SECONDS))
# Run once.
exp_2day.setup_and_run()
# Now compare the output between our two short and one long run.
> checksums_1d_0 = exp_2x1day.extract_checksums()
../test-venv/lib/python3.10/site-packages/model_config_tests/test_bit_reproducibility.py:181:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../test-venv/lib/python3.10/site-packages/model_config_tests/exp_test_helper.py:49: in extract_checksums
return self.model.extract_checksums(output_directory, schema_version)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <model_config_tests.models.accessom3.AccessOm3 object at 0x7fdce64aab60>
output_directory = None, schema_version = None
def extract_checksums(
self, output_directory: Path = None, schema_version: str = None
) -> dict[str, Any]:
"""Parse output file and create checksum using defined schema"""
if output_directory:
output_filename = output_directory / "ocean.stats"
else:
output_filename = self.output_file
# ocean.stats is used for regression testing in MOM6's own test suite
# See https://github.com/mom-ocean/MOM6/blob/2ab885eddfc47fc0c8c0bae46bc61531104428d5/.testing/Makefile#L495-L501
# Rows in ocean.stats look like:
# 0, 693135.000, 0, En 3.0745627134675957E-23, CFL 0.00000, ...
# where the first three columns are Step, Day, Truncs and the remaining
# columns include a label for what they are (e.g. En = Energy/Mass)
# Header info is only included for new runs so can't be relied on
output_checksums: dict[str, list[any]] = defaultdict(list)
> with open(output_filename) as f:
E FileNotFoundError: [Errno 2] No such file or directory: '/scratch/tm70/repro-ci/experiments/access-om3-configs/76dd7cb7698990314eb4b5225feaa71a90e9272e/lab/archive/test_restart_repro_2x1day/output000/ocean.stats'
../test-venv/lib/python3.10/site-packages/model_config_tests/models/accessom3.py:84: FileNotFoundError
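The comment in extract_checksums documents the ocean.stats row format that the checksums are read from: three unlabelled columns (Step, Day, Truncs) followed by labelled values such as "En 3.0745627134675957E-23". As an illustration of that format only, not the AccessOm3.extract_checksums implementation, one such row could be collected into a label-to-values mapping like this:

from collections import defaultdict

def parse_ocean_stats_line(line: str) -> dict[str, list[str]]:
    """Collect the labelled values from a single ocean.stats data row."""
    checksums: dict[str, list[str]] = defaultdict(list)
    columns = [col.strip() for col in line.split(",")]
    # Skip the first three unlabelled columns: Step, Day, Truncs
    for column in columns[3:]:
        parts = column.split(maxsplit=1)
        if len(parts) == 2:
            label, value = parts
            checksums[label].append(value)
    return dict(checksums)

row = "0, 693135.000, 0, En 3.0745627134675957E-23, CFL 0.00000"
print(parse_ocean_stats_line(row))
# {'En': ['3.0745627134675957E-23'], 'CFL': ['0.00000']}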
Check notice on line 0 in .github
github-actions / Repro Test Results
3 tests found
There are 3 tests, see "Raw output" for the full list of tests.
Raw output
test-venv.lib.python3.10.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility ‑ test_bit_repro_historical
test-venv.lib.python3.10.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility ‑ test_bit_repro_repeat
test-venv.lib.python3.10.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility ‑ test_restart_repro