Commit 63b12e54 authored by Willi Rath's avatar Willi Rath

Merge branch 'add-optional-large-tests' into 'master'

Add optional large tests

See merge request !6
parents 48c3c5d2 2c225d64
Pipeline #1559 passed with stage
in 19 minutes and 32 seconds
......@@ -35,15 +35,21 @@ cd ${sources_dir}/scientific-computing-use-cases
with `${path_to_output_dir}` pointing to the file system that should be tested.
## Modifying the timeout
## Modifying the timeout (removed)
We can use an env var `${PYTEST_TIMEOUT_PER_TEST}` to set the maximal number of
seconds a single test is allowed to take. Default is 5 seconds. To allow for,
e.g., up to 10 seconds per test, run the tests with
*Timeouts are buggy in pytest. The use of `pytest.mark.timeout` has been
removed.*
## Including the large tests
Setting the environment variable `PYTEST_LARGE_TESTS` to `TRUE` will include
tests with arrays of up to `256^4` double-precision numbers. This amounts to
about 34 GiB written to and read from disk:
cd ${sources_dir}/scientific-computing-use-cases
PYTEST_TIMEOUT_PER_TEST=10 ./ ${path_to_output_dir}
PYTEST_LARGE_TESTS=TRUE ./ ${path_to_output_dir}
......@@ -13,7 +13,15 @@ source ${mc3_target_dir}/bin/activate py3_std || \
echo "Please run first.";
exit; }
# If we're running the large tests, allow for 24GB virtual memory instead of 8
if [ $PYTEST_LARGE_TESTS == "TRUE" ]; then
ulimit -v "$(expr ${vlim} \* 1000 \* 1000)"
# run all tests using a local tmp dir and monitor
# timing of all tests
ulimit -v "$(expr 8 \* 1000 \* 1000)"
pytest -v -s --basetemp=${tmp_dir} --durations=0 tests/
......@@ -11,13 +11,9 @@ import os
# read timeout from environment
def _get_timeout():
if "PYTEST_TIMEOUT_PER_TEST" in os.environ:
pytest_timeout_per_test = float(os.environ["PYTEST_TIMEOUT_PER_TEST"])
pytest_timeout_per_test = 5.0 # seconds
return pytest_timeout_per_test
# do we want to run the large tests?
def _run_large_tests():
return (os.environ.get("PYTEST_LARGE_TESTS", "false").upper() == "TRUE")
def _get_xr_data_array(array):
......@@ -33,20 +29,19 @@ def temp_dir(tmpdir_factory):
pytest.mark.xfail(reason="Expect out-of-memory errors")(4)])
@pytest.mark.parametrize("ndim", [3, 4])
list(2 ** n for n in range(5, 6)) +
list(pytest.mark.xfail(reason="Expect out-of-memory errors")(2 ** n)
for n in range(6, 8)))
list(2 ** n for n in range(5, 8)) +
list(pytest.mark.xfail(reason="Expect out-of-memory errors")(
not _run_large_tests(),
reason="User did not ask for the large tests")(2 ** n))
for n in [8]))
@pytest.mark.parametrize("num_chunks", [2, 8])
@pytest.mark.parametrize("use_zarr_or_nc4", ["zarr", "nc4"])
@pytest.mark.parametrize("only_write", [False, True])
@pytest.mark.parametrize("threads_per_worker", [4, 1])
def test_large_arrays(num_chunks, N, ndim, temp_dir, use_zarr_or_nc4,
only_write, threads_per_worker):
"""Write random data sets to disk and re-read check for corrupted data.
Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment