From 64e9ab5a36e9459276e02e9df220177114c0e72c Mon Sep 17 00:00:00 2001
From: Mark <1515135+MarkKoz@users.noreply.github.com>
Date: Sat, 16 Sep 2023 10:46:24 -0700
Subject: [PATCH] Use lower timeout for file parsing timeout test

Updating to Bookworm may have increased the performance of the file
processing. In any case, this test started failing intermittently when
running on a local machine. Lower the timeout so that even fast machines
will still hit it.
---
 tests/test_nsjail.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/test_nsjail.py b/tests/test_nsjail.py
index fe55290f..5d927c26 100644
--- a/tests/test_nsjail.py
+++ b/tests/test_nsjail.py
@@ -233,7 +233,7 @@ def test_file_parsing_timeout(self):
             size = 32 * 1024 * 1024
 
             with open("file", "w") as f:
-                for _ in range((size // 1024) - 5):
+                for _ in range(size // 1024):
                     f.write(data)
 
             for i in range(100):
@@ -242,7 +242,7 @@ def test_file_parsing_timeout(self):
         ).strip()
         # A value higher than the actual memory needed is used to avoid the limit
         # on total file size being reached before the timeout when reading.
-        nsjail = NsJail(memfs_instance_size=512 * Size.MiB, files_timeout=1)
+        nsjail = NsJail(memfs_instance_size=128 * Size.MiB, files_timeout=0.1)
        result = nsjail.python3(["-c", code])
         self.assertEqual(result.returncode, None)
         self.assertEqual(
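
Note (not part of the patch): a minimal sketch of the behaviour the updated
test exercises, assuming only the NsJail API visible in the diff above
(NsJail(memfs_instance_size=..., files_timeout=...), nsjail.python3([...]),
Size.MiB, and a None return code on timeout); the import paths below are
assumptions for illustration, not taken from the repository.

    from textwrap import dedent

    from snekbox.nsjail import NsJail  # assumed import path
    from snekbox.snekio import Size    # assumed import path

    # Build a program that writes a ~32 MiB file inside the sandbox, so that
    # reading the created files back out takes a measurable amount of time.
    code = dedent(
        """
        data = "a" * 1024
        size = 32 * 1024 * 1024

        with open("file", "w") as f:
            for _ in range(size // 1024):
                f.write(data)
        """
    ).strip()

    # files_timeout=0.1 should be exceeded while the files are parsed, even on
    # a fast machine, while memfs_instance_size stays large enough that the
    # total-file-size limit is not hit before the timeout fires.
    nsjail = NsJail(memfs_instance_size=128 * Size.MiB, files_timeout=0.1)
    result = nsjail.python3(["-c", code])
    assert result.returncode is None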