From bea36c8e08b428e3812470097e6e7c8e11f0be9d Mon Sep 17 00:00:00 2001
From: Disconnect3d
Date: Wed, 28 May 2025 15:34:49 +0200
Subject: [PATCH] Add total vs cumulative time to tests + disable some tests on CI for speedup (#3039)

* Remove {next,step}* tests from all commands tests

This optimizes the CI test run; those commands are also tested elsewhere,
maybe not exhaustively, but let's leave it as it is for now...

* add total vs cumulative time for tests
---
 tests/gdb-tests/tests/test_commands.py      |  8 +++++++-
 tests/gdb-tests/tests/test_commands_next.py | 23 ++++++++++++++++-------
 tests/tests.py                              | 20 ++++++++++----------
 3 files changed, 33 insertions(+), 18 deletions(-)

diff --git a/tests/gdb-tests/tests/test_commands.py b/tests/gdb-tests/tests/test_commands.py
index 2c6269634..e1341db6d 100644
--- a/tests/gdb-tests/tests/test_commands.py
+++ b/tests/gdb-tests/tests/test_commands.py
@@ -14,8 +14,14 @@ BINARY = tests.binaries.get("heap_bins.out")
 disallowed_commands = {
     # requires user input
     "ipi",
-    # takes too long
+    # Already tested by other tests & takes too long
+    "pc",
+    "nextcall",
+    "nextjump",
     "nextproginstr",
+    "nextret",
+    "nextsyscall",
+    "stepret",
     "stepsyscall",
 }
 
diff --git a/tests/gdb-tests/tests/test_commands_next.py b/tests/gdb-tests/tests/test_commands_next.py
index 3d82222b8..0a7ae4cb6 100644
--- a/tests/gdb-tests/tests/test_commands_next.py
+++ b/tests/gdb-tests/tests/test_commands_next.py
@@ -11,10 +11,22 @@ import tests
 REFERENCE_BINARY = tests.binaries.get("reference-binary.out")
 CRASH_SIMPLE_BINARY = tests.binaries.get("crash_simple.out.hardcoded")
 
+NEXT_COMMANDS = (
+    "pc",
+    "nextcall",
+    "nextjmp",
+    "nextproginstr",
+    "nextret",
+    "nextsyscall",
+    "stepret",
+    "stepsyscall",
+)
 
-def test_command_nextproginstr_binary_not_running():
-    out = gdb.execute("nextproginstr", to_string=True)
-    assert out == "nextproginstr: The program is not being run.\n"
+
+@pytest.mark.parametrize("command", NEXT_COMMANDS)
+def test_next_commands_binary_not_running(command):
+    out = gdb.execute(command, to_string=True)
+    assert out == f"{command}: The program is not being run.\n"
 
 
 def test_command_nextproginstr(start_binary):
@@ -48,10 +60,7 @@ def test_command_nextproginstr(start_binary):
     assert out == "The pc is already at the binary objfile code. Not stepping.\n"
 
 
-@pytest.mark.parametrize(
-    "command",
-    ("nextcall", "nextjump", "nextproginstr", "nextret", "nextsyscall", "stepret", "stepsyscall"),
-)
+@pytest.mark.parametrize("command", NEXT_COMMANDS)
 def test_next_command_doesnt_freeze_crashed_binary(start_binary, command):
     start_binary(CRASH_SIMPLE_BINARY)
 
diff --git a/tests/tests.py b/tests/tests.py
index 985379a2a..f26715a9b 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -165,6 +165,7 @@ def run_test(
 
 class TestStats:
     def __init__(self):
+        self.total_duration = 0
         self.fail_tests = 0
         self.pass_tests = 0
         self.skip_tests = 0
@@ -196,6 +197,9 @@ class TestStats:
             skip_reason = " " + (
                 process.stdout.split(test_status)[1].split("\n\n\x1b[33m")[0].replace("\n", "")
             )
+
+        self.total_duration += duration
+
         print(f"{test_case:<70} {test_status} {duration:.2f}s{skip_reason}")
 
         # Only show the output of failed tests unless the verbose flag was used
@@ -212,19 +216,15 @@ def run_tests_and_print_stats(
     gdbinit_path: str,
     test_dir_path: str,
 ):
-    start = time.time()
     stats = TestStats()
-
-    if args.cov:
-        print("Running tests with coverage")
+    start = time.time()
 
     if args.serial:
         for test in tests_list:
             result = run_test(test, args, gdb_path, gdbinit_path, reserve_port())
             stats.handle_test_result(result, args, test_dir_path)
     else:
-        print("")
-        print("Running tests in parallel")
+        print("\nRunning tests in parallel")
         with concurrent.futures.ThreadPoolExecutor(max_workers=os.cpu_count()) as executor:
             for test in tests_list:
                 executor.submit(
@@ -234,15 +234,15 @@ def run_tests_and_print_stats(
                 )
 
     end = time.time()
-    seconds = int(end - start)
-    print(f"Tests completed in {seconds} seconds")
+    duration = end - start
 
     print("")
     print("*********************************")
     print("********* TESTS SUMMARY *********")
    print("*********************************")
-    print(f"Tests Passed: {stats.pass_tests}")
+    print(f"Time Spent   : {duration:.2f}s (cumulative: {stats.total_duration:.2f}s)")
+    print(f"Tests Passed : {stats.pass_tests}")
     print(f"Tests Skipped: {stats.skip_tests}")
-    print(f"Tests Failed: {stats.fail_tests}")
+    print(f"Tests Failed : {stats.fail_tests}")
 
     if stats.fail_tests != 0:
         print("\nFailing tests:")
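
Note: the new summary line reports both the wall-clock time of the whole run ("Time Spent") and the cumulative sum of per-test durations; the two diverge as soon as tests run in parallel. Below is a minimal sketch of that effect, assuming a four-worker thread pool and invented per-test durations; none of this code is part of the patch.

import concurrent.futures
import time

def fake_test(seconds: float) -> float:
    # Stand-in for a single test; it just burns wall-clock time.
    time.sleep(seconds)
    return seconds

durations = [0.2, 0.2, 0.2, 0.2]  # invented per-test runtimes
cumulative = 0.0

start = time.time()
with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
    for spent in executor.map(fake_test, durations):
        cumulative += spent  # mirrors stats.total_duration += duration
wall_clock = time.time() - start

# With four workers, wall_clock is roughly 0.2s while cumulative is roughly 0.8s.
print(f"Time Spent   : {wall_clock:.2f}s (cumulative: {cumulative:.2f}s)")

With --serial the two numbers roughly coincide; the gap between them is a quick sanity check of how much the parallel run actually overlaps test execution.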