mirror of
https://gitlab.com/OpenMW/openmw.git
synced 2025-04-28 21:07:59 +03:00
Make integration_tests.py output more verbose
* Make it look more like googletest. * Print total and failed number of tests. * Print names of failed tests. * Print duration of each test and total. * Hide all logs by default.
This commit is contained in:
parent
f80c7b2355
commit
c298210844
1 changed file with 51 additions and 28 deletions
|
@ -1,6 +1,13 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
import argparse, datetime, os, subprocess, sys, shutil
|
import argparse
|
||||||
|
import datetime
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description="OpenMW integration tests.")
|
parser = argparse.ArgumentParser(description="OpenMW integration tests.")
|
||||||
|
@ -40,12 +47,13 @@ testing_util_dir = tests_dir / "testing_util"
|
||||||
time_str = datetime.datetime.now().strftime("%Y-%m-%d-%H.%M.%S")
|
time_str = datetime.datetime.now().strftime("%Y-%m-%d-%H.%M.%S")
|
||||||
|
|
||||||
|
|
||||||
def runTest(name):
|
def run_test(test_name):
|
||||||
print(f"Start {name}")
|
start = time.time()
|
||||||
|
print(f'[----------] Running tests from {test_name}')
|
||||||
shutil.rmtree(config_dir, ignore_errors=True)
|
shutil.rmtree(config_dir, ignore_errors=True)
|
||||||
config_dir.mkdir()
|
config_dir.mkdir()
|
||||||
shutil.copyfile(example_suite_dir / "settings.cfg", config_dir / "settings.cfg")
|
shutil.copyfile(example_suite_dir / "settings.cfg", config_dir / "settings.cfg")
|
||||||
test_dir = tests_dir / name
|
test_dir = tests_dir / test_name
|
||||||
with open(config_dir / "openmw.cfg", "w", encoding="utf-8") as omw_cfg:
|
with open(config_dir / "openmw.cfg", "w", encoding="utf-8") as omw_cfg:
|
||||||
for path in content_paths:
|
for path in content_paths:
|
||||||
omw_cfg.write(f'data="{path.parent}"\n')
|
omw_cfg.write(f'data="{path.parent}"\n')
|
||||||
|
@ -72,61 +80,76 @@ def runTest(name):
|
||||||
f"memory limit = {1024 * 1024 * 256}\n"
|
f"memory limit = {1024 * 1024 * 256}\n"
|
||||||
)
|
)
|
||||||
stdout_lines = list()
|
stdout_lines = list()
|
||||||
exit_ok = True
|
|
||||||
test_success = True
|
test_success = True
|
||||||
|
fatal_errors = list()
|
||||||
with subprocess.Popen(
|
with subprocess.Popen(
|
||||||
[openmw_binary, "--replace=config", "--config", config_dir, "--skip-menu", "--no-grab", "--no-sound"],
|
[openmw_binary, "--replace=config", "--config", config_dir, "--skip-menu", "--no-grab", "--no-sound"],
|
||||||
stdout=subprocess.PIPE,
|
stdout=subprocess.PIPE,
|
||||||
stderr=subprocess.STDOUT,
|
stderr=subprocess.STDOUT,
|
||||||
encoding="utf-8",
|
encoding="utf-8",
|
||||||
env={
|
env={
|
||||||
"OPENMW_OSG_STATS_FILE": str(work_dir / f"{name}.{time_str}.osg_stats.log"),
|
"OPENMW_OSG_STATS_FILE": str(work_dir / f"{test_name}.{time_str}.osg_stats.log"),
|
||||||
"OPENMW_OSG_STATS_LIST": "times",
|
"OPENMW_OSG_STATS_LIST": "times",
|
||||||
**os.environ,
|
**os.environ,
|
||||||
},
|
},
|
||||||
) as process:
|
) as process:
|
||||||
quit_requested = False
|
quit_requested = False
|
||||||
|
running_test_number = None
|
||||||
|
running_test_name = None
|
||||||
|
count = 0
|
||||||
|
failed_tests = list()
|
||||||
|
test_start = None
|
||||||
for line in process.stdout:
|
for line in process.stdout:
|
||||||
if args.verbose:
|
if args.verbose:
|
||||||
sys.stdout.write(line)
|
sys.stdout.write(line)
|
||||||
else:
|
else:
|
||||||
stdout_lines.append(line)
|
stdout_lines.append(line)
|
||||||
words = line.split(" ")
|
if "Quit requested by a Lua script" in line:
|
||||||
if len(words) > 1 and words[1] == "E]":
|
|
||||||
print(line, end="")
|
|
||||||
elif "Quit requested by a Lua script" in line:
|
|
||||||
quit_requested = True
|
quit_requested = True
|
||||||
elif "TEST_START" in line:
|
elif "TEST_START" in line:
|
||||||
w = line.split("TEST_START")[1].split("\t")
|
test_start = time.time()
|
||||||
print(f"TEST {w[2].strip()}\t\t", end="")
|
number, name = line.split("TEST_START")[1].strip().split("\t", maxsplit=1)
|
||||||
|
running_test_number = int(number)
|
||||||
|
running_test_name = name
|
||||||
|
count += 1
|
||||||
|
print(f"[ RUN ] {running_test_name}")
|
||||||
elif "TEST_OK" in line:
|
elif "TEST_OK" in line:
|
||||||
print(f"OK")
|
duration = (time.time() - test_start) * 1000
|
||||||
|
number, name = line.split("TEST_OK")[1].strip().split("\t", maxsplit=1)
|
||||||
|
assert running_test_number == int(number)
|
||||||
|
print(f"[ OK ] {running_test_name} ({duration:.3f} ms)")
|
||||||
elif "TEST_FAILED" in line:
|
elif "TEST_FAILED" in line:
|
||||||
w = line.split("TEST_FAILED")[1].split("\t")
|
duration = (time.time() - test_start) * 1000
|
||||||
print(f"FAILED {w[3]}\t\t")
|
number, name, error = line.split("TEST_FAILED")[1].strip().split("\t", maxsplit=2)
|
||||||
test_success = False
|
assert running_test_number == int(number)
|
||||||
|
print(error)
|
||||||
|
print(f"[ FAILED ] {running_test_name} ({duration:.3f} ms)")
|
||||||
|
failed_tests.append(running_test_name)
|
||||||
process.wait(5)
|
process.wait(5)
|
||||||
if not quit_requested:
|
if not quit_requested:
|
||||||
print("ERROR: Unexpected termination")
|
fatal_errors.append("unexpected termination")
|
||||||
exit_ok = False
|
|
||||||
if process.returncode != 0:
|
if process.returncode != 0:
|
||||||
print(f"ERROR: openmw exited with code {process.returncode}")
|
fatal_errors.append(f"openmw exited with code {process.returncode}")
|
||||||
exit_ok = False
|
|
||||||
if os.path.exists(config_dir / "openmw.log"):
|
if os.path.exists(config_dir / "openmw.log"):
|
||||||
shutil.copyfile(config_dir / "openmw.log", work_dir / f"{name}.{time_str}.log")
|
shutil.copyfile(config_dir / "openmw.log", work_dir / f"{test_name}.{time_str}.log")
|
||||||
if not exit_ok and not args.verbose:
|
if fatal_errors and not args.verbose:
|
||||||
sys.stdout.writelines(stdout_lines)
|
sys.stdout.writelines(stdout_lines)
|
||||||
if test_success and exit_ok:
|
total_duration = (time.time() - start) * 1000
|
||||||
print(f"{name} succeeded")
|
print(f'\n[----------] {count} tests from {test_name} ({total_duration:.3f} ms total)')
|
||||||
else:
|
print(f"[ PASSED ] {count - len(failed_tests)} tests.")
|
||||||
print(f"{name} failed")
|
if fatal_errors:
|
||||||
return test_success and exit_ok
|
print(f"[ FAILED ] fatal error: {'; '.join(fatal_errors)}")
|
||||||
|
if failed_tests:
|
||||||
|
print(f"[ FAILED ] {len(failed_tests)} tests, listed below:")
|
||||||
|
for failed_test in failed_tests:
|
||||||
|
print(f"[ FAILED ] {failed_test}")
|
||||||
|
return len(failed_tests) == 0 and not fatal_errors
|
||||||
|
|
||||||
|
|
||||||
status = 0
|
status = 0
|
||||||
for entry in tests_dir.glob("test_*"):
|
for entry in tests_dir.glob("test_*"):
|
||||||
if entry.is_dir():
|
if entry.is_dir():
|
||||||
if not runTest(entry.name):
|
if not run_test(entry.name):
|
||||||
status = -1
|
status = -1
|
||||||
if status == 0:
|
if status == 0:
|
||||||
shutil.rmtree(config_dir, ignore_errors=True)
|
shutil.rmtree(config_dir, ignore_errors=True)
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue