From c8672e35f2c6d411085dcabf0cd4341c301f1353 Mon Sep 17 00:00:00 2001
From: Darren Eberly
Date: Wed, 2 Aug 2023 19:03:47 -0400
Subject: [PATCH] Initial benchmark utility

---
 .gitignore                                    |   1 +
 benchmark/__init__.py                         |   5 +
 benchmark/__main__.py                         |  42 +++++
 benchmark/graph.py                            |  87 ++++++++++
 benchmark/manager.py                          | 160 +++++++++++++++++
 benchmark/tests/__init__.py                   |   0
 benchmark/tests/arcade/__init__.py            |   1 +
 benchmark/tests/arcade/collision.py           | 102 +++++++++++
 benchmark/tests/arcade_accelerate/__init__.py |   1 +
 .../tests/arcade_accelerate/collision.py      | 102 +++++++++++
 benchmark/tests/base.py                       | 163 ++++++++++++++++++
 benchmark/timing.py                           |  74 ++++++++
 pyproject.toml                                |   2 +
 13 files changed, 740 insertions(+)
 create mode 100644 benchmark/__init__.py
 create mode 100644 benchmark/__main__.py
 create mode 100644 benchmark/graph.py
 create mode 100644 benchmark/manager.py
 create mode 100644 benchmark/tests/__init__.py
 create mode 100644 benchmark/tests/arcade/__init__.py
 create mode 100644 benchmark/tests/arcade/collision.py
 create mode 100644 benchmark/tests/arcade_accelerate/__init__.py
 create mode 100644 benchmark/tests/arcade_accelerate/collision.py
 create mode 100644 benchmark/tests/base.py
 create mode 100644 benchmark/timing.py

diff --git a/.gitignore b/.gitignore
index a2d502f..daeefe8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
 /target
+output/
 
 # Byte-compiled / optimized / DLL files
 __pycache__/
diff --git a/benchmark/__init__.py b/benchmark/__init__.py
new file mode 100644
index 0000000..4a064a8
--- /dev/null
+++ b/benchmark/__init__.py
@@ -0,0 +1,5 @@
from pathlib import Path

PACKAGE_ROOT = Path(__file__).parent.resolve()
PROJECT_ROOT = PACKAGE_ROOT.parent
OUT_DIR = PROJECT_ROOT / "output"
diff --git a/benchmark/__main__.py b/benchmark/__main__.py
new file mode 100644
index 0000000..09e8450
--- /dev/null
+++ b/benchmark/__main__.py
@@ -0,0 +1,42 @@
import argparse
import sys
from datetime import datetime

from benchmark.manager import TestManager


def unload_arcade():
    """Remove all arcade submodules from the module cache."""
    to_uncache = []
    for mod in sys.modules:
        if mod.startswith("arcade."):
            to_uncache.append(mod)

    for mod in to_uncache:
        del sys.modules[mod]


def main():
    args = parse_args(sys.argv[1:])
    print(f"Session Name: '{args.session}'")
    manager = TestManager(args.session, debug=True)
    manager.find_test_classes(args.type, args.name)
    manager.create_test_instances()
    manager.run()


def parse_args(args):
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-s",
        "--session",
        help="Session Name",
        type=str,
        default=datetime.now().strftime("%Y-%m-%dT%H-%M-%S"),
    )
    parser.add_argument("-t", "--type", help="Test Type", type=str)
    parser.add_argument("-n", "--name", help="Test Name", type=str)
    return parser.parse_args(args)


if __name__ == "__main__":
    main()
diff --git a/benchmark/graph.py b/benchmark/graph.py
new file mode 100644
index 0000000..7959b5e
--- /dev/null
+++ b/benchmark/graph.py
@@ -0,0 +1,87 @@
import csv
from pathlib import Path

import matplotlib.pyplot as plt
import seaborn as sns

sns.set_style("whitegrid")

# Column indices in the CSV files written by benchmark.timing
FPS = 1
SPRITE_COUNT = 2
DRAWING_TIME = 3
PROCESSING_TIME = 4


class DataSeries:
    def __init__(self, name: str, path: Path) -> None:
        self.name = name
        self.path = path
        # Data
        self.count = []
        self.processing_time = []
        self.draw_time = []
        self.fps = []
        # Process data
        self._process_data()

    def _process_data(self):
        rows = self._read_file(self.path)
        for row in rows:
            self.count.append(row[SPRITE_COUNT])
            self.fps.append(row[FPS])
            self.processing_time.append(row[PROCESSING_TIME])
            self.draw_time.append(row[DRAWING_TIME])

    def _read_file(self, path: Path):
        results = []
        with open(path) as csv_file:
            csv_reader = csv.reader(csv_file, delimiter=",")
            first_row = True
            for row in csv_reader:
                if first_row:
                    # Skip the header row
                    first_row = False
                else:
                    results.append([float(cell) for cell in row])

        return results


class PerfGraph:
    def __init__(self, title: str, label_x: str, label_y: str) -> None:
        self.title = title
        self.label_x = label_x
        self.label_y = label_y
        self.series = []

    def add_series(self, series: DataSeries):
        self.series.append(series)

    def create(self, output_path: Path):
        plt.title(self.title)

        for series in self.series:
            plt.plot(series.count, series.processing_time, label=series.name)

        plt.legend(loc="upper left", shadow=True, fontsize="large")
        plt.xlabel(self.label_x)
        plt.ylabel(self.label_y)

        plt.savefig(output_path)
        plt.clf()


if __name__ == "__main__":
    from benchmark import OUT_DIR

    OUTPUT_ROOT = OUT_DIR / "test" / "graphs"
    OUTPUT_ROOT.mkdir(parents=True, exist_ok=True)
    path = OUT_DIR / "test" / "data"

    graph = PerfGraph(
        "Time To Detect Collisions", label_x="Sprite Count", label_y="Time"
    )
    graph.add_series(DataSeries("Arcade 0", path / "arcade_collision-0.csv"))
    graph.add_series(DataSeries("Arcade 1", path / "arcade_collision-1.csv"))
    graph.add_series(DataSeries("Arcade 2", path / "arcade_collision-2.csv"))
    graph.add_series(DataSeries("Arcade 3", path / "arcade_collision-3.csv"))
    graph.create(OUTPUT_ROOT / "arcade_collision.png")
diff --git a/benchmark/manager.py b/benchmark/manager.py
new file mode 100644
index 0000000..a29ca6e
--- /dev/null
+++ b/benchmark/manager.py
@@ -0,0 +1,160 @@
import importlib
import pkgutil
from typing import List, Optional, Type

from benchmark import OUT_DIR
from benchmark.graph import DataSeries, PerfGraph
from benchmark.tests.base import PerfTest


def find_test_classes(path: str) -> List[Type[PerfTest]]:
    """Find all test classes in submodules."""
    target_module = importlib.import_module(f"benchmark.tests.{path}")

    classes = []
    for v in pkgutil.iter_modules(target_module.__path__):
        module = importlib.import_module(f"benchmark.tests.{path}.{v.name}")
        if hasattr(module, "Test"):
            classes.append(module.Test)
        else:
            print(
                (
                    "WARNING: "
                    f"Module '{module.__name__}' does not have a Test class. "
                    "Please add one or rename your test class to 'Test'."
                )
            )

    return classes


class TestManager:
    """
    Finds and executes tests.

    :param str session: The session name.
    :param bool debug: If True, print debug messages.
    """

    def __init__(self, session: str, debug: bool = True):
        self.debug = debug
        self.session = session
        self.session_dir = OUT_DIR / session
        self.session_dir.mkdir(parents=True, exist_ok=True)
        self.data_dir = self.session_dir / "data"

        self.test_classes: List[Type[PerfTest]] = []
        self.test_instances: List[PerfTest] = []

    @property
    def num_test_classes(self) -> int:
        return len(self.test_classes)

    @property
    def num_test_instances(self) -> int:
        return len(self.test_instances)

    def find_test_classes(
        self,
        type: Optional[str] = None,
        name: Optional[str] = None,
    ):
        """
        Find test classes, optionally filtered by type and name.

        :param str type: The type of test to run.
        :param str name: The name of the test to run.
        """
        all_classes = find_test_classes("arcade")
        all_classes += find_test_classes("arcade_accelerate")

        for cls in all_classes:
            if type is not None and cls.type != type:
                continue
            if name is not None and cls.name != name:
                continue
            self.test_classes.append(cls)

        if self.debug:
            num_classes = len(self.test_classes)
            print(f"Found {num_classes} test classes")
            for cls in self.test_classes:
                print(f" -> {cls.type}.{cls.name}")

    def create_test_instances(self):
        """
        Create test instances based on each test's instances attribute.
        """
        for cls in self.test_classes:
            # If a test has multiple instances, create one instance for each
            if cls.instances:
                for params, _ in cls.instances:
                    self.add_test_instance(cls(**params))
            else:
                self.add_test_instance(cls())

        if self.debug:
            num_instances = len(self.test_instances)
            print(f"Created {num_instances} test instances")
            for instance in self.test_instances:
                print(f" -> {instance.type}.{instance.name}")

    def add_test_instance(self, instance: PerfTest):
        """Validate and register an instance."""
        if instance.name == "default":
            raise ValueError(
                (
                    "Test name cannot be 'default'. "
                    "Please add a class attribute 'name' to your test class. "
                    f"Class: {instance}"
                )
            )
        self.test_instances.append(instance)

    def get_test_instance(self, name: str) -> Optional[PerfTest]:
        for instance in self.test_instances:
            if instance.instance_name == name:
                return instance
        return None

    def run(self):
        """Run all tests."""
        for instance in self.test_instances:
            instance.run(self.session_dir)

    def create_graph(
        self,
        file_name: str,
        title: str,
        x_label: str,
        y_label: str,
        series_names=(),
    ):
        """Create a graph using matplotlib."""
        print(f"Creating graph: {title} [{x_label}, {y_label}]")
        graph = PerfGraph(title, x_label, y_label)

        for series_name in series_names:
            # Check if we have a test instance with this name
            instance = self.get_test_instance(series_name)
            if instance is None:
                print(f" -> No test instance found for series '{series_name}'")
                continue

            path = self.data_dir / f"{series_name}.csv"
            if not path.exists():
                print(
                    f"No data found for series '{series_name}' in session '{self.session}'"
                )
                continue

            graph.add_series(DataSeries(instance.name, path))

        out_path = self.session_dir / "graphs"
        out_path.mkdir(parents=True, exist_ok=True)
        out_path = out_path / f"{file_name}.png"
        graph.create(out_path)
diff --git a/benchmark/tests/__init__.py b/benchmark/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/benchmark/tests/arcade/__init__.py b/benchmark/tests/arcade/__init__.py
new file mode 100644
index 0000000..a301b7e
--- /dev/null
+++ b/benchmark/tests/arcade/__init__.py
@@ -0,0 +1 @@
from . import collision
diff --git a/benchmark/tests/arcade/collision.py b/benchmark/tests/arcade/collision.py
new file mode 100644
index 0000000..31cccec
--- /dev/null
+++ b/benchmark/tests/arcade/collision.py
@@ -0,0 +1,102 @@
import random

import arcade

from benchmark import OUT_DIR
from benchmark.tests.base import ArcadePerfTest

SPRITE_SCALING_COIN = 0.09
SPRITE_SCALING_PLAYER = 0.5
SPRITE_NATIVE_SIZE = 128
SPRITE_SIZE = int(SPRITE_NATIVE_SIZE * SPRITE_SCALING_COIN)
SCREEN_WIDTH = 1800
SCREEN_HEIGHT = 1000
SCREEN_TITLE = "Moving Sprite Stress Test - Arcade"
USE_SPATIAL_HASHING = True
DEFAULT_METHOD = 3


class Test(ArcadePerfTest):
    name = "collision"
    instances = (({"method": 3}, "Simple"),)

    def __init__(self, method: int = DEFAULT_METHOD):
        super().__init__(
            size=(SCREEN_WIDTH, SCREEN_HEIGHT),
            title=SCREEN_TITLE,
            start_count=0,
            increment_count=1000,
            duration=60.0,
        )
        self.method = method
        self.name = f"collision-{self.method}"
        self.series_name = "ArcadeTest"

        # Variables that will hold sprite lists
        self.coin_list = None
        self.player_list = None
        self.player = None

    def setup(self):
        self.window.background_color = arcade.color.AMAZON
        self.coin_texture = arcade.load_texture(":resources:images/items/coinGold.png")
        # Sprite lists
        self.coin_list = arcade.SpriteList(use_spatial_hash=USE_SPATIAL_HASHING)
        self.player_list = arcade.SpriteList()
        self.player = arcade.Sprite(
            ":resources:images/animated_characters/female_person/femalePerson_idle.png",
            scale=SPRITE_SCALING_PLAYER,
        )
        self.player.center_x = random.randrange(SCREEN_WIDTH)
        self.player.center_y = random.randrange(SCREEN_HEIGHT)
        self.player.change_x = 3
        self.player.change_y = 5
        self.player_list.append(self.player)

    def add_coins(self, amount):
        """Add a new set of coins"""
        for _ in range(amount):
            coin = arcade.Sprite(
                self.coin_texture,
                center_x=random.randrange(SPRITE_SIZE, SCREEN_WIDTH - SPRITE_SIZE),
                center_y=random.randrange(SPRITE_SIZE, SCREEN_HEIGHT - SPRITE_SIZE),
                scale=SPRITE_SCALING_COIN,
            )
            self.coin_list.append(coin)

    def on_draw(self):
        super().on_draw()
        self.coin_list.draw()
        self.player_list.draw()

    def on_update(self, delta_time: float):
        super().on_update(delta_time)

        self.player_list.update()
        # Bounce the player off the edges of the screen
        if self.player.center_x < 0 and self.player.change_x < 0:
            self.player.change_x *= -1
        if self.player.center_y < 0 and self.player.change_y < 0:
            self.player.change_y *= -1

        if self.player.center_x > SCREEN_WIDTH and self.player.change_x > 0:
            self.player.change_x *= -1
        if self.player.center_y > SCREEN_HEIGHT and self.player.change_y > 0:
            self.player.change_y *= -1

        # Respawn any coins the player touched
        coin_hit_list = arcade.check_for_collision_with_list(
            self.player, self.coin_list, method=self.method
        )
        for coin in coin_hit_list:
            coin.center_x = random.randrange(SCREEN_WIDTH)
            coin.center_y = random.randrange(SCREEN_HEIGHT)

    def update_state(self):
        # Figure out if we need more coins
        if self.timing.target_n > len(self.coin_list):
            new_coin_amount = self.timing.target_n - len(self.coin_list)
            self.add_coins(new_coin_amount)
            self.coin_list.write_sprite_buffers_to_gpu()


def run():
    Test().run(OUT_DIR)
diff --git a/benchmark/tests/arcade_accelerate/__init__.py b/benchmark/tests/arcade_accelerate/__init__.py
new file mode 100644
index 0000000..a301b7e
--- /dev/null
+++ b/benchmark/tests/arcade_accelerate/__init__.py
@@ -0,0 +1 @@
from . import collision
diff --git a/benchmark/tests/arcade_accelerate/collision.py b/benchmark/tests/arcade_accelerate/collision.py
new file mode 100644
index 0000000..062d61d
--- /dev/null
+++ b/benchmark/tests/arcade_accelerate/collision.py
@@ -0,0 +1,102 @@
import random

import arcade

from benchmark import OUT_DIR
from benchmark.tests.base import AcceleratedPerfTest

SPRITE_SCALING_COIN = 0.09
SPRITE_SCALING_PLAYER = 0.5
SPRITE_NATIVE_SIZE = 128
SPRITE_SIZE = int(SPRITE_NATIVE_SIZE * SPRITE_SCALING_COIN)
SCREEN_WIDTH = 1800
SCREEN_HEIGHT = 1000
SCREEN_TITLE = "Moving Sprite Stress Test - Arcade"
USE_SPATIAL_HASHING = True
DEFAULT_METHOD = 3


class Test(AcceleratedPerfTest):
    name = "collision"
    instances = (({"method": 3}, "Simple"),)

    def __init__(self, method: int = DEFAULT_METHOD):
        super().__init__(
            size=(SCREEN_WIDTH, SCREEN_HEIGHT),
            title=SCREEN_TITLE,
            start_count=0,
            increment_count=1000,
            duration=60.0,
        )
        self.method = method
        self.name = f"collision-{self.method}"
        self.series_name = "ArcadeAccelerateTest"

        # Variables that will hold sprite lists
        self.coin_list = None
        self.player_list = None
        self.player = None

    def setup(self):
        self.window.background_color = arcade.color.AMAZON
        self.coin_texture = arcade.load_texture(":resources:images/items/coinGold.png")
        # Sprite lists
        self.coin_list = arcade.SpriteList(use_spatial_hash=USE_SPATIAL_HASHING)
        self.player_list = arcade.SpriteList()
        self.player = arcade.Sprite(
            ":resources:images/animated_characters/female_person/femalePerson_idle.png",
            scale=SPRITE_SCALING_PLAYER,
        )
        self.player.center_x = random.randrange(SCREEN_WIDTH)
        self.player.center_y = random.randrange(SCREEN_HEIGHT)
        self.player.change_x = 3
        self.player.change_y = 5
        self.player_list.append(self.player)

    def add_coins(self, amount):
        """Add a new set of coins"""
        for _ in range(amount):
            coin = arcade.Sprite(
                self.coin_texture,
                center_x=random.randrange(SPRITE_SIZE, SCREEN_WIDTH - SPRITE_SIZE),
                center_y=random.randrange(SPRITE_SIZE, SCREEN_HEIGHT - SPRITE_SIZE),
                scale=SPRITE_SCALING_COIN,
            )
            self.coin_list.append(coin)

    def on_draw(self):
        super().on_draw()
        self.coin_list.draw()
        self.player_list.draw()

    def on_update(self, delta_time: float):
        super().on_update(delta_time)

        self.player_list.update()
        # Bounce the player off the edges of the screen
        if self.player.center_x < 0 and self.player.change_x < 0:
            self.player.change_x *= -1
        if self.player.center_y < 0 and self.player.change_y < 0:
            self.player.change_y *= -1

        if self.player.center_x > SCREEN_WIDTH and self.player.change_x > 0:
            self.player.change_x *= -1
        if self.player.center_y > SCREEN_HEIGHT and self.player.change_y > 0:
            self.player.change_y *= -1

        # Respawn any coins the player touched
        coin_hit_list = arcade.check_for_collision_with_list(
            self.player, self.coin_list, method=self.method
        )
        for coin in coin_hit_list:
            coin.center_x = random.randrange(SCREEN_WIDTH)
            coin.center_y = random.randrange(SCREEN_HEIGHT)

    def update_state(self):
        # Figure out if we need more coins
        if self.timing.target_n > len(self.coin_list):
            new_coin_amount = self.timing.target_n - len(self.coin_list)
            self.add_coins(new_coin_amount)
            self.coin_list.write_sprite_buffers_to_gpu()


def run():
    Test().run(OUT_DIR)
diff --git a/benchmark/tests/base.py b/benchmark/tests/base.py
new file mode 100644
index 0000000..fbaa27b
--- /dev/null
+++ b/benchmark/tests/base.py
@@ -0,0 +1,163 @@
import sys
from pathlib import Path
from typing import Tuple

import arcade

from benchmark.timing import PerformanceTiming


class PerfTest:
    name = "default"
    type = "default"
    series_name = "default"
    instances = []

    def __init__(
        self,
        size: Tuple[int, int],
        title: str = "Perf Test",
        start_count: int = 0,
        increment_count: int = 100,
        duration: float = 60.0,
        **kwargs,
    ):
        self.size = size
        self.title = title
        self.start_count = start_count
        self.increment_count = increment_count
        self.duration = duration
        self.frame = 0
        self.timing = None

    @property
    def instance_name(self) -> str:
        return f"{self.type}_{self.name}"

    def setup(self):
        pass

    def on_draw(self):
        pass

    def on_update(self, delta_time: float):
        self.frame += 1

    def update_state(self):
        pass

    def run(self, session_dir: Path):
        self.frame = 0
        out_path = session_dir / "data"
        out_path.mkdir(parents=True, exist_ok=True)

        self.timing = PerformanceTiming(
            out_path / f"{self.instance_name}.csv",
            start_n=self.start_count,
            increment_n=self.increment_count,
            end_time=self.duration,
        )


class ArcadePerfTest(PerfTest):
    type = "arcade"

    def __init__(
        self,
        size: Tuple[int, int],
        title: str = "Perf Test",
        start_count: int = 0,
        increment_count: int = 100,
        duration: float = 60.0,
        **kwargs,
    ):
        super().__init__(
            size=size,
            title=title,
            start_count=start_count,
            increment_count=increment_count,
            duration=duration,
            **kwargs,
        )
        self.window = None

    def on_draw(self):
        pass

    def on_update(self, delta_time: float):
        return super().on_update(delta_time)

    def update_state(self):
        pass

    def run_test(self, session_dir: Path):
        """Run the test without writing out timing data."""
        super().run(session_dir)
        self.create_window()
        self.setup()
        while not self.timing.end_run():
            self.window.dispatch_events()
            self.on_update(1 / 60)
            self.on_draw()
            self.update_state()
            self.window.flip()

    def run(self, session_dir: Path):
        """Run the test collecting data."""
        super().run(session_dir)
        self.create_window()
        self.setup()

        while not self.timing.end_run():
            self.window.dispatch_events()

            self.timing.start_timer("update")
            self.on_update(1 / 60)
            self.timing.stop_timer("update")

            self.window.clear()

            self.timing.start_timer("draw")
            self.on_draw()
            self.window.ctx.flush()  # Wait for draw to finish
            self.timing.stop_timer("draw")

            self.update_state()

            self.window.flip()

        self.timing.write()

    def create_window(self):
        try:
            self.window = arcade.get_window()
            self.window.set_size(*self.size)
        except RuntimeError:
            self.window = arcade.open_window(*self.size, self.title)
        # Run a few frames to warm up the window
        for _ in range(10):
            self.window.clear()
            self.window.flip()
        self.window.flip()


class AcceleratedPerfTest(ArcadePerfTest):
    type = "arcade-accelerate"

    def run(self, session_dir: Path):
        # Arcade must be fully unloaded before the arcade-accelerate
        # bootstrap is applied, so that the patched modules are the ones
        # re-imported when the test runs.
        to_uncache = []
        for mod in sys.modules:
            if mod.startswith("arcade."):
                to_uncache.append(mod)

        for mod in to_uncache:
            del sys.modules[mod]

        import arcade_accelerate

        arcade_accelerate.bootstrap()
        import arcade  # re-import the now-patched arcade

        super().run(session_dir)
diff --git a/benchmark/timing.py b/benchmark/timing.py
new file mode 100644
index 0000000..447b3eb
--- /dev/null
+++ b/benchmark/timing.py
@@ -0,0 +1,74 @@
import statistics
import timeit
from typing import List


class PerformanceTiming:
    def __init__(self, results_file, start_n, increment_n, end_time):
        self.program_start_time = timeit.default_timer()
        self.result_path = results_file
        self.last_report = 0
        self.start_timers = {}
        self.timing_lists = {}
        self.first_line = True

        self.start_n = start_n
        self.increment_n = increment_n
        self.end_time = end_time
        self.output: List[str] = []

    @property
    def total_program_time(self):
        return timeit.default_timer() - self.program_start_time

    @property
    def target_n(self):
        # Target workload size: grows by increment_n for each elapsed second
        return int(self.total_program_time + 0.5) * self.increment_n + self.start_n

    def end_run(self):
        return self.total_program_time > self.end_time

    def start_timer(self, timer_name):
        self.start_timers[timer_name] = timeit.default_timer()

    def stop_timer(self, timer_name):
        time = timeit.default_timer() - self.start_timers[timer_name]
        if timer_name not in self.timing_lists:
            self.timing_lists[timer_name] = []
        self.timing_lists[timer_name].append(time)
        self.report()

    def report(self):
        current_time = self.total_program_time
        if self.first_line:
            self.first_line = False
            output = "Time, FPS, Sprite Count, Draw Time, Update Time"
            self.output.append(output)

        # Emit one row of per-second averages every elapsed second
        if int(current_time) > int(self.last_report):
            exact_time = current_time - self.last_report
            self.last_report = current_time
            if "draw" not in self.timing_lists:
                draw_time = 0
            else:
                draw_time = statistics.mean(self.timing_lists["draw"])

            if "update" not in self.timing_lists:
                update_time = 0
                update_count = 0
            else:
                update_time = statistics.mean(self.timing_lists["update"])
                update_count = len(self.timing_lists["update"])

            fps = update_count / exact_time
            output = f"{int(current_time)}, {fps:.1f}, {self.target_n}, {draw_time:.6f}, {update_time:.6f}"
            self.output.append(output)

            self.timing_lists = {}

    def write(self):
        with open(self.result_path, "w") as fd:
            fd.write("\n".join(self.output))
diff --git a/pyproject.toml b/pyproject.toml
index ab755dd..8b6a363 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -28,6 +28,8 @@ homepage = "https://github.com/pythonarcade/arcade-accelerate"
 dev = [
     "ruff",
     "black",
+    "matplotlib",
+    "seaborn"
 ]
 
 [tool.maturin]
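
Reviewer notes (usage addendum, not part of the patch):

1. Running the suite. Assuming the repository root is the working directory, arcade-accelerate is built/installed, and the new matplotlib/seaborn dev dependencies are present, the entry point added in benchmark/__main__.py can be exercised as below. Note that -t and -n match the type and name class attributes of the test classes, so the collision test is selected with -n collision, not with a per-instance name like collision-3:

    # run every discovered test under an explicit session name
    python -m benchmark -s smoke

    # run only the pure-arcade collision test
    python -m benchmark -s smoke -t arcade -n collision

Each instance writes output/<session>/data/<type>_<name>.csv, for example output/smoke/data/arcade_collision-3.csv.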
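2. Adding a test. find_test_classes() discovers modules with pkgutil, so a new benchmark is a new module exposing a class literally named Test inside benchmark/tests/arcade/ (and, for comparison runs, a sibling module in benchmark/tests/arcade_accelerate/). A minimal sketch; the module name sprites.py and everything in its body are hypothetical, not part of this patch:

    # benchmark/tests/arcade/sprites.py
    from benchmark.tests.base import ArcadePerfTest


    class Test(ArcadePerfTest):
        name = "sprites"  # anything but "default", which TestManager rejects

        def __init__(self):
            super().__init__(size=(1280, 720), increment_count=500, duration=30.0)

        def setup(self):
            """Create the sprite lists the benchmark will exercise."""

        def update_state(self):
            """Grow the workload toward self.timing.target_n each second."""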
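3. Graphing a finished session. TestManager.create_graph compares instances within one session, and the same result can be produced directly with the primitives in benchmark/graph.py, mirroring its own __main__ block. A sketch, assuming the collision test was run for both backends under a session named smoke:

    from benchmark import OUT_DIR
    from benchmark.graph import DataSeries, PerfGraph

    data = OUT_DIR / "smoke" / "data"
    out = OUT_DIR / "smoke" / "graphs"
    out.mkdir(parents=True, exist_ok=True)

    graph = PerfGraph("Time To Detect Collisions", label_x="Sprite Count", label_y="Time")
    graph.add_series(DataSeries("arcade", data / "arcade_collision-3.csv"))
    graph.add_series(
        DataSeries("arcade-accelerate", data / "arcade-accelerate_collision-3.csv")
    )
    graph.create(out / "collision.png")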
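4. The timing contract. PerformanceTiming has no arcade dependency: bracket work with start_timer()/stop_timer() (only the "update" and "draw" timer names feed the report), and it records one CSV row per elapsed second containing the per-second mean of each timer. A standalone sketch with a fake workload, useful for eyeballing the output format:

    import time
    from benchmark.timing import PerformanceTiming

    timing = PerformanceTiming("standalone.csv", start_n=0, increment_n=100, end_time=3.0)
    while not timing.end_run():
        timing.start_timer("update")
        time.sleep(0.005)  # stand-in for one frame of real work
        timing.stop_timer("update")
    timing.write()  # header row: Time, FPS, Sprite Count, Draw Time, Update Time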