diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3b855cbeb1..2612813857 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -14,6 +14,8 @@ and this project adheres to
   OS information [#1170](https://github.com/OpenFn/Lightning/issues/1170)
 - Expose additional metrics to LiveDashboard
   [#1171](https://github.com/OpenFn/Lightning/issues/1171)
+- Add plumbing to dump Lightning metrics during load testing
+  [#1178](https://github.com/OpenFn/Lightning/issues/1178)
 
 ### Changed
 
diff --git a/benchmarking/BENCHMARKING.md b/benchmarking/BENCHMARKING.md
index f0f2f5a629..b7e2646d94 100644
--- a/benchmarking/BENCHMARKING.md
+++ b/benchmarking/BENCHMARKING.md
@@ -6,28 +6,47 @@ Execute the following steps to run a benchmark on Lightning:
    installed locally. If you're using `asdf` you can run `asdf install` in the
    project root.
 
-2. Spin up your Lightning local instance
+2. Start up a local Lightning instance with an attached iex session:
+
+   `iex -S mix phx.server`
 
-3. Run the demo setup script: `mix run --no-start priv/repo/demo.exs` The
+3. In the attached iex session, run the following to have Lightning log internal telemetry data:
+
+   ```elixir
+   filepath = Path.join("benchmarking", "load_test_data.csv")
+   output_file = File.open!(filepath, [:append])
+
+   c "benchmarking/load_test_production_spans.exs"
+
+   LoadTestingPrep.init(output_file)
+   ```
+
+4. Run the demo setup script: `mix run --no-start priv/repo/demo.exs` The
    `webhookURL` is already set to default to the webhook created in the demo
    data
 
-4. In another terminal (do not stop the Lightning server) run the
+5. In another terminal (do not stop the Lightning server), run the
    `benchmarking/script.js` file using the following command
 
-```bash
-k6 run benchmarking/script.js
-```
+   ```bash
+   k6 run benchmarking/script.js
+   ```
+
+   If the script exits successfully, this means the app met the defined performance
+   thresholds.
+
+   To collect the benchmarking data in a CSV file, run the previous command with
+   the `--out filename` option.
 
-If the script exits successfully, this means the app met the defined performance
-thresholds.
+   ```bash
+   k6 run --out csv=test_results.csv benchmarking/script.js
+   ```
 
-To collect the benchmarking data in a CSV file, run the previous command with
-the `--out filename` option.
+6. In the iex session, close the output file:
 
-```bash
-k6 run --out csv=test_results.csv benchmarking/script.js
-```
+   ```elixir
+   LoadTestingPrep.fin(output_file)
+   ```
 
 See [results output](https://k6.io/docs/get-started/results-output/) for
 other available output formats.
diff --git a/benchmarking/load_test_production_spans.exs b/benchmarking/load_test_production_spans.exs
new file mode 100644
index 0000000000..a7d0ea6cd5
--- /dev/null
+++ b/benchmarking/load_test_production_spans.exs
@@ -0,0 +1,54 @@
+defmodule TelemetryCSVLogger do
+  require Logger
+
+  def handle_event(
+        [:lightning, :workorder, :webhook, :stop] = event,
+        %{duration: duration} = measurements,
+        _metadata,
+        output_file: file
+      ) do
+    log_received(event, measurements)
+
+    IO.binwrite(file, "lightning.create_webhook_workorder.stop, #{duration}\n")
+  end
+
+  def handle_event(event, _measurements, _metadata, _config) do
+    log_received(event)
+  end
+
+  defp native_to_microsecond(duration) do
+    System.convert_time_unit(duration, :native, :microsecond)
+  end
+
+  defp log_received(event, %{duration: duration}) do
+    duration = native_to_microsecond(duration)
+
+    Logger.info(
+      "Received #{event |> Enum.join(".")} event. Duration: #{duration}µs"
+    )
+  end
+
+  defp log_received(event) do
+    Logger.info("Received #{event |> Enum.join(".")} event.")
+  end
+end
+
+defmodule LoadTestingPrep do
+  def init(output_file) do
+    telemetry_events = [
+      [:lightning, :workorder, :webhook, :stop]
+    ]
+
+    :ok =
+      :telemetry.attach_many(
+        "lightning-load-testing-events",
+        telemetry_events,
+        &TelemetryCSVLogger.handle_event/4,
+        output_file: output_file
+      )
+  end
+
+  def fin(file) do
+    File.close(file)
+  end
+end