From b61803c2d3baf99c5764b48107924be31511c5e4 Mon Sep 17 00:00:00 2001 From: Owen Nelson Date: Mon, 8 May 2023 13:53:20 -0700 Subject: [PATCH 1/8] initial sources for `svix-webhook-bridge` (formerly `svix-agent`) This is a copy/paste from the repo where this work was being done originally. --- webhook-bridge/.dockerignore | 1 + webhook-bridge/.gitignore | 1 + webhook-bridge/Cargo.lock | 4176 +++++++++++++++++ webhook-bridge/Cargo.toml | 8 + webhook-bridge/Dockerfile | 74 + webhook-bridge/README.md | 203 + webhook-bridge/generic-queue/Cargo.toml | 38 + .../generic-queue/src/gcp_pubsub.rs | 308 ++ webhook-bridge/generic-queue/src/lib.rs | 148 + .../generic-queue/src/memory_queue.rs | 148 + webhook-bridge/generic-queue/src/rabbitmq.rs | 228 + webhook-bridge/generic-queue/src/redis.rs | 310 ++ webhook-bridge/generic-queue/src/sqs.rs | 206 + webhook-bridge/run-tests.sh | 8 + .../svix-agent-plugin-generic/Cargo.toml | 32 + .../svix-agent-plugin-generic/src/config.rs | 88 + .../svix-agent-plugin-generic/src/error.rs | 37 + .../src/gcp_pubsub/mod.rs | 142 + .../svix-agent-plugin-generic/src/lib.rs | 397 ++ .../tests/gcp_pubsub_consumer.rs | 343 ++ .../tests/rabbitmq_consumer.rs | 332 ++ .../tests/redis_stream_consumer.rs | 301 ++ .../tests/sqs_consumer.rs | 335 ++ .../Cargo.toml | 27 + .../src/config.rs | 175 + .../src/forwarding.rs | 121 + .../src/lib.rs | 106 + .../src/runtime.rs | 173 + .../src/types.rs | 388 ++ .../src/verification.rs | 125 + webhook-bridge/svix-agent-types/Cargo.toml | 9 + webhook-bridge/svix-agent-types/src/lib.rs | 9 + webhook-bridge/svix-agent.example.yaml | 125 + webhook-bridge/svix-agent/Cargo.toml | 33 + webhook-bridge/svix-agent/src/config/mod.rs | 101 + webhook-bridge/svix-agent/src/main.rs | 186 + webhook-bridge/testing-docker-compose.yml | 28 + 37 files changed, 9470 insertions(+) create mode 100644 webhook-bridge/.dockerignore create mode 100644 webhook-bridge/.gitignore create mode 100644 webhook-bridge/Cargo.lock create mode 100644 
webhook-bridge/Cargo.toml create mode 100644 webhook-bridge/Dockerfile create mode 100644 webhook-bridge/README.md create mode 100644 webhook-bridge/generic-queue/Cargo.toml create mode 100644 webhook-bridge/generic-queue/src/gcp_pubsub.rs create mode 100644 webhook-bridge/generic-queue/src/lib.rs create mode 100644 webhook-bridge/generic-queue/src/memory_queue.rs create mode 100644 webhook-bridge/generic-queue/src/rabbitmq.rs create mode 100644 webhook-bridge/generic-queue/src/redis.rs create mode 100644 webhook-bridge/generic-queue/src/sqs.rs create mode 100755 webhook-bridge/run-tests.sh create mode 100644 webhook-bridge/svix-agent-plugin-generic/Cargo.toml create mode 100644 webhook-bridge/svix-agent-plugin-generic/src/config.rs create mode 100644 webhook-bridge/svix-agent-plugin-generic/src/error.rs create mode 100644 webhook-bridge/svix-agent-plugin-generic/src/gcp_pubsub/mod.rs create mode 100644 webhook-bridge/svix-agent-plugin-generic/src/lib.rs create mode 100644 webhook-bridge/svix-agent-plugin-generic/tests/gcp_pubsub_consumer.rs create mode 100644 webhook-bridge/svix-agent-plugin-generic/tests/rabbitmq_consumer.rs create mode 100644 webhook-bridge/svix-agent-plugin-generic/tests/redis_stream_consumer.rs create mode 100644 webhook-bridge/svix-agent-plugin-generic/tests/sqs_consumer.rs create mode 100644 webhook-bridge/svix-agent-plugin-webhook-receiver/Cargo.toml create mode 100644 webhook-bridge/svix-agent-plugin-webhook-receiver/src/config.rs create mode 100644 webhook-bridge/svix-agent-plugin-webhook-receiver/src/forwarding.rs create mode 100644 webhook-bridge/svix-agent-plugin-webhook-receiver/src/lib.rs create mode 100644 webhook-bridge/svix-agent-plugin-webhook-receiver/src/runtime.rs create mode 100644 webhook-bridge/svix-agent-plugin-webhook-receiver/src/types.rs create mode 100644 webhook-bridge/svix-agent-plugin-webhook-receiver/src/verification.rs create mode 100644 webhook-bridge/svix-agent-types/Cargo.toml create mode 100644 
webhook-bridge/svix-agent-types/src/lib.rs create mode 100644 webhook-bridge/svix-agent.example.yaml create mode 100644 webhook-bridge/svix-agent/Cargo.toml create mode 100644 webhook-bridge/svix-agent/src/config/mod.rs create mode 100644 webhook-bridge/svix-agent/src/main.rs create mode 100644 webhook-bridge/testing-docker-compose.yml diff --git a/webhook-bridge/.dockerignore b/webhook-bridge/.dockerignore new file mode 100644 index 000000000..2f7896d1d --- /dev/null +++ b/webhook-bridge/.dockerignore @@ -0,0 +1 @@ +target/ diff --git a/webhook-bridge/.gitignore b/webhook-bridge/.gitignore new file mode 100644 index 000000000..ea8c4bf7f --- /dev/null +++ b/webhook-bridge/.gitignore @@ -0,0 +1 @@ +/target diff --git a/webhook-bridge/Cargo.lock b/webhook-bridge/Cargo.lock new file mode 100644 index 000000000..8bed90a54 --- /dev/null +++ b/webhook-bridge/Cargo.lock @@ -0,0 +1,4176 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aho-corasick" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67fc08ce920c31afb70f013dcce1bfc3a3195de6a228474e45e1f145b36f8d04" +dependencies = [ + "memchr", +] + +[[package]] +name = "amq-protocol" +version = "7.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de7ec72218cc1c9063bdf559a60f40405980ee56bca469ad1e549cc0d76deb46" +dependencies = [ + "amq-protocol-tcp", + "amq-protocol-types", + "amq-protocol-uri", + "cookie-factory", + "nom", + "serde", +] + +[[package]] +name = "amq-protocol-tcp" +version = "7.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09fae6d8a492462d268b48b7bc14d76a53e5310414f909c61cb3a509dbe7ca9b" +dependencies = [ + 
"amq-protocol-uri", + "tcp-stream", + "tracing", +] + +[[package]] +name = "amq-protocol-types" +version = "7.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7891a2fc253f8919d3caa49fccd06d721002e015940c8592f7824c0b4e80a485" +dependencies = [ + "cookie-factory", + "nom", + "serde", + "serde_json", +] + +[[package]] +name = "amq-protocol-uri" +version = "7.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da3649e6751a8fb43330f2a442409970f10f51c79b987fcbbfc7093f4edb6a5e" +dependencies = [ + "amq-protocol-types", + "percent-encoding", + "url", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is-terminal", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41ed9a86bf92ae6580e0a31281f65a1b1d867c0cc68d5346e2ae128dddfa6a7d" + +[[package]] +name = "anstyle-parse" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e765fd216e48e067936442276d1d57399e37bce53c264d6fefbe298080cb57ee" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "anstyle-wincon" +version = "1.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" +dependencies = [ + "anstyle", + "windows-sys 0.48.0", +] + +[[package]] +name = "anyhow" +version = "1.0.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" + +[[package]] +name = "assert-json-diff" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "async-channel" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf46fee83e5ccffc220104713af3292ff9bc7c64c7de289f66dae8e38d826833" +dependencies = [ + "concurrent-queue", + "event-listener", + "futures-core", +] + +[[package]] +name = "async-executor" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fa3dc5f2a8564f07759c008b9109dc0d39de92a88d5588b8a5036d286383afb" +dependencies = [ + "async-lock", + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "slab", +] + +[[package]] +name = "async-fs" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06" +dependencies = [ + "async-lock", + "autocfg", + "blocking", + "futures-lite", +] + +[[package]] +name = "async-global-executor" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1b6f5d7df27bd294849f8eec66ecfc63d11814df7a4f5d74168a2394467b776" +dependencies = [ + "async-channel", + "async-executor", + "async-io", + "async-lock", + "blocking", + "futures-lite", + "once_cell", +] + +[[package]] +name = "async-global-executor-trait" +version = "2.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "33dd14c5a15affd2abcff50d84efd4009ada28a860f01c14f9d654f3e81b3f75" +dependencies = [ + "async-global-executor", + "async-trait", + "executor-trait", +] + +[[package]] +name = "async-io" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" +dependencies = [ + "async-lock", + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-lite", + "log", + "parking", + "polling", + "rustix", + "slab", + "socket2", + "waker-fn", +] + +[[package]] +name = "async-lock" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa24f727524730b077666307f2734b4a1a1c57acb79193127dcc8914d5242dd7" +dependencies = [ + "event-listener", +] + +[[package]] +name = "async-net" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4051e67316bc7eff608fe723df5d32ed639946adcd69e07df41fd42a7b411f1f" +dependencies = [ + "async-io", + "autocfg", + "blocking", + "futures-lite", +] + +[[package]] +name = "async-process" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a9d28b1d97e08915212e2e45310d47854eafa69600756fc735fb788f75199c9" +dependencies = [ + "async-io", + "async-lock", + "autocfg", + "blocking", + "cfg-if", + "event-listener", + "futures-lite", + "rustix", + "signal-hook", + "windows-sys 0.48.0", +] + +[[package]] +name = "async-reactor-trait" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6012d170ad00de56c9ee354aef2e358359deb1ec504254e0e5a3774771de0e" +dependencies = [ + "async-io", + "async-trait", + "futures-core", + "reactor-trait", +] + +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "async-task" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc7ab41815b3c653ccd2978ec3255c81349336702dfdf62ee6f7069b12a3aae" + +[[package]] +name = "async-trait" +version = "0.1.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "atomic-waker" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1181e1e0d1fce796a03db1ae795d67167da795f9cf4a39c37589e85ef57f26d3" + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "aws-config" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc00553f5f3c06ffd4510a9d576f92143618706c45ea6ff81e84ad9be9588abd" +dependencies = [ + "aws-credential-types", + "aws-http", + "aws-sdk-sso", + "aws-sdk-sts", + "aws-smithy-async", + "aws-smithy-client", + "aws-smithy-http", + "aws-smithy-http-tower", + "aws-smithy-json", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "hex", + "http", + "hyper", + "ring", + "time 0.3.21", + "tokio", + "tower", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-credential-types" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "4cb57ac6088805821f78d282c0ba8aec809f11cbee10dda19a97b03ab040ccc2" +dependencies = [ + "aws-smithy-async", + "aws-smithy-types", + "fastrand", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-endpoint" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c5f6f84a4f46f95a9bb71d9300b73cd67eb868bc43ae84f66ad34752299f4ac" +dependencies = [ + "aws-smithy-http", + "aws-smithy-types", + "aws-types", + "http", + "regex", + "tracing", +] + +[[package]] +name = "aws-http" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a754683c322f7dc5167484266489fdebdcd04d26e53c162cad1f3f949f2c5671" +dependencies = [ + "aws-credential-types", + "aws-smithy-http", + "aws-smithy-types", + "aws-types", + "bytes", + "http", + "http-body", + "lazy_static", + "percent-encoding", + "pin-project-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-sqs" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "455c218364c68bde3229be8eea1b93535f9efe8e521875f33a8541da6d278099" +dependencies = [ + "aws-credential-types", + "aws-endpoint", + "aws-http", + "aws-sig-auth", + "aws-smithy-async", + "aws-smithy-client", + "aws-smithy-http", + "aws-smithy-http-tower", + "aws-smithy-json", + "aws-smithy-query", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "bytes", + "http", + "regex", + "tokio-stream", + "tower", + "tracing", +] + +[[package]] +name = "aws-sdk-sso" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "babfd626348836a31785775e3c08a4c345a5ab4c6e06dfd9167f2bee0e6295d6" +dependencies = [ + "aws-credential-types", + "aws-endpoint", + "aws-http", + "aws-sig-auth", + "aws-smithy-async", + "aws-smithy-client", + "aws-smithy-http", + "aws-smithy-http-tower", + "aws-smithy-json", + "aws-smithy-types", + "aws-types", + "bytes", + "http", + "regex", + "tokio-stream", + "tower", 
+ "tracing", +] + +[[package]] +name = "aws-sdk-sts" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d0fbe3c2c342bc8dfea4bb43937405a8ec06f99140a0dcb9c7b59e54dfa93a1" +dependencies = [ + "aws-credential-types", + "aws-endpoint", + "aws-http", + "aws-sig-auth", + "aws-smithy-async", + "aws-smithy-client", + "aws-smithy-http", + "aws-smithy-http-tower", + "aws-smithy-json", + "aws-smithy-query", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "bytes", + "http", + "regex", + "tower", + "tracing", +] + +[[package]] +name = "aws-sig-auth" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84dc92a63ede3c2cbe43529cb87ffa58763520c96c6a46ca1ced80417afba845" +dependencies = [ + "aws-credential-types", + "aws-sigv4", + "aws-smithy-http", + "aws-types", + "http", + "tracing", +] + +[[package]] +name = "aws-sigv4" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "392fefab9d6fcbd76d518eb3b1c040b84728ab50f58df0c3c53ada4bea9d327e" +dependencies = [ + "aws-smithy-http", + "form_urlencoded", + "hex", + "hmac", + "http", + "once_cell", + "percent-encoding", + "regex", + "sha2", + "time 0.3.21", + "tracing", +] + +[[package]] +name = "aws-smithy-async" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae23b9fe7a07d0919000116c4c5c0578303fbce6fc8d32efca1f7759d4c20faf" +dependencies = [ + "futures-util", + "pin-project-lite", + "tokio", + "tokio-stream", +] + +[[package]] +name = "aws-smithy-client" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5230d25d244a51339273b8870f0f77874cd4449fb4f8f629b21188ae10cfc0ba" +dependencies = [ + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-http-tower", + "aws-smithy-types", + "bytes", + "fastrand", + "http", + "http-body", + "hyper", + "hyper-rustls", + "lazy_static", + 
"pin-project-lite", + "rustls 0.20.8", + "tokio", + "tower", + "tracing", +] + +[[package]] +name = "aws-smithy-http" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b60e2133beb9fe6ffe0b70deca57aaeff0a35ad24a9c6fab2fd3b4f45b99fdb5" +dependencies = [ + "aws-smithy-types", + "bytes", + "bytes-utils", + "futures-core", + "http", + "http-body", + "hyper", + "once_cell", + "percent-encoding", + "pin-project-lite", + "pin-utils", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "aws-smithy-http-tower" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a4d94f556c86a0dd916a5d7c39747157ea8cb909ca469703e20fee33e448b67" +dependencies = [ + "aws-smithy-http", + "aws-smithy-types", + "bytes", + "http", + "http-body", + "pin-project-lite", + "tower", + "tracing", +] + +[[package]] +name = "aws-smithy-json" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ce3d6e6ebb00b2cce379f079ad5ec508f9bcc3a9510d9b9c1840ed1d6f8af39" +dependencies = [ + "aws-smithy-types", +] + +[[package]] +name = "aws-smithy-query" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d58edfca32ef9bfbc1ca394599e17ea329cb52d6a07359827be74235b64b3298" +dependencies = [ + "aws-smithy-types", + "urlencoding", +] + +[[package]] +name = "aws-smithy-types" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58db46fc1f4f26be01ebdb821751b4e2482cd43aa2b64a0348fb89762defaffa" +dependencies = [ + "base64-simd", + "itoa", + "num-integer", + "ryu", + "time 0.3.21", +] + +[[package]] +name = "aws-smithy-xml" +version = "0.55.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb557fe4995bd9ec87fb244bbb254666a971dc902a783e9da8b7711610e9664c" +dependencies = [ + "xmlparser", +] + +[[package]] +name = "aws-types" +version = "0.55.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "de0869598bfe46ec44ffe17e063ed33336e59df90356ca8ff0e8da6f7c1d994b" +dependencies = [ + "aws-credential-types", + "aws-smithy-async", + "aws-smithy-client", + "aws-smithy-http", + "aws-smithy-types", + "http", + "rustc_version", + "tracing", +] + +[[package]] +name = "axum" +version = "0.6.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8175979259124331c1d7bf6586ee7e0da434155e4b2d48ec2c8386281d8df39" +dependencies = [ + "async-trait", + "axum-core", + "axum-macros", + "bitflags", + "bytes", + "futures-util", + "http", + "http-body", + "hyper", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-macros" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bb524613be645939e280b7279f7b017f98cf7f5ef084ec374df373530e73277" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "base-encode" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a17bd29f7c70f32e9387f4d4acfa5ea7b7749ef784fb78cf382df97069337b8c" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.0" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" + +[[package]] +name = "base64-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" +dependencies = [ + "outref", + "vsimd", +] + +[[package]] +name = "bb8" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e9f4fa9768efd269499d8fba693260cfc670891cf6de3adc935588447a77cc8" +dependencies = [ + "async-trait", + "futures-channel", + "futures-util", + "parking_lot 0.11.2", + "tokio", +] + +[[package]] +name = "bb8-redis" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c440295545cb69b3cec992ae8844fbb1de1c84f2f90248438af287e14bb09bde" +dependencies = [ + "async-trait", + "bb8", + "redis", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-padding" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" +dependencies = [ + "generic-array", +] + +[[package]] +name = "blocking" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77231a1c8f801696fc0123ec6150ce92cffb8e164a02afb9c8ddee0e9b65ad65" +dependencies = [ + "async-channel", + "async-lock", + "async-task", + "atomic-waker", + "fastrand", + "futures-lite", + "log", +] + +[[package]] +name = "bumpalo" +version 
= "3.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b1ce199063694f33ffb7dd4e0ee620741495c32833cde5aa08f02a0bf96f0c8" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" + +[[package]] +name = "bytes-utils" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e47d3a8076e283f3acd27400535992edb3ba4b5bb72f8891ad8fbe7932a7d4b9" +dependencies = [ + "bytes", + "either", +] + +[[package]] +name = "cbc" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6" +dependencies = [ + "cipher", +] + +[[package]] +name = "cc" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-integer", + "num-traits", + "time 0.1.45", + "wasm-bindgen", + "winapi", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + 
+[[package]] +name = "clap" +version = "4.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34d21f9bf1b425d2968943631ec91202fe5e837264063503708b83013f8fc938" +dependencies = [ + "clap_builder", + "clap_derive", + "once_cell", +] + +[[package]] +name = "clap_builder" +version = "4.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "914c8c79fb560f238ef6429439a30023c862f7a28e688c58f7203f12b29970bd" +dependencies = [ + "anstream", + "anstyle", + "bitflags", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9644cd56d6b87dbe899ef8b053e331c0637664e9e21a33dfcdc36093f5c5c4" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "clap_lex" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a2dd5a6fe8c6e3502f568a6353e5273bbb15193ad9a89e457b9970798efbea1" + +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + +[[package]] +name = "combine" +version = "4.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" +dependencies = [ + "bytes", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "concurrent-queue" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"62ec6771ecfa0762d24683ee5a32ad78487a3d3afdc0fb8cae19d2c5deb50b7c" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "cookie-factory" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "396de984970346b0d9e93d1415082923c679e5ae5c3ee3dcbd104f5610af126b" + +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" + +[[package]] +name = "cpufeatures" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e4c1eaa2012c47becbbad2ab175484c2a84d1185b566fb2cc5b8707343dfe58" +dependencies = [ + "libc", +] + +[[package]] +name = "crc16" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "338089f42c427b86394a5ee60ff321da23a5c89c9d89514c829687b26359fcff" + +[[package]] +name = "crc32fast" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +dependencies = [ + "cfg-if", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "cxx" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f61f1b6389c3fe1c316bf8a4dccc90a38208354b330925bce1f74a6c4756eb93" +dependencies = [ + "cc", + "cxxbridge-flags", + "cxxbridge-macro", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12cee708e8962df2aeb38f594aae5d827c022b6460ac71a7a3e2c3c2aae5a07b" +dependencies = [ + "cc", + "codespan-reporting", + "once_cell", + "proc-macro2", + "quote", + "scratch", + "syn 2.0.15", +] + +[[package]] +name = "cxxbridge-flags" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7944172ae7e4068c533afbb984114a56c46e9ccddda550499caa222902c7f7bb" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2345488264226bf682893e25de0769f3360aac9957980ec49361b083ddaa5bc5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "dashmap" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" +dependencies = [ + "cfg-if", + "hashbrown", + "lock_api", + "once_cell", + "parking_lot_core 0.9.7", +] + +[[package]] +name = "deadpool" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "421fe0f90f2ab22016f32a9881be5134fdd71c65298917084b0c7477cbc3856e" +dependencies = [ + "async-trait", + "deadpool-runtime", + "num_cpus", + "retain_mut", + "tokio", +] + +[[package]] +name = "deadpool-runtime" +version = "0.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1" + +[[package]] +name = "des" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffdd80ce8ce993de27e9f063a444a4d53ce8e8db4c1f00cc03af5ad5a9867a1e" +dependencies = [ + "cipher", +] + +[[package]] +name = "digest" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "doc-comment" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" + +[[package]] +name = "either" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" + +[[package]] +name = "encoding_rs" +version = "0.8.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "enum_dispatch" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11f36e95862220b211a6e2aa5eca09b4fa391b13cd52ceb8035a24bf65a79de2" +dependencies = [ + "once_cell", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "errno" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "executor-trait" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a1052dd43212a7777ec6a69b117da52f5e52f07aec47d00c1a2b33b85d06b08" +dependencies = [ + "async-trait", +] + +[[package]] +name = "fastrand" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +dependencies = [ + "instant", +] + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "flate2" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "flume" +version = "0.10.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1657b4441c3403d9f7b3409e47575237dac27b1b5726df654a6ecbf92f0f7577" +dependencies = [ + "futures-core", + "futures-sink", + "pin-project", + "spin 0.9.8", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = 
"foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" + +[[package]] +name = "futures-executor" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" + +[[package]] +name = "futures-lite" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" +dependencies = [ + "fastrand", + "futures-core", + 
"futures-io", + "memchr", + "parking", + "pin-project-lite", + "waker-fn", +] + +[[package]] +name = "futures-macro" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "futures-sink" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" + +[[package]] +name = "futures-task" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" + +[[package]] +name = "futures-timer" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" + +[[package]] +name = "futures-util" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "generic_queue" +version = "0.1.0" +dependencies = [ + "async-trait", + "aws-config", + "aws-sdk-sqs", + "bb8", + "bb8-redis", + "futures", + "futures-util", + "google-cloud-auth", + "google-cloud-default", + "google-cloud-gax", + "google-cloud-googleapis", + "google-cloud-pubsub", + "lapin", + "redis", + "redis_cluster_async", + "serde", + "serde_json", + 
"thiserror", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "google-cloud-auth" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54bbf625af612872d711647cedbc2e64c40b52f4cc8abc9013f6b23bc63a7d6a" +dependencies = [ + "async-trait", + "base64 0.21.0", + "google-cloud-metadata", + "google-cloud-token", + "home", + "jsonwebtoken", + "reqwest", + "serde", + "serde_json", + "thiserror", + "time 0.3.21", + "tokio", + "tracing", + "urlencoding", +] + +[[package]] +name = "google-cloud-default" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9337fa3a20c33cdd8edcbba5471e865da406206f5f67ba47ac89590030b1ba9" +dependencies = [ + "async-trait", + "google-cloud-auth", + "google-cloud-gax", + "google-cloud-pubsub", +] + +[[package]] +name = "google-cloud-gax" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c15ce5fa8244676a83e84422c980e39963a8620d010949f0a8f2ce53f53a32ff" +dependencies = [ + "google-cloud-token", + "http", + "thiserror", + "tokio", + "tokio-retry", + "tonic", + "tower", + "tracing", +] + +[[package]] +name = "google-cloud-googleapis" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "629f516672e1d057d76070f8ae59469a9a88ed5bfbe7076883da216f2d14a491" +dependencies = [ + "prost", + "prost-types", + 
"tonic", +] + +[[package]] +name = "google-cloud-metadata" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96e4ad0802d3f416f62e7ce01ac1460898ee0efc98f8b45cd4aab7611607012f" +dependencies = [ + "reqwest", + "thiserror", + "tokio", +] + +[[package]] +name = "google-cloud-pubsub" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37e003dba63baafcfdf003c9813a6b66077c69a41dcfaee2058ca0d0d3011e68" +dependencies = [ + "async-channel", + "async-stream", + "google-cloud-gax", + "google-cloud-googleapis", + "google-cloud-token", + "prost-types", + "thiserror", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "google-cloud-token" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fcd62eb34e3de2f085bcc33a09c3e17c4f65650f36d53eb328b00d63bcb536a" +dependencies = [ + "async-trait", +] + +[[package]] +name = "h2" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17f8a914c2987b688368b5138aa05321db91f4090cf26118185672ad588bce21" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "hmac-sha256" +version = "1.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc736091aacb31ddaa4cd5f6988b3c21e99913ac846b41f32538c5fae5d71bfe" + +[[package]] +name = "home" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "http" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "http-range-header" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bfe8eed0a9285ef776bb792479ea3834e8b94e13d615c2f66d03dd50a435a29" + +[[package]] +name = "http-types" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e9b187a72d63adbfba487f48095306ac823049cb504ee195541e91c7775f5ad" +dependencies = [ + "anyhow", + "async-channel", + "base64 0.13.1", + "futures-lite", + 
"http", + "infer", + "pin-project-lite", + "rand 0.7.3", + "serde", + "serde_json", + "serde_qs", + "serde_urlencoded", + "url", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + +[[package]] +name = "hyper" +version = "0.14.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab302d72a6f11a3b910431ff93aae7e773078c769f0a3ef15fb9ec692ed147d4" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" +dependencies = [ + "http", + "hyper", + "log", + "rustls 0.20.8", + "rustls-native-certs", + "tokio", + "tokio-rustls", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.56" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0722cd7114b7de04316e7ea5456a0bbb20e4adb46fd27a3697adb812cff0f37c" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +dependencies = [ + "cxx", + "cxx-build", +] + +[[package]] +name = "idna" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "infer" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64e9829a50b42bb782c1df523f78d332fe371b10c661e78b7a3c34b0198e9fac" + +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "block-padding", + "generic-array", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "io-lifetimes" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220" +dependencies = [ + "hermit-abi 0.3.1", + "libc", + "windows-sys 0.48.0", 
+] + +[[package]] +name = "ipnet" +version = "2.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12b6ee2129af8d4fb011108c73d99a1b83a85977f23b82460c0ae2e25bb4b57f" + +[[package]] +name = "is-terminal" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f" +dependencies = [ + "hermit-abi 0.3.1", + "io-lifetimes", + "rustix", + "windows-sys 0.48.0", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" + +[[package]] +name = "js-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "jsonwebtoken" +version = "8.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378" +dependencies = [ + "base64 0.21.0", + "pem", + "ring", + "serde", + "serde_json", + "simple_asn1", +] + +[[package]] +name = "lapin" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd03ea5831b44775e296239a64851e2fd14a80a363d202ba147009ffc994ff0f" +dependencies = [ + "amq-protocol", + "async-global-executor-trait", + "async-reactor-trait", + "async-trait", + "executor-trait", + "flume", + "futures-core", + "futures-io", + "parking_lot 0.12.1", + "pinky-swear", + "reactor-trait", + "serde", + "tracing", + "waker-fn", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.144" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1" + +[[package]] +name = "link-cplusplus" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5" +dependencies = [ + "cc", +] + +[[package]] +name = "linux-raw-sys" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ece97ea872ece730aed82664c424eb4c8291e1ff2480247ccf7409044bc6479f" + +[[package]] +name = "lock_api" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matchit" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b87248edafb776e59e6ee64a79086f65890d3510f2c656c000bf2a7e8a0aea40" + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mime_guess" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9" +dependencies = [ + "libc", + "log", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.45.0", +] + +[[package]] +name = "multimap" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +dependencies = [ + "hermit-abi 0.2.6", + "libc", +] + +[[package]] +name = "once_cell" +version = "1.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" + +[[package]] +name = "openssl" +version = "0.10.52" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01b8574602df80f7b85fdfc5392fa884a4e3b3f4f35402c070ab34c3d3f78d56" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] 
+name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e17f59264b2809d77ae94f0e1ebabc434773f370d6ca667bd223ea10e06cc7e" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "opentelemetry" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69d6c3d7288a106c0a363e4b0e8d308058d56902adefb16f4936f417ffef086e" +dependencies = [ + "opentelemetry_api", + "opentelemetry_sdk", +] + +[[package]] +name = "opentelemetry-http" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc79add46364183ece1a4542592ca593e6421c60807232f5b8f7a31703825d" +dependencies = [ + "async-trait", + "bytes", + "http", + "opentelemetry_api", + "reqwest", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1c928609d087790fc936a1067bdc310ae702bdf3b090c3f281b713622c8bbde" +dependencies = [ + "async-trait", + "futures", + "futures-util", + "http", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-proto", + "prost", + "reqwest", + "thiserror", + "tokio", + "tonic", +] + +[[package]] +name = "opentelemetry-proto" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d61a2f56df5574508dd86aaca016c917489e589ece4141df1b5e349af8d66c28" +dependencies = [ + "futures", + "futures-util", + "opentelemetry", + "prost", + "tonic", + "tonic-build", +] + +[[package]] +name = "opentelemetry_api" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c24f96e21e7acc813c7a8394ee94978929db2bcc46cf6b5014fc612bf7760c22" +dependencies = [ + 
"fnv", + "futures-channel", + "futures-util", + "indexmap", + "js-sys", + "once_cell", + "pin-project-lite", + "thiserror", +] + +[[package]] +name = "opentelemetry_sdk" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ca41c4933371b61c2a2f214bf16931499af4ec90543604ec828f7a625c09113" +dependencies = [ + "async-trait", + "crossbeam-channel", + "dashmap", + "fnv", + "futures-channel", + "futures-executor", + "futures-util", + "once_cell", + "opentelemetry_api", + "percent-encoding", + "rand 0.8.5", + "thiserror", + "tokio", + "tokio-stream", +] + +[[package]] +name = "outref" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4030760ffd992bef45b0ae3f10ce1aba99e33464c90d14dd7c039884963ddc7a" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "p12" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4873306de53fe82e7e484df31e1e947d61514b6ea2ed6cd7b45d63006fd9224" +dependencies = [ + "cbc", + "cipher", + "des", + "getrandom 0.2.9", + "hmac", + "lazy_static", + "rc2", + "sha1 0.10.5", + "yasna", +] + +[[package]] +name = "parking" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e" + +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" 
+dependencies = [ + "lock_api", + "parking_lot_core 0.9.7", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "windows-sys 0.45.0", +] + +[[package]] +name = "pem" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" +dependencies = [ + "base64 0.13.1", +] + +[[package]] +name = "percent-encoding" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" + +[[package]] +name = "petgraph" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" +dependencies = [ + "fixedbitset", + "indexmap", +] + +[[package]] +name = "pin-project" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.9" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pinky-swear" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d894b67aa7a4bf295db5e85349078c604edaa6fa5c8721e8eca3c7729a27f2ac" +dependencies = [ + "doc-comment", + "flume", + "parking_lot 0.12.1", + "tracing", +] + +[[package]] +name = "pkg-config" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" + +[[package]] +name = "polling" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" +dependencies = [ + "autocfg", + "bitflags", + "cfg-if", + "concurrent-queue", + "libc", + "log", + "pin-project-lite", + "windows-sys 0.48.0", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "prettyplease" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" +dependencies = [ + "proc-macro2", + "syn 1.0.109", +] + +[[package]] +name = "proc-macro2" +version = "1.0.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prost" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" +dependencies = [ + "bytes", + "heck", + "itertools", + "lazy_static", + "log", + "multimap", + "petgraph", + "prettyplease", + "prost", + "prost-types", + "regex", + "syn 1.0.109", + "tempfile", + "which", +] + +[[package]] +name = "prost-derive" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "prost-types" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "213622a1460818959ac1181aaeb2dc9c7f63df720db7d788b3e24eacd1983e13" +dependencies = [ + "prost", +] + +[[package]] +name = "quote" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f4f29d145265ec1c483c7c654450edde0bfe043d3938d6972630663356d9500" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.9", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rc2" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62c64daa8e9438b84aaae55010a93f396f8e60e3911590fcba770d04643fc1dd" +dependencies = [ + "cipher", +] + +[[package]] +name = "reactor-trait" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "438a4293e4d097556730f4711998189416232f009c137389e0f961d2bc0ddc58" +dependencies = [ + "async-trait", + "futures-core", + "futures-io", +] + +[[package]] +name = "redis" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "152f3863635cbb76b73bc247845781098302c6c9ad2060e1a9a7de56840346b6" +dependencies = [ + "async-trait", + "bytes", + "combine", + "futures-util", + "itoa", + "native-tls", + "percent-encoding", + "pin-project-lite", + "ryu", + "sha1 0.6.1", + "tokio", + 
"tokio-native-tls", + "tokio-util", + "url", +] + +[[package]] +name = "redis_cluster_async" +version = "0.7.1-alpha.0" +source = "git+https://github.com/redis-rs/redis-cluster-async.git?rev=e6fe168#e6fe168276faf0049061eb98ba1aa3ad68f7fd04" +dependencies = [ + "crc16", + "futures", + "log", + "pin-project-lite", + "rand 0.8.5", + "redis", + "tokio", +] + +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags", +] + +[[package]] +name = "redox_syscall" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af83e617f331cc6ae2da5443c602dfa5af81e517212d9d611a5b3ba1777b5370" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.7.1", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5996294f19bd3aae0453a862ad728f60e6600695733dd5df01da90c54363a3c" + +[[package]] +name = "reqwest" +version = "0.11.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13293b639a097af28fc8a90f22add145a9c954e49d77da06263d58cf44d5fb91" +dependencies = [ + "base64 0.21.0", + 
"bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "mime_guess", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "serde", + "serde_json", + "serde_urlencoded", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg", +] + +[[package]] +name = "retain_mut" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4389f1d5789befaf6029ebd9f7dac4af7f7e3d61b69d4f30e2ac02b57e7712b0" + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted", + "web-sys", + "winapi", +] + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.37.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d" +dependencies = [ + "bitflags", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustls" +version = "0.20.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" +dependencies = [ + "log", + "ring", + "sct", + "webpki", +] + +[[package]] +name = "rustls" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c911ba11bc8433e811ce56fde130ccf32f5127cab0e0194e9c68c5a5b671791e" +dependencies = [ + "log", + "ring", + 
"rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-connector" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "692d857261c41e2915b8ee70e40265e51010ee5d3c7a9b7d50837bc5cee86207" +dependencies = [ + "log", + "rustls 0.21.1", + "rustls-native-certs", + "rustls-webpki", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0167bac7a9f490495f3c33013e7722b53cb087ecbe082fb0c6387c96f634ea50" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b" +dependencies = [ + "base64 0.21.0", +] + +[[package]] +name = "rustls-webpki" +version = "0.100.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6207cd5ed3d8dca7816f8f3725513a34609c0c765bf652b8c3cb4cfd87db46b" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f3208ce4d8448b3f3e7d168a73f5e0c43a61e32930de3bceeccedb388b6bf06" + +[[package]] +name = "ryu" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" + +[[package]] +name = "schannel" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3" +dependencies = [ + "windows-sys 0.42.0", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = 
"scratch" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1" + +[[package]] +name = "sct" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "security-framework" +version = "2.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a332be01508d814fed64bf28f798a146d73792121129962fdf335bb3c49a4254" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31c9bb296072e961fcbd8853511dd39c2d8be2deb1e17c6860b1d30732b323b4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" + +[[package]] +name = "serde" +version = "1.0.162" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71b2f6e1ab5c2b98c05f0f35b236b22e8df7ead6ffbf51d7808da7f8817e7ab6" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.162" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2a0814352fd64b58489904a44ea8d90cb1a91dcb6b4f5ebabc32c8318e93cb6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "serde_json" +version = "1.0.96" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = 
"serde_path_to_error" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7f05c1d5476066defcdfacce1f52fc3cae3af1d3089727100c02ae92e5abbe0" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_qs" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7715380eec75f029a4ef7de39a9200e0a63823176b759d055b613f5a87df6a6" +dependencies = [ + "percent-encoding", + "serde", + "thiserror", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_yaml" +version = "0.9.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9d684e3ec7de3bf5466b32bd75303ac16f0736426e5a4e0d6e489559ce1249c" +dependencies = [ + "indexmap", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "sha1" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1da05c97445caa12d05e848c4a4fcbbea29e748ac28f7e80e9b010392063770" +dependencies = [ + "sha1_smol", +] + +[[package]] +name = "sha1" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha1_smol" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" + +[[package]] +name = "sha2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + 
+[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "signal-hook" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "732768f1176d21d09e076c23a93123d40bba92d50c4058da34d45c8de8e682b9" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + +[[package]] +name = "simple_asn1" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" +dependencies = [ + "num-bigint", + "num-traits", + "thiserror", + "time 0.3.21", +] + +[[package]] +name = "slab" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" + +[[package]] +name = "smol" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13f2b548cd8447f8de0fdf1c592929f70f4fc7039a05e47404b0d096ec6987a1" +dependencies = [ + "async-channel", + "async-executor", + "async-fs", + "async-io", + "async-lock", + "async-net", + "async-process", + "blocking", + "futures-lite", +] + +[[package]] +name = "socket2" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "subtle" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" + +[[package]] +name = "svix" +version = "0.84.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a87e2fd4388d272d516007be0b954837710147da8597b21a2c73f6fd87ab69fa" +dependencies = [ + "base64 0.13.1", + "hmac-sha256", + "http", + "reqwest", + "serde", + "serde_derive", + "serde_json", + "thiserror", + "time 0.3.21", + "url", +] + +[[package]] +name = "svix-agent" +version = "0.1.0" +dependencies = [ + "clap", + "lazy_static", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-otlp", + "serde", + "serde_yaml", + "svix-agent-plugin-generic", + "svix-agent-plugin-webhook-receiver", + "svix-agent-types", + "svix-ksuid", + "tokio", + "tracing", + "tracing-opentelemetry", + "tracing-subscriber", +] + +[[package]] +name = "svix-agent-plugin-generic" +version = "0.1.0" +dependencies = [ + "aws-config", + "aws-sdk-sqs", + "fastrand", + "futures-lite", + "generic_queue", + "google-cloud-auth", + "google-cloud-default", + "google-cloud-gax", + "google-cloud-googleapis", + "google-cloud-pubsub", + "lapin", + "redis", + "serde", + 
"serde_json", + "svix", + "svix-agent-types", + "tokio", + "tokio-executor-trait", + "tokio-reactor-trait", + "tracing", + "tracing-subscriber", + "wiremock", +] + +[[package]] +name = "svix-agent-plugin-webhook-receiver" +version = "0.1.0" +dependencies = [ + "anyhow", + "axum", + "chrono", + "enum_dispatch", + "generic_queue", + "http", + "hyper", + "serde", + "serde_json", + "serde_yaml", + "smol", + "svix", + "svix-agent-types", + "threadpool", + "tokio", + "tower-http", + "tracing", +] + +[[package]] +name = "svix-agent-types" +version = "0.1.0" +dependencies = [ + "async-trait", +] + +[[package]] +name = "svix-ksuid" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75d773122e48817eb6eb74605cf799574a855bf4c7eb0c1bb06c005067123b13" +dependencies = [ + "base-encode", + "byteorder", + "getrandom 0.2.9", + "time 0.3.21", +] + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "tcp-stream" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6918876e41110757f36b734388e17834e69abf7ca4456ebe8a38af21f5a651d7" +dependencies = [ + "cfg-if", + "p12", + "rustls-connector", + "rustls-pemfile", +] + +[[package]] +name = "tempfile" +version = "3.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" +dependencies = [ + "cfg-if", + "fastrand", + "redox_syscall 0.3.5", + "rustix", + "windows-sys 0.45.0", +] + +[[package]] +name = "termcolor" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "thiserror" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "thread_local" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "threadpool" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" +dependencies = [ + "num_cpus", +] + +[[package]] +name = "time" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +dependencies = [ + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", +] + +[[package]] +name = "time" +version = "0.3.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f3403384eaacbca9923fa06940178ac13e4edb725486d70e8e15881d0c836cc" +dependencies = [ + "itoa", + "serde", + 
"time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" + +[[package]] +name = "time-macros" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "372950940a5f07bf38dbe211d7283c9e6d7327df53794992d293e534c733d09b" +dependencies = [ + "time-core", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c786bf8134e5a3a166db9b29ab8f48134739014a3eca7bc6bfa95d673b136f" +dependencies = [ + "autocfg", + "bytes", + "libc", + "mio", + "num_cpus", + "parking_lot 0.12.1", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-executor-trait" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "802ccf58e108fe16561f35348fabe15ff38218968f033d587e399a84937533cc" +dependencies = [ + "async-trait", + "executor-trait", + "tokio", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-reactor-trait" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9481a72f36bd9cbb8d6dd349227c4783e234e4332cfe806225bc929c4b92486" +dependencies = [ + "async-trait", + "futures-core", + "futures-io", + "reactor-trait", + "tokio", + "tokio-stream", +] + +[[package]] +name = "tokio-retry" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f" +dependencies = [ + "pin-project", + "rand 0.8.5", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls 0.20.8", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "tonic" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8f219fad3b929bef19b1f86fbc0358d35daed8f2cac972037ac0dc10bbb8d5fb" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.13.1", + "bytes", + "flate2", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "prost-derive", + "rustls-pemfile", + "tokio", + "tokio-rustls", + "tokio-stream", + "tokio-util", + "tower", + "tower-layer", + "tower-service", + "tracing", + "tracing-futures", + "webpki-roots", +] + +[[package]] +name = "tonic-build" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bf5e9b9c0f7e0a7c027dcfaba7b2c60816c7049171f679d99ee2ff65d0de8c4" +dependencies = [ + "prettyplease", + "proc-macro2", + "prost-build", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap", + "pin-project", + "pin-project-lite", + "rand 0.8.5", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f873044bf02dd1e8239e9c1293ea39dad76dc594ec16185d0a1bf31d8dc8d858" +dependencies = [ + "bitflags", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-range-header", + "pin-project-lite", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if", + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f57e3ca2a01450b1a921183a9c9cbfda207fd822cef4ccb00a65402cbba7a74" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "tracing-core" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "tracing-log" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-opentelemetry" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21ebb87a95ea13271332df069020513ab70bdb5637ca42d6e492dc3bbbad48de" +dependencies = [ + "once_cell", + "opentelemetry", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", +] + +[[package]] +name = "tracing-serde" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +dependencies = [ + "serde", + 
"tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", + "tracing-serde", +] + +[[package]] +name = "try-lock" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" + +[[package]] +name = "typenum" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" + +[[package]] +name = "unicase" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +dependencies = [ + "version_check", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" + +[[package]] +name = "unicode-ident" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + +[[package]] +name = "unsafe-libyaml" +version = "0.2.8" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1865806a559042e51ab5414598446a5871b561d21b6764f2eabb0dd481d880a6" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "url" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "urlencoding" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8db7427f936968176eaa7cdf81b7f98b980b18495ec28f1b5791ac3bfe3eea9" + +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + +[[package]] +name = "waker-fn" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" + +[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +dependencies = [ + "log", + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 1.0.109", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" + +[[package]] +name = "web-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + +[[package]] +name = "which" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +dependencies = [ + "either", + "libc", + "once_cell", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version 
= "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +dependencies = [ + "windows-targets 0.48.0", +] + +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.0", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +dependencies = [ + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + "windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 0.48.0", + "windows_x86_64_msvc 0.48.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" + +[[package]] +name = "winreg" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +dependencies = [ + "winapi", +] + +[[package]] +name = 
"wiremock" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd7b0b5b253ebc0240d6aac6dd671c495c467420577bf634d3064ae7e6fa2b4c" +dependencies = [ + "assert-json-diff", + "async-trait", + "base64 0.21.0", + "deadpool", + "futures", + "futures-timer", + "http-types", + "hyper", + "log", + "once_cell", + "regex", + "serde", + "serde_json", + "tokio", +] + +[[package]] +name = "xmlparser" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d25c75bf9ea12c4040a97f829154768bbbce366287e2dc044af160cd79a13fd" + +[[package]] +name = "yasna" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e17bb3549cc1321ae1296b9cdc2698e2b6cb1992adfa19a8c72e5b7a738f44cd" + +[[package]] +name = "zeroize" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" diff --git a/webhook-bridge/Cargo.toml b/webhook-bridge/Cargo.toml new file mode 100644 index 000000000..5ab4dc496 --- /dev/null +++ b/webhook-bridge/Cargo.toml @@ -0,0 +1,8 @@ +[workspace] +members = [ + "generic-queue", + "svix-agent-types", + "svix-agent", + "svix-agent-plugin-generic", + "svix-agent-plugin-webhook-receiver", +] diff --git a/webhook-bridge/Dockerfile b/webhook-bridge/Dockerfile new file mode 100644 index 000000000..9ad43a9a9 --- /dev/null +++ b/webhook-bridge/Dockerfile @@ -0,0 +1,74 @@ +# Base build +FROM rust:1.69-slim-bullseye AS build + +RUN apt-get update && apt-get install -y \ + build-essential=12.* \ + checkinstall=1.* \ + zlib1g-dev=1:* \ + pkg-config=0.29.* \ + libssl-dev=* \ + protobuf-compiler=* \ + --no-install-recommends + +RUN set -ex ; \ + mkdir -p /app ;\ + useradd appuser ;\ + chown -R appuser: /app ;\ + mkdir -p /home/appuser ;\ + chown -R appuser: /home/appuser + +WORKDIR /app + +# Hack to enable docker caching +COPY Cargo.toml . +COPY Cargo.lock . 
+COPY generic-queue/Cargo.toml generic-queue/ +COPY svix-agent-types/Cargo.toml svix-agent-types/ +COPY svix-agent-plugin-generic/Cargo.toml svix-agent-plugin-generic/ +COPY svix-agent-plugin-webhook-receiver/Cargo.toml svix-agent-plugin-webhook-receiver/ +COPY svix-agent/Cargo.toml svix-agent/ +RUN set -ex ;\ + mkdir generic-queue/src ;\ + mkdir svix-agent-plugin-generic/src ;\ + mkdir svix-agent-plugin-webhook-receiver/src ;\ + mkdir svix-agent-types/src ;\ + mkdir svix-agent/src ;\ + echo '' > generic-queue/src/lib.rs ;\ + echo '' > svix-agent-plugin-generic/src/lib.rs ;\ + echo '' > svix-agent-plugin-webhook-receiver/src/lib.rs ;\ + echo '' > svix-agent-types/src/lib.rs ;\ + echo 'fn main() { println!("Dummy!"); }' > svix-agent/src/main.rs ;\ + cargo build --release ;\ + rm -rf \ + generic-queue/src \ + svix-agent-plugin-generic/src \ + svix-agent-plugin-webhook-receiver/src \ + svix-agent-types/src \ + svix-agent/src + +COPY . . +# touching the lib.rs/main.rs ensures cargo rebuilds them instead of considering them already built. 
+RUN touch */src/lib.rs && touch */src/main.rs +RUN cargo build --release --frozen + +# Production +FROM debian:11.2-slim AS prod + +RUN set -ex ; \ + mkdir -p /app ;\ + useradd appuser ;\ + chown -R appuser: /app ;\ + mkdir -p /home/appuser ;\ + chown -R appuser: /home/appuser + +RUN apt-get update ;\ + apt-get install --no-install-recommends -y ca-certificates=20210119; \ + update-ca-certificates; \ + rm -rf /var/lib/apt/lists/* + +USER appuser + +COPY --from=build /app/target/release/svix-agent /usr/local/bin/svix-agent + +# Will fail if there's no `svix-agent.yaml` in the CWD or `SVIX_AGENT_CFG` is not set to a valid path to a config +CMD ["svix-agent"] diff --git a/webhook-bridge/README.md b/webhook-bridge/README.md new file mode 100644 index 000000000..5ed3ae73d --- /dev/null +++ b/webhook-bridge/README.md @@ -0,0 +1,203 @@ +# Svix Agent + +This service subscribes to a queue or stream and forwards each item to Svix when a valid message is found. + +## Usage + +``` +svix-agent -c path/to/svix-agent.yaml +``` + +## Configuration + +> For an annotated sample configuration see [the example config](svix-agent.example.yaml). + +`svix-agent` is organized in terms of "plugins" which are tasks that run in tandem. +Each plugin represents a unit of work performed while the agent while it runs. + +Presently there are 2 "plugins" available for `svix-agent`. + +### svix-agent-plugin-generic + +This plugin consumes messages from message queues to and forwards them to Svix to create messages. + +Currently this supports the following messaging systems: +- GCP Pub/Sub +- RabbitMQ +- Redis +- SQS + +Generally instances of this plugin are configured in terms of inputs and outputs, where the input configuration varies +by the messaging system. + +The output options control how the Svix client is built and configured. +The sole required field is `token`. 
+ +Messages received by these consumers must follow an expected format: + +``` +{ + // This indicates which Svix application to send the message to + "app_id": "app_XYZ", + + // The `message` field has the same requirements as the standard `MessageIn` + // used for Create Message API requests + "message": { + "eventType": "my.event", + "payload": {"abc": 123} + } +} +``` + +> The comments in the above JSON are for illustrative purposes only ;) +> That's not valid JSON! Sorry! + + +For detail on the `message` field, see: + +Important to note that queues, exchanges, topics, or what have you, should be created and configured independently, +prior to using the agent plugin. There's nothing in place to automatically create these resources. +The plugin will only try (and fail) to read from the stream in such a case. + + +#### Example GCP Pub/Sub consumer + +The GCP consumer plugin can optionally specify a path to a credentials file. + +When left unset, it falls back to looking env vars: +- `GOOGLE_APPLICATION_CREDENTIALS` set to a path to a credentials file +- `GOOGLE_APPLICATION_CREDENTIALS_JSON` set to the contents of a credentials file (ie. a blob of JSON) + +```yaml +plugins: +- type: "gcppubsubconsumer" + input: + subscription_id: "my-subscription" + # Optional - will fallback to looking at env vars when left unset. 
+ credentials_file: "/path/to/credentials.json" + output: + # Required (the Svix token to use when creating messages with this consumer) + token: "XYZ" +``` + +#### Example RabbitMq consumer + +```yaml +plugins: +- type: "rabbitmqconsumer" + input: + # Required + uri: "amqp://guest:guest@localhost:5672/%2f" + # Required + queue_name: "my-queue" + # Optional (default: unset, managed by rabbitmq) + consumer_tag: "my-consumer-001" + # Optional (default: false) + requeue_on_nack: true + output: + # Required (the Svix token to use when creating messages with this consumer) + token: "XYZ" +``` + +#### Example Redis consumer + +```yaml +plugins: +- type: "redisconsumer" + input: + # Required + dsn: "redis://localhost:6379/" + # Required + queue_key: "my_queue" + # Required + consumer_name: "my_consumer" + # Required + consumer_group: "my_group" + # Required + max_connections: 4 + # Optional (default: false) + requeue_on_nack: true + output: + # Required (the Svix token to use when creating messages with this consumer) + token: "XYZ" +``` + +#### Example SQS consumer + +Note that the SQS consumer requires credentials to be set as environment vars: +- `AWS_DEFAULT_REGION` +- `AWS_ACCESS_KEY_ID` +- `AWS_SECRET_ACCESS_KEY` + +> This incidentally means all SQS consumers configured for a given `svix-agent` will need to share these details. + +```yaml +plugins: +- type: "sqsconsumer" + input: + # Required + queue_dsn: "http://localhost:19324/000000000000/local" + # Optional (default: false) + override_endpoint: true + output: + # Required (the Svix token to use when creating messages with this consumer) + token: "XYZ" +``` + + +### svix-agent-plugin-webhook-receiver + +This plugin starts an HTTP server which accepts webhooks and forwards them to one of the supported messaging +systems. 
+ +Again, same as with `svix-agent-plugin-generic`, the supported systems are: + +- GCP Pub/Sub +- RabbitMQ +- Redis +- SQS + +The HTTP server also (optionally) performs validation of the webhooks using Svix's signature verification method. + +The `verification` section for each route can be set one of two ways: +* `none` which accepts and forwards any JSON POST HTTP request. +* `svix` that takes a Svix endpoint secret (starting with `whsec_`) and + validating it using an official Svix library + + +Each instance of this plugin can forward requests to one or more messaging destinations based on the trailing path +segment: + +``` +/webhook/:name +``` + +#### Example + +```yaml +plugins: +- type: "webhookreceiver" + listen_addr: "0.0.0.0:5000" + routes: + - name: "goog" + verification: + type: "svix" + secret: "whsec_XXXXX=" + destination: + type: gcppubsub + topic: "example" + - name: "amz" + verification: + type: "none" + destination: + # Note that the SQS forwarder requires credentials to be set as environment vars: + # - `AWS_DEFAULT_REGION` + # - `AWS_ACCESS_KEY_ID` + # - `AWS_SECRET_ACCESS_KEY` + type: "sqs" + queue_dsn: "https://example.aws.com/my-queue" +``` + +In this situation, `POST`ing a JSON payload to `http://localhost:5000/webhook/goog` would forward the body +to the `example` topic in GCP Pub/Sub _only when verification passes_, whereas `POST`'ing to +`http://localhost:5000/webhook/amz` will forward the body to SQS without extra validation. 
diff --git a/webhook-bridge/generic-queue/Cargo.toml b/webhook-bridge/generic-queue/Cargo.toml new file mode 100644 index 000000000..d34538595 --- /dev/null +++ b/webhook-bridge/generic-queue/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "generic_queue" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +async-trait = "0.1" +aws-config = { version = "0.55", optional = true } +aws-sdk-sqs = { version = "0.25", optional = true } +bb8 = { version = "0.7.1", optional = true } +bb8-redis = { version = "0.10.1", optional = true } +futures = { version = "0.3", default-features = false, features = ["async-await", "std"] } +lapin = { version = "2", optional = true } +redis = { version = "0.21.5", features = ["tokio-comp", "tokio-native-tls-comp", "streams"], optional = true } +redis_cluster_async = { git = "https://github.com/redis-rs/redis-cluster-async.git", rev = "e6fe168", optional = true } +google-cloud-pubsub = { version="0.14.1", optional = true } +google-cloud-googleapis = { version = "0.8.0", optional = true } +google-cloud-gax = { version = "0.14.1", optional = true } +google-cloud-default = { version = "0.2.0", features = ["pubsub"], optional = true } +google-cloud-auth = { version = "0.9.2", optional = true } +serde = { version = "1", features = ["derive", "rc"] } +serde_json = "1" +thiserror = "1" +tokio = { version = "1", features = ["full"] } +tokio-util = { version = "0.7.8", optional = true } +futures-util = { version = "0.3.28", optional = true } +tracing = "0.1" + +[features] +default = ["gcp_pubsub", "memory_queue", "rabbitmq", "redis", "redis_cluster", "sqs"] +gcp_pubsub = ["dep:google-cloud-pubsub", "dep:google-cloud-googleapis", "dep:google-cloud-gax", "dep:tokio-util", "dep:futures-util", "dep:google-cloud-default", "dep:google-cloud-auth"] +memory_queue = [] +rabbitmq = ["dep:lapin"] +redis = ["dep:bb8", "dep:bb8-redis", "dep:redis"] 
+redis_cluster = ["redis", "dep:redis_cluster_async"] +sqs = ["dep:aws-config", "dep:aws-sdk-sqs"] diff --git a/webhook-bridge/generic-queue/src/gcp_pubsub.rs b/webhook-bridge/generic-queue/src/gcp_pubsub.rs new file mode 100644 index 000000000..45e2daffb --- /dev/null +++ b/webhook-bridge/generic-queue/src/gcp_pubsub.rs @@ -0,0 +1,308 @@ +//! Support for Google Cloud Pub/Sub. +//! +//! In this system subscriptions are like queue bindings to topics. +//! Consumers need a subscription id to start receiving messages. +//! We don't have any public API for managing/creating/deleting subscriptions in this module, so +//! this is left to the user to do via whatever method they like. +//! +//! - +//! - +//! - (how to publish messages ad hoc, helpful for debugging) +//! +//! Don't have a better place to mention this just yet. +//! When testing against the gcloud emulator, you need to set `PUBSUB_EMULATOR_HOST` to the bind +//! address, and `PUBSUB_PROJECT_ID` (matching however the emulator was configured). +//! This should bypass the need for credentials and so on. +//! ```sh +//! export PUBSUB_EMULATOR_HOST=localhost:8085 +//! export PUBSUB_PROJECT_ID=local-project +//! ``` +//! > N.b. the rust client hardcodes the project id to `local-project` when it sees the +//! > `PUBSUB_EMULATOR_HOST` env var in use, so if you see errors about resources not found etc, it +//! > might be because of a project mismatch. +//! +//! To use the `gcloud` CLI with the emulator (useful for creating topics/subscriptions), you have +//! to configure an override for the pubsub API: +//! +//! ```sh +//! gcloud config set api_endpoint_overrides/pubsub "http://${PUBSUB_EMULATOR_HOST}/" +//! ``` +//! Note that you'll also have to manually set it back to the default as needed: +//! ```sh +//! gcloud config unset api_endpoint_overrides/pubsub +//! ``` +//! h/t +//! +//! Also note, and this is odd, `gcloud` will prompt you to login even though you're trying to +//! connect to a local process. +//! 
Go ahead and follow the prompts to get your CLI working. +//! +//! I guess it still wants to talk to GCP for other interactions other than the pubsub API. +//! +//! ## Example `gcloud` usage: +//! ```sh +//! gcloud --project=local-project pubsub topics create tester +//! gcloud --project=local-project pubsub topics create dead-letters +//! gcloud --project=local-project pubsub subscriptions create local-1 \ +//! --topic=tester \ +//! --dead-letter-topic=dead-letters \ +//! --max-delivery-attempts=5 +//! gcloud --project local-project pubsub topics publish tester --message='{"my message": 1234}' +//! ``` +//! +use crate::{Delivery, QueueError, TaskQueueBackend, TaskQueueReceive, TaskQueueSend}; +use async_trait::async_trait; +use futures_util::StreamExt; +use google_cloud_auth::credentials::CredentialsFile; +use google_cloud_default::WithAuthExt; +use google_cloud_googleapis::pubsub::v1::PubsubMessage; +use google_cloud_pubsub::client::{Client, ClientConfig}; +use google_cloud_pubsub::subscriber::ReceivedMessage; +use google_cloud_pubsub::subscription::Subscription; +use serde::{de::DeserializeOwned, Serialize}; +use std::path::{Path, PathBuf}; +use std::time::Instant; +use std::{marker::PhantomData, time::Duration}; + +pub struct GCPPubSubConfig { + pub topic: String, + pub subscription_id: String, + pub credentials_file: Option, +} + +pub struct GCPPubSubQueueBackend; + +/// Make a `ClientConfig` from a `CredentialsFile` on disk. 
+async fn configure_client_from_file>( + cred_file_path: P, +) -> Result { + let bytes = std::fs::read(cred_file_path).map_err(QueueError::generic)?; + let creds: CredentialsFile = serde_json::from_slice(&bytes).map_err(QueueError::generic)?; + ClientConfig::default() + .with_credentials(creds) + .await + .map_err(QueueError::generic) +} + +/// Making a `ClientConfig` via env vars is possible in two ways: +/// - setting `GOOGLE_APPLICATION_CREDENTIALS` to the file path to have it loaded automatically +/// - setting `GOOGLE_APPLICATION_CREDENTIALS_JSON` to the file contents (avoiding the need for a +/// file on disk). +/// +/// Naturally relying on env vars for configuration means it's difficult to have +async fn configure_client_from_env() -> Result { + ClientConfig::default() + .with_auth() + .await + .map_err(QueueError::generic) +} + +async fn get_client(cfg: &GCPPubSubConfig) -> Result { + let config = { + if let Some(fp) = &cfg.credentials_file { + tracing::trace!("reading gcp creds from file: {}", fp.display()); + configure_client_from_file(&fp).await? + } else { + tracing::trace!("reading gcp creds from env"); + configure_client_from_env().await? + } + }; + Client::new(config).await.map_err(QueueError::generic) +} + +async fn get_consumer( + client: Client, + cfg: &GCPPubSubConfig, +) -> Result { + Ok(GCPPubSubQueueConsumer { + client, + subscription_id: cfg.subscription_id.clone(), + }) +} +async fn get_producer( + client: Client, + cfg: &GCPPubSubConfig, +) -> Result { + let topic = client.topic(&cfg.topic); + // Only warn on startup, if the topic doesn't exist. If it gets created after the fact, we + // should be able to still use it when available, otherwise if it's still missing at that time, error. + if !topic.exists(None).await.map_err(QueueError::generic)? 
{ + tracing::warn!("topic {} does not exist", &cfg.topic); + } + Ok(GCPPubSubQueueProducer { + client, + topic: cfg.topic.clone(), + }) +} + +#[async_trait] +impl TaskQueueBackend + for GCPPubSubQueueBackend +{ + type PairConfig = GCPPubSubConfig; + type Delivery = GCPPubSubDelivery; + type Producer = GCPPubSubQueueProducer; + type Consumer = GCPPubSubQueueConsumer; + + async fn new_pair( + cfg: GCPPubSubConfig, + ) -> Result<(GCPPubSubQueueProducer, GCPPubSubQueueConsumer), QueueError> { + let client = get_client(&cfg).await?; + let producer = get_producer(client.clone(), &cfg).await?; + let consumer = get_consumer(client, &cfg).await?; + Ok((producer, consumer)) + } + + async fn producing_half(cfg: GCPPubSubConfig) -> Result { + let client = get_client(&cfg).await?; + let producer = get_producer(client, &cfg).await?; + Ok(producer) + } + + async fn consuming_half(cfg: GCPPubSubConfig) -> Result { + let client = get_client(&cfg).await?; + let consumer = get_consumer(client, &cfg).await?; + Ok(consumer) + } +} + +pub struct GCPPubSubDelivery { + message: ReceivedMessage, + _pd: PhantomData, +} + +impl std::fmt::Debug for GCPPubSubDelivery { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.debug_struct("GCPPubSubDelivery") + .field("ack_id", &self.message.ack_id()) + .field("message_id", &self.message.message.message_id) + .finish() + } +} + +#[async_trait] +impl Delivery for GCPPubSubDelivery { + fn payload(&self) -> Result { + serde_json::from_slice(&self.message.message.data).map_err(Into::into) + } + + async fn ack(self) -> Result<(), QueueError> { + self.message.ack().await.map_err(QueueError::generic) + } + + async fn nack(self) -> Result<(), QueueError> { + self.message.nack().await.map_err(QueueError::generic) + } +} + +pub struct GCPPubSubQueueProducer { + client: Client, + topic: String, +} + +impl std::fmt::Debug for GCPPubSubQueueProducer { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + 
f.debug_struct("GCPPubSubQueueProducer") + .field("topic", &self.topic) + .finish() + } +} + +#[async_trait] +impl TaskQueueSend for GCPPubSubQueueProducer { + async fn send(&self, payload: T) -> Result<(), QueueError> { + let msg = PubsubMessage { + data: serde_json::to_string(&payload)?.into(), + ..Default::default() + }; + + // N.b. defer the creation of a publisher/topic until needed. Helps recover when + // the topic does not yet exist, but will soon. + // Might be more expensive to recreate each time, but overall more reliable. + let topic = self.client.topic(&self.topic); + + // Publishing to a non-existent topic will cause the publisher to wait (forever?) + // Giving this error will allow dependents like `svix-agent-plugin-webhook-receiver` to + // respond 500 immediately when this happens, instead of holding the connection open + // indefinitely. + if !topic.exists(None).await.map_err(QueueError::generic)? { + return Err(QueueError::Generic( + format!("topic {} does not exist", &topic.id()).into(), + )); + } + // FIXME: may need to expose `PublisherConfig` to caller so they can tweak this + let publisher = topic.new_publisher(None); + let awaiter = publisher.publish(msg).await; + awaiter.get().await.map_err(QueueError::generic)?; + Ok(()) + } +} + +pub struct GCPPubSubQueueConsumer { + client: Client, + subscription_id: String, +} + +impl GCPPubSubQueueConsumer { + async fn subscription(&self) -> Result { + let subscription = self.client.subscription(&self.subscription_id); + if !subscription + .exists(None) + .await + .map_err(QueueError::generic)? 
+ { + return Err(QueueError::Generic( + format!("subscription {} does not exist", &self.subscription_id).into(), + )); + } + Ok(subscription) + } +} + +impl std::fmt::Debug for GCPPubSubQueueConsumer { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.debug_struct("GCPPubSubQueueConsumer") + .field("subscription_id", &self.subscription_id) + .finish() + } +} + +#[async_trait] +impl + TaskQueueReceive> for GCPPubSubQueueConsumer +{ + async fn receive_all( + &mut self, + max_batch_size: usize, + timeout: Duration, + ) -> Result>, QueueError> { + let start = Instant::now(); + let subscription = self.subscription().await?; + let mut stream = subscription + .subscribe(None) + .await + .map_err(QueueError::generic)?; + + let mut out = Vec::with_capacity(max_batch_size); + + loop { + if let Some(message) = stream.next().await { + tracing::trace!( + "Got Message: id={}, ack_id={} ", + &message.message.message_id, + &message.ack_id() + ); + + out.push(GCPPubSubDelivery { + message, + _pd: PhantomData, + }); + } + + if out.len() >= max_batch_size || (!out.is_empty() && start.elapsed() > timeout) { + break; + } + } + Ok(out) + } +} diff --git a/webhook-bridge/generic-queue/src/lib.rs b/webhook-bridge/generic-queue/src/lib.rs new file mode 100644 index 000000000..0229b1d10 --- /dev/null +++ b/webhook-bridge/generic-queue/src/lib.rs @@ -0,0 +1,148 @@ +//! This crate is meant to act as an abstraction layer over many concrete queue implementations such +//! as to allow supporting multiple backends from your library or application. 
+ +use std::time::Duration; + +use async_trait::async_trait; +use thiserror::Error; + +#[cfg(feature = "gcp_pubsub")] +pub mod gcp_pubsub; +#[cfg(feature = "memory_queue")] +pub mod memory_queue; +#[cfg(feature = "rabbitmq")] +pub mod rabbitmq; +#[cfg(feature = "redis")] +pub mod redis; +#[cfg(feature = "sqs")] +pub mod sqs; + +#[derive(Debug, Error)] +pub enum QueueError { + #[error("no data was returned in the specified timeframe")] + NoData, + #[error("cannot create this type without matching half")] + CannotCreateHalf, + #[error("error serializing or deserializing type: {0}")] + Serde(#[from] serde_json::Error), + #[error("unknown error: {0}")] + Generic(Box), +} + +impl QueueError { + fn generic(e: E) -> Self { + Self::Generic(Box::new(e)) + } +} + +/// This trait is simply used to define basic types associated with a queue backend, as well as to +/// allow the simple creation of a [`TaskQueueBackend::new_pair`] from the associated configuration +/// type. +/// +/// This trait is generic over the type sent and/or received through the queue, but it tends to be +/// defined with blanket implementations allowing you to send and receive multiple types through a +/// single [`Self::Producer`], [`Self::Consumer`] pair. The notable exception to this is the in- +/// memory queue which is implemented via [`tokio::broadcast`] channels, which accept only items of +/// the preselected type. +#[async_trait] +pub trait TaskQueueBackend { + /// All necessary configuration for creating a sending/receiving pair should be contained in this + /// configuration type. + type PairConfig; + + /// Because the actual type delivered by a queue tends to include a lot of metadata, and becaue + /// connection handles often have to be cloned into a delivery type to allow ACKing and NACKing, + /// this associated type defines what is actually output by the [`Self::Consumer`]. 
+ /// + /// This type must implement [`Delivery`] which contains a method for accessing the inner body and + /// relevant metadata. + type Delivery: Delivery + Send + Sync; + + /// This type is what actually allows sending messages down the queue. + /// + /// This type must implement [`TaskQueueSend`] which contains a method for dispatching an event + /// down the queue -- [`TaskQueueSend::send`]. + type Producer: TaskQueueSend; + + /// This type is what actually allows receiving messages from the queue. + /// + /// This type must implement [`TaskQueueReceive`] which contains a method for receiving a batch + /// of events with a maximum batch size and timeout -- [`TaskQueueReceive::receive_all`]. + type Consumer: TaskQueueReceive; + + /// Creates a new [`Self::Producer`], [`Self::Consumer`] pair of the types associated with the + /// implementor of this trait. + /// + /// This takes a configuration type as a parameter ([`Self::PairConfig`]) which is unique depending + /// on the implementor of this trait. + async fn new_pair( + cfg: Self::PairConfig, + ) -> Result<(Self::Producer, Self::Consumer), QueueError>; + + /// Creates just the [`Self::Producer`] compared to [`Self::new_pair`]. + async fn producing_half(cfg: Self::PairConfig) -> Result; + + /// Creates just the [`Self::Consumer`] comapared to [`Self::new_pair`]. + async fn consuming_half(cfg: Self::PairConfig) -> Result; +} + +/// This trait is used to interface with deliveries for a specific queue. Implementors of this trait +/// will often contain, in addition to the payload and metadata, members necessary for ACKing or +/// NACKing a delivery. +/// +/// As ACKing and NACKing is done from the delivery (such as to allow [`Send`]ing deliveries across +/// threads without also needing a handle to the consumer that produced this delivery), any +/// implementations of this trait should *avoid* implementing [`Clone`]. This will ensure that a +/// single delivery is not both ACKed and NACKed. 
+#[async_trait] +pub trait Delivery { + /// Returns a freshly deserialized instance of the contained payload. + fn payload(&self) -> Result; + + /// ACKs this message, which, depending on what backend you are using, may be a NOOP, or it may + /// explicity acknowledge the successful processing the message. + /// + /// When ACKed, consumers will not see this exact message again. + async fn ack(self) -> Result<(), QueueError>; + /// NACKs this message, which, depending on what backend you are using, may be a NOOP, it may + /// explicitly mark a messaege as not acknowledged, or it may reinsert the message back into the + /// end of the queue. + /// + /// When NACKed, consumers of this queue will process the message again at some point. + async fn nack(self) -> Result<(), QueueError>; +} + +/// This trait is implemented for the sending/transmitting/producing side of the queue -- the side +/// which takes a payload, and sends it such that it is eventually received by a matching impl of +/// [`TaskQueueReceive`]. +/// +/// It contains one method: [`TaskQueueSend::send`] which does as expected. +#[async_trait] +pub trait TaskQueueSend: Send + Sync { + /// Sends a payload through the queue associated with this producer. + async fn send(&self, payload: T) -> Result<(), QueueError>; +} + +/// This trait is implemented for the receiving/consuming side of the queue -- the side which +/// receives batches of [`Delivery`] instances. +/// +/// NOTE that some backends don't support batching by default, in which case they will attempt to +/// read until the timeout duration is met or the batch is full. +/// +/// It contains one method [`TaskQueueReceive::receive_all`] which does as expected. +#[async_trait] +pub trait TaskQueueReceive>: Send + Sync { + /// Receives a batch of deliveries from the queue. + /// + /// This function will not return an empty [`Vec`], instead it will await the moment of at least + /// one event being received to return. 
+ /// + /// After the inital event is received (in a queue that does not natively support batching), the + /// queue is read from in a loop until either the maximum batch size has been met or the timeout + /// duration has been exceeded. + async fn receive_all( + &mut self, + max_batch_size: usize, + timeout: Duration, + ) -> Result, QueueError>; +} diff --git a/webhook-bridge/generic-queue/src/memory_queue.rs b/webhook-bridge/generic-queue/src/memory_queue.rs new file mode 100644 index 000000000..0ff7c81ad --- /dev/null +++ b/webhook-bridge/generic-queue/src/memory_queue.rs @@ -0,0 +1,148 @@ +use std::{fmt::Debug, marker::PhantomData, time::Duration}; + +use async_trait::async_trait; +use serde::{de::DeserializeOwned, Serialize}; +use tokio::sync::broadcast; + +use crate::{Delivery, QueueError, TaskQueueBackend, TaskQueueReceive, TaskQueueSend}; + +pub struct MemoryQueueBackend< + T: 'static + Clone + Debug + DeserializeOwned + Send + Serialize + Sync, +>(PhantomData); + +#[async_trait] +impl TaskQueueBackend + for MemoryQueueBackend +{ + type PairConfig = usize; + + type Delivery = MemoryQueueDelivery; + + type Producer = MemoryQueueProducer; + type Consumer = MemoryQueueConsumer; + + async fn new_pair( + cfg: usize, + ) -> Result<(MemoryQueueProducer, MemoryQueueConsumer), QueueError> { + let (tx, rx) = broadcast::channel(cfg); + + let producer = MemoryQueueProducer { tx: tx.clone() }; + let consumer = MemoryQueueConsumer { tx, rx }; + + Ok((producer, consumer)) + } + + async fn producing_half(_cfg: usize) -> Result, QueueError> { + Err(QueueError::CannotCreateHalf) + } + + async fn consuming_half(_cfg: usize) -> Result, QueueError> { + Err(QueueError::CannotCreateHalf) + } +} + +#[derive(Clone)] +pub struct MemoryQueueDelivery { + payload: T, + ack_tx: broadcast::Sender, +} + +#[async_trait] +impl Delivery + for MemoryQueueDelivery +{ + fn payload(&self) -> Result { + Ok(self.payload.clone()) + } + + async fn ack(self) -> Result<(), QueueError> { + Ok(()) + } + 
+ async fn nack(self) -> Result<(), QueueError> { + self.ack_tx + .send(self.payload) + .map_err(QueueError::generic)?; + Ok(()) + } +} + +#[derive(Clone)] +pub struct MemoryQueueProducer { + tx: broadcast::Sender, +} + +#[async_trait] +impl TaskQueueSend + for MemoryQueueProducer +{ + async fn send(&self, payload: T) -> Result<(), QueueError> { + self.tx.send(payload).map_err(QueueError::generic)?; + Ok(()) + } +} + +pub struct MemoryQueueConsumer { + tx: broadcast::Sender, + rx: broadcast::Receiver, +} + +#[async_trait] +impl + TaskQueueReceive> for MemoryQueueConsumer +{ + async fn receive_all( + &mut self, + max_batch_size: usize, + timeout: Duration, + ) -> Result>, QueueError> { + let mut out = Vec::with_capacity(max_batch_size); + + // Await at least one delivery before starting the clock + out.push( + self.rx + .recv() + .await + .map(|payload| MemoryQueueDelivery { + payload, + ack_tx: self.tx.clone(), + }) + .map_err(QueueError::generic)?, + ); + + let mut interval = tokio::time::interval(timeout); + + // Skip the first tick which is instantaneous + interval.tick().await; + + loop { + tokio::select! 
{ + _ = interval.tick() => break, + msg = self.rx.recv() => { + out.push( + msg.map(|payload| MemoryQueueDelivery { + payload, + ack_tx: self.tx.clone(), + }) + .map_err(QueueError::generic)?, + ); + + if out.len() >= max_batch_size { + break; + } + } + } + } + + Ok(out) + } +} + +impl Clone for MemoryQueueConsumer { + fn clone(&self) -> Self { + Self { + rx: self.tx.subscribe(), + tx: self.tx.clone(), + } + } +} diff --git a/webhook-bridge/generic-queue/src/rabbitmq.rs b/webhook-bridge/generic-queue/src/rabbitmq.rs new file mode 100644 index 000000000..698b75c83 --- /dev/null +++ b/webhook-bridge/generic-queue/src/rabbitmq.rs @@ -0,0 +1,228 @@ +use std::{marker::PhantomData, time::Duration}; + +use async_trait::async_trait; +use futures::StreamExt; +use lapin::{acker::Acker, Channel, Connection, Consumer}; +use serde::{de::DeserializeOwned, Serialize}; + +use crate::{Delivery, QueueError, TaskQueueBackend, TaskQueueReceive, TaskQueueSend}; + +pub use lapin::{ + options::{BasicAckOptions, BasicConsumeOptions, BasicNackOptions, BasicPublishOptions}, + types::FieldTable, + BasicProperties, ConnectionProperties, +}; + +pub struct RabbitMqConfig { + pub uri: String, + pub connection_properties: ConnectionProperties, + + pub publish_exchange: String, + pub publish_routing_key: String, + pub publish_options: BasicPublishOptions, + pub publish_properites: BasicProperties, + + pub consume_queue: String, + pub consumer_tag: String, + pub consume_options: BasicConsumeOptions, + pub consume_arguments: FieldTable, + + pub requeue_on_nack: bool, +} + +pub struct RabbitMqBackend; + +#[async_trait] +impl TaskQueueBackend + for RabbitMqBackend +{ + type PairConfig = RabbitMqConfig; + + type Delivery = RabbitMqDelivery; + + type Producer = RabbitMqProducer; + type Consumer = RabbitMqConsumer; + + async fn new_pair( + cfg: RabbitMqConfig, + ) -> Result<(RabbitMqProducer, RabbitMqConsumer), QueueError> { + let conn = Connection::connect(&cfg.uri, cfg.connection_properties) + .await + 
.map_err(QueueError::generic)?; + + let channel_tx = conn.create_channel().await.map_err(QueueError::generic)?; + let channel_rx = conn.create_channel().await.map_err(QueueError::generic)?; + + Ok(( + RabbitMqProducer { + channel: channel_tx, + exchange: cfg.publish_exchange, + routing_key: cfg.publish_routing_key, + options: cfg.publish_options, + properties: cfg.publish_properites, + }, + RabbitMqConsumer { + consumer: channel_rx + .basic_consume( + &cfg.consume_queue, + &cfg.consumer_tag, + cfg.consume_options, + cfg.consume_arguments, + ) + .await + .map_err(QueueError::generic)?, + requeue_on_nack: cfg.requeue_on_nack, + }, + )) + } + + async fn producing_half(cfg: RabbitMqConfig) -> Result { + let conn = Connection::connect(&cfg.uri, cfg.connection_properties) + .await + .map_err(QueueError::generic)?; + + let channel_tx = conn.create_channel().await.map_err(QueueError::generic)?; + + Ok(RabbitMqProducer { + channel: channel_tx, + exchange: cfg.publish_exchange, + routing_key: cfg.publish_routing_key, + options: cfg.publish_options, + properties: cfg.publish_properites, + }) + } + + async fn consuming_half(cfg: RabbitMqConfig) -> Result { + let conn = Connection::connect(&cfg.uri, cfg.connection_properties) + .await + .map_err(QueueError::generic)?; + + let channel_rx = conn.create_channel().await.map_err(QueueError::generic)?; + + Ok(RabbitMqConsumer { + consumer: channel_rx + .basic_consume( + &cfg.consume_queue, + &cfg.consumer_tag, + cfg.consume_options, + cfg.consume_arguments, + ) + .await + .map_err(QueueError::generic)?, + requeue_on_nack: cfg.requeue_on_nack, + }) + } +} + +pub struct RabbitMqDelivery { + requeue_on_nack: bool, + acker: Acker, + body: Vec, + + _pd: PhantomData, +} + +#[async_trait] +impl Delivery for RabbitMqDelivery { + fn payload(&self) -> Result { + serde_json::from_slice(&self.body).map_err(Into::into) + } + + async fn ack(self) -> Result<(), QueueError> { + self.acker + .ack(BasicAckOptions { multiple: false }) + .await + 
.map_err(QueueError::generic) + } + + async fn nack(self) -> Result<(), QueueError> { + self.acker + .nack(BasicNackOptions { + multiple: false, + requeue: self.requeue_on_nack, + }) + .await + .map_err(QueueError::generic) + } +} + +pub struct RabbitMqProducer { + channel: Channel, + exchange: String, + routing_key: String, + options: BasicPublishOptions, + properties: BasicProperties, +} + +#[async_trait] +impl TaskQueueSend for RabbitMqProducer { + async fn send(&self, payload: T) -> Result<(), QueueError> { + self.channel + .basic_publish( + &self.exchange, + &self.routing_key, + self.options, + &serde_json::to_vec(&payload)?, + self.properties.clone(), + ) + .await + .map_err(QueueError::generic)?; + + Ok(()) + } +} + +pub struct RabbitMqConsumer { + consumer: Consumer, + requeue_on_nack: bool, +} + +#[async_trait] +impl TaskQueueReceive> + for RabbitMqConsumer +{ + async fn receive_all( + &mut self, + max_batch_size: usize, + timeout: Duration, + ) -> Result>, QueueError> { + let mut stream = self + .consumer + .clone() + .map(|l: Result| -> Result, QueueError> { + let l = l.map_err(QueueError::generic)?; + Ok(RabbitMqDelivery { + acker: l.acker, + body: l.data, + requeue_on_nack: self.requeue_on_nack, + _pd: PhantomData, + }) + }); + let mut out = Vec::new(); + + if let Some(delivery) = stream.next().await { + out.push(delivery?); + + let mut interval = tokio::time::interval(timeout); + // Skip the instant first period + interval.tick().await; + + loop { + tokio::select! 
{ + _ = interval.tick() => break, + delivery = stream.next() => { + if let Some(delivery) = delivery { + out.push(delivery?); + + if out.len() >= max_batch_size { + break; + } + } + } + } + } + } + + Ok(out) + } +} diff --git a/webhook-bridge/generic-queue/src/redis.rs b/webhook-bridge/generic-queue/src/redis.rs new file mode 100644 index 000000000..2bc1e4ae2 --- /dev/null +++ b/webhook-bridge/generic-queue/src/redis.rs @@ -0,0 +1,310 @@ +use std::{collections::HashMap, marker::PhantomData, time::Duration}; + +use async_trait::async_trait; +use bb8_redis::RedisConnectionManager; +use redis::streams::{StreamId, StreamKey, StreamReadOptions, StreamReadReply}; +use serde::{ + de::{DeserializeOwned, Error, Unexpected}, + Serialize, +}; + +use crate::{Delivery, QueueError, TaskQueueBackend, TaskQueueReceive, TaskQueueSend}; + +pub struct RedisConfig { + pub dsn: String, + pub max_connections: u16, + pub reinsert_on_nack: bool, + pub queue_key: String, + pub consumer_group: String, + pub consumer_name: String, +} + +pub struct RedisQueueBackend { + _pd_serde: PhantomData, +} + +#[async_trait] +impl< + S: RedisStreamSerdeScheme, + T: 'static + RedisStreamDeserialize + RedisStreamSerialize + Send + Sync, + > TaskQueueBackend for RedisQueueBackend +{ + type PairConfig = RedisConfig; + + type Delivery = RedisStreamDelivery; + + type Producer = RedisStreamProducer; + type Consumer = RedisStreamConsumer; + + async fn new_pair( + cfg: RedisConfig, + ) -> Result<(RedisStreamProducer, RedisStreamConsumer), QueueError> { + let redis = RedisConnectionManager::new(cfg.dsn).map_err(QueueError::generic)?; + let redis = bb8::Pool::builder() + .max_size(cfg.max_connections.into()) + .build(redis) + .await + .map_err(QueueError::generic)?; + + Ok(( + RedisStreamProducer { + redis: redis.clone(), + queue_key: cfg.queue_key.clone(), + _pd_serde: PhantomData, + }, + RedisStreamConsumer { + redis, + queue_key: cfg.queue_key, + consumer_group: cfg.consumer_group, + consumer_name: 
cfg.consumer_name, + reinsert_on_nack: cfg.reinsert_on_nack, + _pd_serde: PhantomData, + }, + )) + } + + async fn producing_half(cfg: RedisConfig) -> Result, QueueError> { + let redis = RedisConnectionManager::new(cfg.dsn).map_err(QueueError::generic)?; + let redis = bb8::Pool::builder() + .max_size(cfg.max_connections.into()) + .build(redis) + .await + .map_err(QueueError::generic)?; + + Ok(RedisStreamProducer { + redis, + queue_key: cfg.queue_key, + _pd_serde: PhantomData, + }) + } + + async fn consuming_half(cfg: RedisConfig) -> Result, QueueError> { + let redis = RedisConnectionManager::new(cfg.dsn).map_err(QueueError::generic)?; + let redis = bb8::Pool::builder() + .max_size(cfg.max_connections.into()) + .build(redis) + .await + .map_err(QueueError::generic)?; + + Ok(RedisStreamConsumer { + redis, + queue_key: cfg.queue_key, + consumer_group: cfg.consumer_group, + consumer_name: cfg.consumer_name, + reinsert_on_nack: cfg.reinsert_on_nack, + _pd_serde: PhantomData, + }) + } +} + +pub trait RedisStreamSerdeScheme: Send + Sized + Sync {} +pub trait RedisStreamSerialize { + fn into_redis_stream_map(self) -> Result, QueueError>; +} +pub trait RedisStreamDeserialize: Sized { + fn from_redis_stream_map(map: &HashMap) -> Result; +} + +pub struct RedisStreamJsonSerde; +impl RedisStreamSerdeScheme for RedisStreamJsonSerde {} + +impl RedisStreamSerialize for T { + fn into_redis_stream_map(self) -> Result, QueueError> { + Ok(vec![("payload".to_owned(), serde_json::to_string(&self)?)]) + } +} + +impl RedisStreamDeserialize for T { + fn from_redis_stream_map(map: &HashMap) -> Result { + let Some(payload) = map.get("payload") else { + return Err(QueueError::Serde(serde_json::Error::missing_field( + "payload", + ))) + }; + + match payload { + redis::Value::Data(bytes) => { + let s = std::str::from_utf8(bytes).map_err(|_| { + QueueError::Serde(serde_json::Error::invalid_type( + Unexpected::Other("unknown"), + &"string", + )) + })?; + + Ok(serde_json::from_str(s)?) 
+ } + redis::Value::Nil => Err(QueueError::Serde(serde_json::Error::invalid_type( + Unexpected::Other("nil"), + &"string", + ))), + redis::Value::Int(i) => Err(QueueError::Serde(serde_json::Error::invalid_type( + Unexpected::Signed(*i), + &"string", + ))), + redis::Value::Bulk(_) => Err(QueueError::Serde(serde_json::Error::invalid_type( + Unexpected::Seq, + &"string", + ))), + redis::Value::Status(_) => Err(QueueError::Serde(serde_json::Error::invalid_type( + Unexpected::Other("status"), + &"string", + ))), + redis::Value::Okay => Err(QueueError::Serde(serde_json::Error::invalid_type( + Unexpected::Other("okay"), + &"string", + ))), + } + } +} + +pub struct RedisStreamDelivery { + body: StreamId, + + reinsert_on_nack: bool, + + redis: bb8::Pool, + queue_key: String, + consumer_group: String, + entry_id: String, + + _pd_payload: PhantomData, + _pd_serde: PhantomData, +} + +#[async_trait] +impl< + S: RedisStreamSerdeScheme, + T: RedisStreamDeserialize + RedisStreamSerialize + Send + Sync, + > Delivery for RedisStreamDelivery +{ + fn payload(&self) -> Result { + T::from_redis_stream_map(&self.body.map).map_err(Into::into) + } + + async fn ack(self) -> Result<(), QueueError> { + let mut conn = self.redis.get().await.map_err(QueueError::generic)?; + redis::Cmd::xack(self.queue_key, self.consumer_group, &[self.entry_id]) + .query_async(&mut *conn) + .await + .map_err(QueueError::generic)?; + + Ok(()) + } + + async fn nack(self) -> Result<(), QueueError> { + if self.reinsert_on_nack { + let mut conn = self.redis.get().await.map_err(QueueError::generic)?; + + // FIXME: Transaction? 
+ redis::Cmd::xadd( + &self.queue_key, + "*", + &T::from_redis_stream_map(&self.body.map)?.into_redis_stream_map()?, + ) + .query_async(&mut *conn) + .await + .map_err(QueueError::generic)?; + + redis::Cmd::xack(self.queue_key, self.consumer_group, &[self.entry_id]) + .query_async(&mut *conn) + .await + .map_err(QueueError::generic)?; + + Ok(()) + } else { + Ok(()) + } + } +} + +pub struct RedisStreamProducer { + redis: bb8::Pool, + queue_key: String, + + _pd_serde: PhantomData, +} + +#[async_trait] +impl + Send + Sync> TaskQueueSend + for RedisStreamProducer +{ + async fn send(&self, payload: T) -> Result<(), QueueError> { + let mut conn = self.redis.get().await.map_err(QueueError::generic)?; + redis::Cmd::xadd(&self.queue_key, "*", &payload.into_redis_stream_map()?) + .query_async(&mut *conn) + .await + .map_err(QueueError::generic)?; + + Ok(()) + } +} + +pub struct RedisStreamConsumer { + redis: bb8::Pool, + queue_key: String, + consumer_group: String, + consumer_name: String, + reinsert_on_nack: bool, + + _pd_serde: PhantomData, +} + +#[async_trait] +impl< + S: RedisStreamSerdeScheme, + T: RedisStreamDeserialize + RedisStreamSerialize + Send + Sync, + > TaskQueueReceive> for RedisStreamConsumer +{ + async fn receive_all( + &mut self, + max_batch_size: usize, + timeout: Duration, + ) -> Result>, QueueError> { + let mut conn = self.redis.get().await.map_err(QueueError::generic)?; + + // Ensure an empty vec is never returned + let queue: StreamKey = loop { + let read_out: StreamReadReply = redis::Cmd::xread_options( + &[&self.queue_key], + &[">"], + &StreamReadOptions::default() + .group(&self.consumer_group, &self.consumer_name) + .block( + timeout + .as_millis() + .try_into() + .map_err(QueueError::generic)?, + ) + .count(max_batch_size), + ) + .query_async(&mut *conn) + .await + .map_err(QueueError::generic)?; + + let queue = read_out.keys.into_iter().next().ok_or(QueueError::NoData)?; + + if !queue.ids.is_empty() { + break queue; + } + }; + + Ok(queue + .ids + 
.into_iter() + .map(|stream_id| { + Ok(RedisStreamDelivery { + entry_id: stream_id.id.clone(), + body: stream_id, + reinsert_on_nack: self.reinsert_on_nack, + redis: self.redis.clone(), + queue_key: self.queue_key.clone(), + consumer_group: self.consumer_group.clone(), + _pd_serde: PhantomData, + _pd_payload: PhantomData, + }) + }) + .collect::, QueueError>>()?) + } +} + +// TODO: Test that the pending entries list doesn't fill diff --git a/webhook-bridge/generic-queue/src/sqs.rs b/webhook-bridge/generic-queue/src/sqs.rs new file mode 100644 index 000000000..f72ff0a88 --- /dev/null +++ b/webhook-bridge/generic-queue/src/sqs.rs @@ -0,0 +1,206 @@ +use std::{marker::PhantomData, time::Duration}; + +use async_trait::async_trait; +use aws_sdk_sqs::{ + operation::delete_message::DeleteMessageError, types::error::ReceiptHandleIsInvalid, Client, +}; +use serde::{de::DeserializeOwned, Serialize}; + +use crate::{Delivery, QueueError, TaskQueueBackend, TaskQueueReceive, TaskQueueSend}; + +pub struct SqsConfig { + pub queue_dsn: String, + pub override_endpoint: bool, +} + +pub struct SqsQueueBackend; + +#[async_trait] +impl TaskQueueBackend + for SqsQueueBackend +{ + type PairConfig = SqsConfig; + + type Delivery = SqsDelivery; + + type Producer = SqsQueueProducer; + type Consumer = SqsQueueConsumer; + + async fn new_pair(cfg: SqsConfig) -> Result<(SqsQueueProducer, SqsQueueConsumer), QueueError> { + let aws_cfg = if cfg.override_endpoint { + aws_config::from_env() + .endpoint_url(&cfg.queue_dsn) + .load() + .await + } else { + aws_config::load_from_env().await + }; + + let client = Client::new(&aws_cfg); + + let producer = SqsQueueProducer { + client: client.clone(), + queue_dsn: cfg.queue_dsn.clone(), + }; + let consumer = SqsQueueConsumer { + client, + queue_dsn: cfg.queue_dsn, + }; + + Ok((producer, consumer)) + } + + async fn producing_half(cfg: SqsConfig) -> Result { + let aws_cfg = if cfg.override_endpoint { + aws_config::from_env() + .endpoint_url(&cfg.queue_dsn) + 
.load() + .await + } else { + aws_config::load_from_env().await + }; + + let client = Client::new(&aws_cfg); + + let producer = SqsQueueProducer { + client, + queue_dsn: cfg.queue_dsn, + }; + + Ok(producer) + } + + async fn consuming_half(cfg: SqsConfig) -> Result { + let aws_cfg = if cfg.override_endpoint { + aws_config::from_env() + .endpoint_url(&cfg.queue_dsn) + .load() + .await + } else { + aws_config::load_from_env().await + }; + + let client = Client::new(&aws_cfg); + + let consumer = SqsQueueConsumer { + client, + queue_dsn: cfg.queue_dsn, + }; + + Ok(consumer) + } +} + +pub struct SqsDelivery { + ack_client: Client, + // FIXME: Cow/Arc this stuff? + queue_dsn: String, + body: String, + receipt_handle: Option, + _pd: PhantomData, +} + +#[async_trait] +impl Delivery for SqsDelivery { + fn payload(&self) -> Result { + serde_json::from_str(&self.body).map_err(Into::into) + } + + async fn ack(self) -> Result<(), QueueError> { + if let Some(receipt_handle) = self.receipt_handle { + self.ack_client + .delete_message() + .queue_url(&self.queue_dsn) + .receipt_handle(receipt_handle) + .send() + .await + .map_err(QueueError::generic)?; + + Ok(()) + } else { + Err(QueueError::generic( + DeleteMessageError::ReceiptHandleIsInvalid( + ReceiptHandleIsInvalid::builder() + .message("receipt handle must be Some to be acked") + .build(), + ), + )) + } + } + + async fn nack(self) -> Result<(), QueueError> { + Ok(()) + } +} + +pub struct SqsQueueProducer { + client: Client, + queue_dsn: String, +} + +#[async_trait] +impl TaskQueueSend for SqsQueueProducer { + async fn send(&self, payload: T) -> Result<(), QueueError> { + self.client + .send_message() + .queue_url(&self.queue_dsn) + .message_body(serde_json::to_string(&payload)?) 
+ .send() + .await + .map_err(QueueError::generic)?; + + Ok(()) + } +} + +pub struct SqsQueueConsumer { + client: Client, + queue_dsn: String, +} + +#[async_trait] +impl TaskQueueReceive> + for SqsQueueConsumer +{ + async fn receive_all( + &mut self, + max_batch_size: usize, + timeout: Duration, + ) -> Result>, QueueError> { + // Ensure that there's at least one message before returning regardless of timeout + let out = loop { + let out = self + .client + .receive_message() + .set_wait_time_seconds(Some( + timeout.as_secs().try_into().map_err(QueueError::generic)?, + )) + .set_max_number_of_messages(Some( + max_batch_size.try_into().map_err(QueueError::generic)?, + )) + .queue_url(&self.queue_dsn) + .send() + .await + .map_err(QueueError::generic)?; + + if !out.messages().unwrap_or_default().is_empty() { + break out; + } + }; + + Ok(out + .messages() + .unwrap_or_default() + .iter() + .map(|message| -> Result, QueueError> { + Ok(SqsDelivery { + ack_client: self.client.clone(), + queue_dsn: self.queue_dsn.clone(), + body: message.body().unwrap_or_default().to_owned(), + receipt_handle: message.receipt_handle().map(ToOwned::to_owned), + _pd: PhantomData, + }) + }) + .collect::, _>>()?) 
+ } +} diff --git a/webhook-bridge/run-tests.sh b/webhook-bridge/run-tests.sh new file mode 100755 index 000000000..9c988e95a --- /dev/null +++ b/webhook-bridge/run-tests.sh @@ -0,0 +1,8 @@ +#!/bin/sh -e + +AWS_DEFAULT_REGION="elasticmq" \ +AWS_ACCESS_KEY_ID="x" \ +AWS_SECRET_ACCESS_KEY="x" \ +PUBSUB_EMULATOR_HOST=localhost:8085 \ +PUBSUB_PROJECT_ID=local-project \ +cargo test --all-features -- "$@" diff --git a/webhook-bridge/svix-agent-plugin-generic/Cargo.toml b/webhook-bridge/svix-agent-plugin-generic/Cargo.toml new file mode 100644 index 000000000..f9560b9b6 --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-generic/Cargo.toml @@ -0,0 +1,32 @@ +[package] +name = "svix-agent-plugin-generic" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +futures-lite = "1.12.0" +generic_queue = { path = "../generic-queue" } +serde_json = "1.0" +serde = { version = "1.0", features = ["derive"] } +svix = "0.84.1" +svix-agent-types = { path = "../svix-agent-types" } +tokio = { version = "1", features = ["full"] } +tokio-executor-trait = "2.1" +tokio-reactor-trait = "1.1" +tracing = "0.1" + +[dev-dependencies] +aws-config = "0.55" +aws-sdk-sqs = "0.25" +fastrand = "1.9" +google-cloud-auth = { version = "0.9.2" } +google-cloud-default = { version = "0.2.0", features = ["pubsub"]} +google-cloud-gax = { version = "0.14.1" } +google-cloud-googleapis = { version = "0.8.0" } +google-cloud-pubsub = { version="0.14.1" } +lapin = "2" +redis = { version = "0.21", features = ["tokio-comp", "streams"] } +tracing-subscriber = "0.3" +wiremock = "0.5.18" \ No newline at end of file diff --git a/webhook-bridge/svix-agent-plugin-generic/src/config.rs b/webhook-bridge/svix-agent-plugin-generic/src/config.rs new file mode 100644 index 000000000..a316319eb --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-generic/src/config.rs @@ -0,0 +1,88 @@ +use 
generic_queue::rabbitmq::{BasicConsumeOptions, FieldTable}; +use serde::Deserialize; +use std::path::PathBuf; +use svix::api::SvixOptions as _SvixOptions; + +#[derive(Debug, Default, Deserialize)] +pub struct RabbitMqConsumerConfig { + pub input: RabbitMqInputOpts, + pub output: OutputOpts, +} + +#[derive(Debug, Default, Deserialize)] +pub struct RedisConsumerConfig { + pub input: RedisInputOpts, + pub output: OutputOpts, +} + +#[derive(Debug, Default, Deserialize)] +pub struct SqsConsumerConfig { + pub input: SqsInputOpts, + pub output: OutputOpts, +} + +#[derive(Debug, Default, Deserialize)] +pub struct GCPPubSubConsumerConfig { + pub input: GCPPubSubInputOpts, + pub output: OutputOpts, +} + +// N.b. the codegen types we get from openapi don't impl Deserialize so we need our own version. +#[derive(Debug, Default, Deserialize)] +pub struct SvixOptions { + #[serde(default)] + pub debug: bool, + pub server_url: Option, +} + +impl From for _SvixOptions { + fn from(SvixOptions { debug, server_url }: SvixOptions) -> Self { + _SvixOptions { debug, server_url } + } +} + +#[derive(Debug, Default, Deserialize)] +pub struct OutputOpts { + /// Svix API token for the client. + pub token: String, + /// Options for the Svix client. + pub svix_options: Option, +} + +#[derive(Debug, Default, Deserialize)] +pub struct RabbitMqInputOpts { + /// Connection string for RabbitMQ. + pub uri: String, + /// The name of the queue to consume from. + /// N.b. the queue must be declared before the consumer can connect to it. + pub queue_name: String, + /// Identifier for the consumer. 
+ pub consumer_tag: Option, + + pub consume_opts: Option, + pub consume_args: Option, + + pub requeue_on_nack: bool, +} + +#[derive(Debug, Default, Deserialize)] +pub struct RedisInputOpts { + pub dsn: String, + pub max_connections: u16, + pub reinsert_on_nack: bool, + pub queue_key: String, + pub consumer_group: String, + pub consumer_name: String, +} + +#[derive(Debug, Default, Deserialize)] +pub struct SqsInputOpts { + pub queue_dsn: String, + pub override_endpoint: bool, +} + +#[derive(Debug, Default, Deserialize)] +pub struct GCPPubSubInputOpts { + pub subscription_id: String, + pub credentials_file: Option, +} diff --git a/webhook-bridge/svix-agent-plugin-generic/src/error.rs b/webhook-bridge/svix-agent-plugin-generic/src/error.rs new file mode 100644 index 000000000..3b7812a9d --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-generic/src/error.rs @@ -0,0 +1,37 @@ +use generic_queue::QueueError; + +pub enum Error { + Payload(String), + Json(serde_json::Error), + Queue(QueueError), + Svix(svix::error::Error), +} + +impl From for Error { + fn from(value: svix::error::Error) -> Self { + Error::Svix(value) + } +} + +impl From for Error { + fn from(value: serde_json::Error) -> Self { + Error::Json(value) + } +} + +impl From for Error { + fn from(value: QueueError) -> Self { + Error::Queue(value) + } +} + +impl From for std::io::Error { + fn from(value: Error) -> Self { + match value { + Error::Payload(e) => std::io::Error::new(std::io::ErrorKind::Other, e), + Error::Json(e) => std::io::Error::new(std::io::ErrorKind::Other, e), + Error::Queue(e) => std::io::Error::new(std::io::ErrorKind::Other, e), + Error::Svix(e) => std::io::Error::new(std::io::ErrorKind::Other, e), + } + } +} diff --git a/webhook-bridge/svix-agent-plugin-generic/src/gcp_pubsub/mod.rs b/webhook-bridge/svix-agent-plugin-generic/src/gcp_pubsub/mod.rs new file mode 100644 index 000000000..0e60a7a82 --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-generic/src/gcp_pubsub/mod.rs @@ -0,0 +1,142 
@@ +use crate::config::{GCPPubSubConsumerConfig, GCPPubSubInputOpts}; +use crate::error::Error; +use crate::PLUGIN_NAME; +use crate::PLUGIN_VERS; +use crate::{create_svix_message, CreateMessageRequest}; +use generic_queue::gcp_pubsub::{ + GCPPubSubConfig, GCPPubSubDelivery, GCPPubSubQueueBackend, GCPPubSubQueueConsumer, +}; +use generic_queue::{Delivery, TaskQueueBackend, TaskQueueReceive}; +use std::time::{Duration, Instant}; +use svix::api::Svix; +use svix_agent_types::{async_trait, Plugin}; +use tracing::instrument; + +pub struct GCPPubSubConsumerPlugin { + input_options: GCPPubSubInputOpts, + svix_client: Svix, +} + +impl GCPPubSubConsumerPlugin { + pub fn new(GCPPubSubConsumerConfig { input, output }: GCPPubSubConsumerConfig) -> Self { + Self { + input_options: input, + svix_client: Svix::new(output.token, output.svix_options.map(Into::into)), + } + } + + /// Pulls N messages off the queue and feeds them to [`Self::process`]. + #[instrument(skip_all, + fields( + otel.kind = "CONSUMER", + messaging.system = "gcp-pubsub", + messaging.operation = "receive", + messaging.source = &self.input_options.subscription_id, + svixagent_plugin.name = PLUGIN_NAME, + svixagent_plugin.vers = PLUGIN_VERS, + ) + )] + async fn receive(&self, consumer: &mut GCPPubSubQueueConsumer) -> std::io::Result<()> { + let deliveries = consumer + .receive_all(1, Duration::from_millis(10)) + .await + .map_err(Error::from)?; + tracing::trace!("received: {}", deliveries.len()); + for delivery in deliveries { + self.process(delivery).await?; + } + Ok(()) + } + + /// Parses the delivery as JSON and feeds it into [`create_svix_message`]. + /// Will nack the delivery if either the JSON parse step, or the request to svix fails. 
+ #[instrument(skip_all, fields(messaging.operation = "process"))] + async fn process(&self, delivery: GCPPubSubDelivery) -> std::io::Result<()> { + let payload = match Delivery::::payload(&delivery) { + Ok(p) => p, + Err(e) => { + tracing::warn!("{e}"); + delivery.nack().await.map_err(Error::from)?; + return Ok(()); + } + }; + + match create_svix_message(&self.svix_client, payload).await { + Ok(_) => { + tracing::trace!("ack"); + delivery.ack().await.map_err(Error::from)? + } + Err(e) => { + tracing::error!("nack: {e}"); + delivery.nack().await.map_err(Error::from)? + } + } + Ok(()) + } + + async fn consume(&self) -> std::io::Result<()> { + let mut consumer = + >::consuming_half( + GCPPubSubConfig { + subscription_id: self.input_options.subscription_id.clone(), + credentials_file: self.input_options.credentials_file.clone(), + // Topics are for producers so we don't care + topic: String::new(), + }, + ) + .await + .map_err(Error::from)?; + + tracing::debug!( + "gcp pubsub consuming: {}", + &self.input_options.subscription_id + ); + + loop { + self.receive(&mut consumer).await?; + } + } +} + +impl TryInto> for GCPPubSubConsumerConfig { + type Error = &'static str; + + fn try_into(self) -> Result, Self::Error> { + Ok(Box::new(GCPPubSubConsumerPlugin::new(self))) + } +} + +#[async_trait] +impl Plugin for GCPPubSubConsumerPlugin { + async fn run(&self) -> std::io::Result<()> { + let mut fails: u64 = 0; + let mut last_fail = Instant::now(); + + tracing::info!( + "gcp pubsub starting: {}", + &self.input_options.subscription_id + ); + + loop { + if let Err(e) = self.consume().await { + tracing::error!("{e}"); + } + + tracing::error!( + "gcp pubsub disconnected: {}", + &self.input_options.subscription_id + ); + + if last_fail.elapsed() > Duration::from_secs(10) { + // reset the fail count if we didn't have a hiccup in the past short while. 
+ tracing::trace!("been a while since last fail, resetting count"); + fails = 0; + } else { + fails += 1; + } + + last_fail = Instant::now(); + tokio::time::sleep(Duration::from_millis((300 * fails).min(3000))).await; + } + } +} diff --git a/webhook-bridge/svix-agent-plugin-generic/src/lib.rs b/webhook-bridge/svix-agent-plugin-generic/src/lib.rs new file mode 100644 index 000000000..199c51f6a --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-generic/src/lib.rs @@ -0,0 +1,397 @@ +use std::time::{Duration, Instant}; + +use generic_queue::{ + rabbitmq::{ + BasicProperties, BasicPublishOptions, ConnectionProperties, RabbitMqBackend, RabbitMqConfig, + }, + redis::{RedisConfig, RedisQueueBackend}, + sqs::{SqsConfig, SqsQueueBackend}, + Delivery, TaskQueueBackend, TaskQueueReceive, +}; +use serde::{Deserialize, Serialize}; +use svix::api::{MessageIn, PostOptions as PostOptions_, Svix}; +use svix_agent_types::{async_trait, Plugin}; + +pub mod config; +pub use config::{ + GCPPubSubConsumerConfig, RabbitMqConsumerConfig, RabbitMqInputOpts, RedisConsumerConfig, + RedisInputOpts, SqsConsumerConfig, SqsInputOpts, +}; +mod error; +use error::Error; +mod gcp_pubsub; +pub use gcp_pubsub::GCPPubSubConsumerPlugin; + +pub const PLUGIN_NAME: &str = env!("CARGO_PKG_NAME"); +pub const PLUGIN_VERS: &str = env!("CARGO_PKG_VERSION"); + +pub struct RabbitMqConsumerPlugin { + input_options: RabbitMqInputOpts, + svix_client: Svix, +} + +pub struct RedisConsumerPlugin { + input_options: RedisInputOpts, + svix_client: Svix, +} + +pub struct SqsConsumerPlugin { + input_options: SqsInputOpts, + svix_client: Svix, +} + +impl TryInto> for RabbitMqConsumerConfig { + type Error = &'static str; + + fn try_into(self) -> Result, Self::Error> { + Ok(Box::new(RabbitMqConsumerPlugin::new(self))) + } +} + +impl TryInto> for RedisConsumerConfig { + type Error = &'static str; + + fn try_into(self) -> Result, Self::Error> { + Ok(Box::new(RedisConsumerPlugin::new(self))) + } +} + +impl TryInto> for 
SqsConsumerConfig { + type Error = &'static str; + + fn try_into(self) -> Result, Self::Error> { + Ok(Box::new(SqsConsumerPlugin::new(self))) + } +} + +impl RabbitMqConsumerPlugin { + pub fn new(RabbitMqConsumerConfig { input, output }: RabbitMqConsumerConfig) -> Self { + Self { + input_options: input, + svix_client: Svix::new(output.token, output.svix_options.map(Into::into)), + } + } + + async fn consume(&self) -> std::io::Result<()> { + let mut consumer = + >::consuming_half( + RabbitMqConfig { + uri: self.input_options.uri.clone(), + connection_properties: ConnectionProperties::default(), + publish_exchange: String::new(), + publish_routing_key: String::new(), + publish_options: BasicPublishOptions::default(), + publish_properites: BasicProperties::default(), + consume_queue: self.input_options.queue_name.clone(), + consumer_tag: self.input_options.consumer_tag.clone().unwrap_or_default(), + consume_options: self.input_options.consume_opts.unwrap_or_default(), + consume_arguments: self.input_options.consume_args.clone().unwrap_or_default(), + requeue_on_nack: self.input_options.requeue_on_nack, + }, + ) + .await + .map_err(Error::from)?; + + tracing::debug!("rabbitmq consuming: {}", &self.input_options.queue_name); + + // FIXME: `while let` swallows errors from `receive_all`. 
+ while let Ok(deliveries) = consumer.receive_all(1, Duration::from_millis(10)).await { + let span = tracing::error_span!( + "receive", + otel.kind = "CONSUMER", + messaging.system = "rabbitmq", + messaging.operation = "receive", + messaging.source = &self.input_options.queue_name, + svixagent_plugin.name = PLUGIN_NAME, + svixagent_plugin.vers = PLUGIN_VERS, + ); + let _enter = span.enter(); + tracing::trace!("received: {}", deliveries.len()); + + for delivery in deliveries { + let span = tracing::error_span!("process", messaging.operation = "process"); + let _enter = span.enter(); + + let payload = match Delivery::::payload(&delivery) { + Ok(p) => p, + Err(e) => { + tracing::warn!("nack: {e}"); + delivery.nack().await.map_err(Error::from)?; + continue; + } + }; + + match create_svix_message(&self.svix_client, payload).await { + Ok(_) => { + tracing::trace!("ack"); + delivery.ack().await.map_err(Error::from)? + } + + Err(e) => { + tracing::error!("nack: {e}"); + delivery.nack().await.map_err(Error::from)? + } + } + } + } + + Ok(()) + } +} + +#[async_trait] +impl Plugin for RabbitMqConsumerPlugin { + async fn run(&self) -> std::io::Result<()> { + let mut fails: u64 = 0; + let mut last_fail = Instant::now(); + + tracing::info!("rabbitmq starting: {}", &self.input_options.queue_name); + + loop { + if let Err(e) = self.consume().await { + tracing::error!("{e}"); + } + tracing::error!("rabbitmq disconnected: {}", &self.input_options.queue_name); + + if last_fail.elapsed() > Duration::from_secs(10) { + // reset the fail count if we didn't have a hiccup in the past short while. 
+ tracing::trace!("been a while since last fail, resetting count"); + fails = 0; + } else { + fails += 1; + } + + last_fail = Instant::now(); + tokio::time::sleep(Duration::from_millis((300 * fails).min(3000))).await; + } + } +} + +impl RedisConsumerPlugin { + pub fn new(RedisConsumerConfig { input, output }: RedisConsumerConfig) -> Self { + Self { + input_options: input, + svix_client: Svix::new(output.token, output.svix_options.map(Into::into)), + } + } + + async fn consume(&self) -> std::io::Result<()> { + let mut consumer = + >::consuming_half( + RedisConfig { + dsn: self.input_options.dsn.clone(), + max_connections: self.input_options.max_connections, + reinsert_on_nack: self.input_options.reinsert_on_nack, + queue_key: self.input_options.queue_key.clone(), + consumer_group: self.input_options.consumer_group.clone(), + consumer_name: self.input_options.consumer_name.clone(), + }, + ) + .await + .map_err(Error::from)?; + + tracing::debug!("redis consuming: {}", &self.input_options.queue_key); + // FIXME: `while let` swallows errors from `receive_all`. + while let Ok(deliveries) = consumer.receive_all(1, Duration::from_millis(10)).await { + let span = tracing::error_span!( + "receive", + otel.kind = "CONSUMER", + messaging.system = "redis", + messaging.operation = "receive", + messaging.source = &self.input_options.queue_key, + svixagent_plugin.name = PLUGIN_NAME, + svixagent_plugin.vers = PLUGIN_VERS, + ); + let _enter = span.enter(); + tracing::trace!("received: {}", deliveries.len()); + + for delivery in deliveries { + let span = tracing::error_span!("process", messaging.operation = "process"); + let _enter = span.enter(); + + let payload = match Delivery::::payload(&delivery) { + Ok(p) => p, + Err(e) => { + tracing::warn!("nack: {e}"); + delivery.nack().await.map_err(Error::from)?; + continue; + } + }; + + match create_svix_message(&self.svix_client, payload).await { + Ok(_) => { + tracing::trace!("ack"); + delivery.ack().await.map_err(Error::from)? 
+ } + Err(e) => { + tracing::error!("nack: {e}"); + delivery.nack().await.map_err(Error::from)? + } + } + } + } + + Ok(()) + } +} + +#[async_trait] +impl Plugin for RedisConsumerPlugin { + async fn run(&self) -> std::io::Result<()> { + let mut fails: u64 = 0; + let mut last_fail = Instant::now(); + + tracing::info!("redis starting: {}", &self.input_options.queue_key); + + loop { + if let Err(e) = self.consume().await { + tracing::error!("{e}"); + } + + tracing::error!("redis disconnected: {}", &self.input_options.queue_key); + if last_fail.elapsed() > Duration::from_secs(10) { + // reset the fail count if we didn't have a hiccup in the past short while. + tracing::trace!("been a while since last fail, resetting count"); + fails = 0; + } else { + fails += 1; + } + + last_fail = Instant::now(); + tokio::time::sleep(Duration::from_millis((300 * fails).min(3000))).await; + } + } +} + +impl SqsConsumerPlugin { + pub fn new(SqsConsumerConfig { input, output }: SqsConsumerConfig) -> Self { + Self { + input_options: input, + svix_client: Svix::new(output.token, output.svix_options.map(Into::into)), + } + } + + async fn consume(&self) -> std::io::Result<()> { + let mut consumer = + >::consuming_half( + SqsConfig { + queue_dsn: self.input_options.queue_dsn.clone(), + override_endpoint: self.input_options.override_endpoint, + }, + ) + .await + .map_err(Error::from)?; + + tracing::debug!("sqs consuming: {}", &self.input_options.queue_dsn); + // FIXME: `while let` swallows errors from `receive_all`. 
+ while let Ok(deliveries) = consumer.receive_all(1, Duration::from_millis(10)).await { + let span = tracing::error_span!( + "receive", + otel.kind = "CONSUMER", + messaging.system = "sqs", + messaging.operation = "receive", + messaging.source = &self.input_options.queue_dsn, + svixagent_plugin.name = PLUGIN_NAME, + svixagent_plugin.vers = PLUGIN_VERS, + ); + let _enter = span.enter(); + tracing::trace!("received: {}", deliveries.len()); + + for delivery in deliveries { + let span = tracing::error_span!("process", messaging.operation = "process"); + let _enter = span.enter(); + + let payload = match Delivery::::payload(&delivery) { + Ok(p) => p, + Err(e) => { + tracing::warn!("nack: {e}"); + delivery.nack().await.map_err(Error::from)?; + continue; + } + }; + + match create_svix_message(&self.svix_client, payload).await { + Ok(_) => { + tracing::trace!("ack"); + delivery.ack().await.map_err(Error::from)? + } + Err(e) => { + tracing::error!("nack: {e}"); + delivery.nack().await.map_err(Error::from)? + } + } + } + } + + Ok(()) + } +} + +#[async_trait] +impl Plugin for SqsConsumerPlugin { + async fn run(&self) -> std::io::Result<()> { + let mut fails: u64 = 0; + let mut last_fail = Instant::now(); + + tracing::info!("sqs starting: {}", &self.input_options.queue_dsn); + + loop { + if let Err(e) = self.consume().await { + tracing::error!("{e}"); + } + + tracing::error!("sqs disconnected: {}", &self.input_options.queue_dsn); + + if last_fail.elapsed() > Duration::from_secs(10) { + // reset the fail count if we didn't have a hiccup in the past short while. 
+ tracing::trace!("been a while since last fail, resetting count"); + fails = 0; + } else { + fails += 1; + } + + last_fail = Instant::now(); + tokio::time::sleep(Duration::from_millis((300 * fails).min(3000))).await; + } + } +} +#[derive(Clone, Default, Deserialize, Serialize)] +pub struct PostOptions { + idempotency_key: Option, +} + +impl From for PostOptions_ { + fn from(value: PostOptions) -> Self { + PostOptions_ { + idempotency_key: value.idempotency_key, + } + } +} + +#[derive(Clone, Deserialize, Serialize)] +pub struct CreateMessageRequest { + pub app_id: String, + pub message: MessageIn, + #[serde(skip_serializing_if = "Option::is_none")] + pub post_options: Option, +} + +async fn create_svix_message(svix: &Svix, value: serde_json::Value) -> std::io::Result<()> { + let CreateMessageRequest { + app_id, + message, + post_options, + }: CreateMessageRequest = serde_json::from_value(value)?; + let span = tracing::error_span!( + "create_svix_message", + app_id = app_id, + event_type = message.event_type + ); + let _enter = span.enter(); + + svix.message() + .create(app_id, message, post_options.map(Into::into)) + .await + .map_err(Error::from)?; + Ok(()) +} diff --git a/webhook-bridge/svix-agent-plugin-generic/tests/gcp_pubsub_consumer.rs b/webhook-bridge/svix-agent-plugin-generic/tests/gcp_pubsub_consumer.rs new file mode 100644 index 000000000..366661302 --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-generic/tests/gcp_pubsub_consumer.rs @@ -0,0 +1,343 @@ +//! Use the `testing-docker-compose.yml` in the repo root to run the dependencies for testing, +//! including the gcloud pubsub emulator. +//! +//! Use `run-tests.sh` to use the requisite environment for testing. 
+ +use google_cloud_googleapis::pubsub::v1::{DeadLetterPolicy, PubsubMessage}; +use google_cloud_pubsub::client::{Client, ClientConfig}; +use google_cloud_pubsub::subscription::{Subscription, SubscriptionConfig}; +use google_cloud_pubsub::topic::Topic; +use std::time::Duration; + +use serde_json::json; +use svix::api::MessageIn; +use svix_agent_plugin_generic::config::GCPPubSubInputOpts; +use svix_agent_plugin_generic::{ + config::{OutputOpts, SvixOptions}, + CreateMessageRequest, GCPPubSubConsumerConfig, GCPPubSubConsumerPlugin, +}; +use svix_agent_types::Plugin; +use wiremock::matchers::method; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +const DEFAULT_PUBSUB_EMULATOR_HOST: &str = "localhost:8085"; + +fn get_test_plugin(svix_url: String, subscription_id: String) -> GCPPubSubConsumerPlugin { + GCPPubSubConsumerPlugin::new(GCPPubSubConsumerConfig { + input: GCPPubSubInputOpts { + subscription_id, + credentials_file: None, + }, + output: OutputOpts { + token: "xxxx".to_string(), + svix_options: Some(SvixOptions { + server_url: Some(svix_url), + ..Default::default() + }), + }, + }) +} + +async fn mq_connection() -> Client { + // The `Default` impl for `ClientConfig` looks for this env var. When set it branches for + // local-mode use using the addr in the env var and a hardcoded project id of `local-project`. + if std::env::var("PUBSUB_EMULATOR_HOST").is_err() { + std::env::set_var("PUBSUB_EMULATOR_HOST", DEFAULT_PUBSUB_EMULATOR_HOST); + } + Client::new(ClientConfig::default()).await.unwrap() +} + +fn random_chars() -> impl Iterator { + std::iter::repeat_with(fastrand::alphanumeric) +} + +async fn create_test_queue(client: &Client) -> (Topic, Subscription) { + let topic_name: String = "topic-".chars().chain(random_chars().take(8)).collect(); + // Need to define a dead letter topic to avoid the "bad" test cases from pulling the nacked + // messages again and again. 
+ let dead_letter_topic_name: String = "topic-".chars().chain(random_chars().take(8)).collect(); + let subscription_name: String = "subscription-" + .chars() + .chain(random_chars().take(8)) + .collect(); + + let topic = client.create_topic(&topic_name, None, None).await.unwrap(); + let dead_letter_topic = client + .create_topic(&dead_letter_topic_name, None, None) + .await + .unwrap(); + let subscription = client + .create_subscription( + &subscription_name, + &topic_name, + SubscriptionConfig { + // Messages published to the topic need to supply a unique ID to make use of this + enable_exactly_once_delivery: true, + dead_letter_policy: Some(DeadLetterPolicy { + dead_letter_topic: dead_letter_topic.fully_qualified_name().into(), + max_delivery_attempts: MAX_DELIVERY_ATTEMPTS, + }), + ..Default::default() + }, + None, + ) + .await + .unwrap(); + + (topic, subscription) +} + +async fn publish(topic: &Topic, payload: &str) { + let publisher = topic.new_publisher(None); + let awaiter = publisher + .publish(PubsubMessage { + data: payload.to_owned().into_bytes(), + message_id: random_chars().take(6).collect(), + ..Default::default() + }) + .await; + awaiter.get().await.unwrap(); +} + +/// General "pause while we wait for messages to travel" beat. If you're seeing flakes, bump this up. +const WAIT_MS: u64 = 100; +/// Controls how many times a message can be nack'd before it lands on the dead letter topic. +const MAX_DELIVERY_ATTEMPTS: i32 = 5; + +/// Push a msg on the queue. +/// Check to see if the svix server sees a request. +#[tokio::test] +async fn test_consume_ok() { + let client = mq_connection().await; + let (topic, subscription) = create_test_queue(&client).await; + + let mock_server = MockServer::start().await; + // The mock will make asserts on drop (i.e. when the body of the test is returning). + // The `expect` call should ensure we see exactly 1 POST request. 
+ // + let mock = Mock::given(method("POST")) + .respond_with(ResponseTemplate::new(202).set_body_json(json!({ + "eventType": "testing.things", + "payload": { + "_SVIX_APP_ID": "app_1234", + "_SVIX_EVENT_TYPE": "testing.things", + "hi": "there", + }, + "id": "msg_xxxx", + "timestamp": "2023-04-25T00:00:00Z" + }))) + .named("create_message") + .expect(1); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), subscription.id()); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + let msg = CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }; + + publish(&topic, &serde_json::to_string(&msg).unwrap()).await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + handle.abort(); + + subscription.delete(None).await.ok(); + topic.delete(None).await.ok(); +} + +#[tokio::test] +async fn test_missing_app_id_nack() { + let client = mq_connection().await; + let (topic, subscription) = create_test_queue(&client).await; + + let mock_server = MockServer::start().await; + let mock = Mock::given(method("POST")) + // The response doesn't really matter, but we need to define it to be able to `expect(0)`. + .respond_with(ResponseTemplate::new(400)) + .named("create_message") + // No requests should be made when the event type or app id are missing. 
+ .expect(0); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), subscription.id()); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &topic, + &serde_json::to_string(&json!({ + // No app id + "message": { + "eventType": "testing.things", + "payload": { + "hi": "there", + } + }, + + })) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + handle.abort(); + + subscription.delete(None).await.ok(); + topic.delete(None).await.ok(); +} + +#[tokio::test] +async fn test_missing_event_type_nack() { + let client = mq_connection().await; + let (topic, subscription) = create_test_queue(&client).await; + + let mock_server = MockServer::start().await; + let mock = Mock::given(method("POST")) + // The response doesn't really matter, but we need to define it to be able to `expect(0)`. + .respond_with(ResponseTemplate::new(400)) + .named("create_message") + // No requests should be made when the event type or app id are missing. + .expect(0); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), subscription.id()); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &topic, + &serde_json::to_string(&json!({ + "app_id": "app_1234", + "message": { + // No event type + "payload": { + "hi": "there", + } + }, + })) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. 
+ tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + handle.abort(); + + subscription.delete(None).await.ok(); + topic.delete(None).await.ok(); +} + +/// Check that the plugin keeps running when it can't send a message to svix +#[tokio::test] +async fn test_consume_svix_503() { + let client = mq_connection().await; + let (topic, subscription) = create_test_queue(&client).await; + + let mock_server = MockServer::start().await; + // The mock will make asserts on drop (i.e. when the body of the test is returning). + // The `expect` call should ensure we see exactly 1 POST request. + // + let mock = Mock::given(method("POST")) + .respond_with(ResponseTemplate::new(503)) + .named("create_message") + // N.b. this test case is different than other backend flavors of these since there's a + // minimum of 5 delivery attempts made before messages are forwarded to the dead letter topic. + // In other cases this can happen immediately, but not with gcp pubsub. + .up_to_n_times(MAX_DELIVERY_ATTEMPTS.try_into().unwrap()) + .expect(1..); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), subscription.id()); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &topic, + &serde_json::to_string(&CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + assert!(!handle.is_finished()); + handle.abort(); + + subscription.delete(None).await.ok(); + topic.delete(None).await.ok(); +} + +/// Check that the plugin keeps running when it can't send a message to svix because idk, the servers are all offline?? 
+#[tokio::test] +async fn test_consume_svix_offline() { + let client = mq_connection().await; + let (topic, subscription) = create_test_queue(&client).await; + + let mock_server = MockServer::start().await; + + let plugin = get_test_plugin(mock_server.uri(), subscription.id()); + + // bye-bye svix... + drop(mock_server); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &topic, + &serde_json::to_string(&CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + assert!(!handle.is_finished()); + handle.abort(); + + subscription.delete(None).await.ok(); + topic.delete(None).await.ok(); +} diff --git a/webhook-bridge/svix-agent-plugin-generic/tests/rabbitmq_consumer.rs b/webhook-bridge/svix-agent-plugin-generic/tests/rabbitmq_consumer.rs new file mode 100644 index 000000000..dacf471a2 --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-generic/tests/rabbitmq_consumer.rs @@ -0,0 +1,332 @@ +//! Requires a rabbitmq node to be running on localhost:5672 (the default port) and using the +//! default guest/guest credentials. +//! Try using the `testing-docker-compose.yml` in the repo root to get this going. 
+ +use generic_queue::rabbitmq::FieldTable; +use lapin::{options::QueueDeclareOptions, Channel, Connection, ConnectionProperties, Queue}; +use serde_json::json; +use std::time::Duration; +use svix::api::MessageIn; +use svix_agent_plugin_generic::{ + config::{OutputOpts, RabbitMqInputOpts, SvixOptions}, + CreateMessageRequest, RabbitMqConsumerConfig, RabbitMqConsumerPlugin, +}; +use svix_agent_types::Plugin; +use wiremock::matchers::method; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +fn get_test_plugin(svix_url: String, mq_uri: &str, queue_name: &str) -> RabbitMqConsumerPlugin { + RabbitMqConsumerPlugin::new(RabbitMqConsumerConfig { + input: RabbitMqInputOpts { + uri: mq_uri.to_string(), + queue_name: queue_name.to_string(), + ..Default::default() + }, + output: OutputOpts { + token: "xxxx".to_string(), + svix_options: Some(SvixOptions { + server_url: Some(svix_url), + ..Default::default() + }), + }, + }) +} + +async fn declare_queue(name: &str, channel: &Channel) -> Queue { + channel + .queue_declare( + name, + QueueDeclareOptions { + auto_delete: true, + ..Default::default() + }, + FieldTable::default(), + ) + .await + .unwrap() +} + +async fn mq_connection(uri: &str) -> Connection { + let options = ConnectionProperties::default() + .with_connection_name("test".into()) + .with_executor(tokio_executor_trait::Tokio::current()) + .with_reactor(tokio_reactor_trait::Tokio); + Connection::connect(uri, options).await.unwrap() +} + +async fn publish(channel: &Channel, queue_name: &str, payload: &[u8]) { + let confirm = channel + .basic_publish( + "", + queue_name, + Default::default(), + payload, + Default::default(), + ) + .await + .unwrap(); + confirm.await.unwrap(); +} + +/// General "pause while we wait for messages to travel" beat. If you're seeing flakes, bump this up. +const WAIT_MS: u64 = 150; +/// These tests assume a "vanilla" rabbitmq instance, using the default port, creds, exchange... 
+const MQ_URI: &str = "amqp://guest:guest@localhost:5672/%2f"; + +/// Push a msg on the queue. +/// Check to see if the svix server sees a request. +#[tokio::test] +async fn test_consume_ok() { + let mq_conn = mq_connection(MQ_URI).await; + let channel = mq_conn.create_channel().await.unwrap(); + // setup the queue before running the consumer or the consumer will error out + let queue = declare_queue("", &channel).await; + let queue_name = queue.name().as_str(); + + let mock_server = MockServer::start().await; + // The mock will make asserts on drop (i.e. when the body of the test is returning). + // The `expect` call should ensure we see exactly 1 POST request. + // + let mock = Mock::given(method("POST")) + .respond_with(ResponseTemplate::new(202).set_body_json(json!({ + "eventType": "testing.things", + "payload": { + "_SVIX_APP_ID": "app_1234", + "_SVIX_EVENT_TYPE": "testing.things", + "hi": "there", + }, + "id": "msg_xxxx", + "timestamp": "2023-04-25T00:00:00Z" + }))) + .named("create_message") + .expect(1); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + let msg = CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }; + + publish(&channel, queue_name, &serde_json::to_vec(&msg).unwrap()).await; + + // Wait for the consumer to consume. 
+ tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + handle.abort(); + channel + .queue_delete(queue_name, Default::default()) + .await + .ok(); +} + +#[tokio::test] +async fn test_missing_app_id_nack() { + let mq_conn = mq_connection(MQ_URI).await; + let channel = mq_conn.create_channel().await.unwrap(); + // setup the queue before running the consumer or the consumer will error out + let queue = declare_queue("", &channel).await; + let queue_name = queue.name().as_str(); + + let mock_server = MockServer::start().await; + let mock = Mock::given(method("POST")) + // The response doesn't really matter, but we need to define it to be able to `expect(0)`. + .respond_with(ResponseTemplate::new(400)) + .named("create_message") + // No requests should be made when the event type or app id are missing. + .expect(0); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &channel, + queue_name, + &serde_json::to_vec(&json!({ + // No app id + "message": { + "eventType": "testing.things", + "payload": { + "hi": "there", + } + }, + + })) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. 
+ tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + handle.abort(); + channel + .queue_delete(queue_name, Default::default()) + .await + .ok(); +} + +#[tokio::test] +async fn test_missing_event_type_nack() { + let mq_conn = mq_connection(MQ_URI).await; + let channel = mq_conn.create_channel().await.unwrap(); + // setup the queue before running the consumer or the consumer will error out + let queue = declare_queue("", &channel).await; + let queue_name = queue.name().as_str(); + + let mock_server = MockServer::start().await; + let mock = Mock::given(method("POST")) + // The response doesn't really matter, but we need to define it to be able to `expect(0)`. + .respond_with(ResponseTemplate::new(400)) + .named("create_message") + // No requests should be made when the event type or app id are missing. + .expect(0); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &channel, + queue_name, + &serde_json::to_vec(&json!({ + "app_id": "app_1234", + "message": { + // No event type + "payload": { + "hi": "there", + } + }, + })) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. 
+ tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + handle.abort(); + channel + .queue_delete(queue_name, Default::default()) + .await + .ok(); +} + +/// Check that the plugin keeps running when it can't send a message to svix +#[tokio::test] +async fn test_consume_svix_503() { + let mq_conn = mq_connection(MQ_URI).await; + let channel = mq_conn.create_channel().await.unwrap(); + // setup the queue before running the consumer or the consumer will error out + let queue = declare_queue("", &channel).await; + let queue_name = queue.name().as_str(); + + let mock_server = MockServer::start().await; + // The mock will make asserts on drop (i.e. when the body of the test is returning). + // The `expect` call should ensure we see exactly 1 POST request. + // + let mock = Mock::given(method("POST")) + .respond_with(ResponseTemplate::new(503)) + .named("create_message") + .expect(1); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &channel, + queue_name, + &serde_json::to_vec(&CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + assert!(!handle.is_finished()); + handle.abort(); + channel + .queue_delete(queue_name, Default::default()) + .await + .ok(); +} + +/// Check that the plugin keeps running when it can't send a message to svix because idk, the servers are all offline?? 
+#[tokio::test] +async fn test_consume_svix_offline() { + let mq_conn = mq_connection(MQ_URI).await; + let channel = mq_conn.create_channel().await.unwrap(); + // setup the queue before running the consumer or the consumer will error out + let queue = declare_queue("", &channel).await; + let queue_name = queue.name().as_str(); + + let mock_server = MockServer::start().await; + + let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name); + + // bye-bye svix... + drop(mock_server); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &channel, + queue_name, + &serde_json::to_vec(&CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + assert!(!handle.is_finished()); + handle.abort(); + channel + .queue_delete(queue_name, Default::default()) + .await + .ok(); +} diff --git a/webhook-bridge/svix-agent-plugin-generic/tests/redis_stream_consumer.rs b/webhook-bridge/svix-agent-plugin-generic/tests/redis_stream_consumer.rs new file mode 100644 index 000000000..0349aea23 --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-generic/tests/redis_stream_consumer.rs @@ -0,0 +1,301 @@ +//! Use the `testing-docker-compose.yml` in the repo root to run the dependencies for testing, +//! including Redis. 
+ +use std::time::Duration; + +use redis::{AsyncCommands, Client}; +use serde_json::json; +use svix::api::MessageIn; +use svix_agent_plugin_generic::{ + config::{OutputOpts, SvixOptions}, + CreateMessageRequest, RedisConsumerConfig, RedisConsumerPlugin, RedisInputOpts, +}; +use svix_agent_types::Plugin; +use wiremock::matchers::method; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +fn get_test_plugin(svix_url: String, queue_key: String) -> RedisConsumerPlugin { + RedisConsumerPlugin::new(RedisConsumerConfig { + input: RedisInputOpts { + dsn: "redis://localhost/".to_owned(), + max_connections: 8, + reinsert_on_nack: false, + queue_key, + consumer_group: "test_cg".to_owned(), + consumer_name: "test_cn".to_owned(), + }, + output: OutputOpts { + token: "xxxx".to_string(), + svix_options: Some(SvixOptions { + server_url: Some(svix_url), + ..Default::default() + }), + }, + }) +} + +async fn redis_connection() -> Client { + Client::open("redis://localhost/").unwrap() +} + +async fn create_test_stream(client: &Client) -> String { + let name: String = std::iter::repeat_with(fastrand::alphanumeric) + .take(8) + .collect(); + + let mut conn = client.get_async_connection().await.unwrap(); + + let _: () = conn + .xgroup_create_mkstream(&name, "test_cg", 0i8) + .await + .unwrap(); + + name +} + +async fn delete_test_stream(client: &Client, key: &str) { + let mut conn = client.get_async_connection().await.unwrap(); + let _: () = conn.del(key).await.unwrap(); +} + +async fn publish(client: &Client, key: &str, payload: &str) { + let mut conn = client.get_async_connection().await.unwrap(); + + let _: () = conn.xadd(key, "*", &[("payload", payload)]).await.unwrap(); +} + +/// General "pause while we wait for messages to travel" beat. If you're seeing flakes, bump this up. +const WAIT_MS: u64 = 250; + +/// Push a msg on the queue. +/// Check to see if the svix server sees a request. 
+#[tokio::test] +async fn test_consume_ok() { + let client = redis_connection().await; + let key = create_test_stream(&client).await; + + let mock_server = MockServer::start().await; + // The mock will make asserts on drop (i.e. when the body of the test is returning). + // The `expect` call should ensure we see exactly 1 POST request. + // + let mock = Mock::given(method("POST")) + .respond_with(ResponseTemplate::new(202).set_body_json(json!({ + "eventType": "testing.things", + "payload": { + "_SVIX_APP_ID": "app_1234", + "_SVIX_EVENT_TYPE": "testing.things", + "hi": "there", + }, + "id": "msg_xxxx", + "timestamp": "2023-04-25T00:00:00Z" + }))) + .named("create_message") + .expect(1); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), key.clone()); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + let msg = CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }; + + publish(&client, &key, &serde_json::to_string(&msg).unwrap()).await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + handle.abort(); + + delete_test_stream(&client, &key).await; +} + +#[tokio::test] +async fn test_missing_app_id_nack() { + let client = redis_connection().await; + let key = create_test_stream(&client).await; + + let mock_server = MockServer::start().await; + let mock = Mock::given(method("POST")) + // The response doesn't really matter, but we need to define it to be able to `expect(0)`. + .respond_with(ResponseTemplate::new(400)) + .named("create_message") + // No requests should be made when the event type or app id are missing. 
+ .expect(0); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), key.clone()); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &client, + &key, + &serde_json::to_string(&json!({ + // No app id + "message": { + "eventType": "testing.things", + "payload": { + "hi": "there", + } + }, + + })) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + handle.abort(); + + delete_test_stream(&client, &key).await +} + +#[tokio::test] +async fn test_missing_event_type_nack() { + let client = redis_connection().await; + let key = create_test_stream(&client).await; + + let mock_server = MockServer::start().await; + let mock = Mock::given(method("POST")) + // The response doesn't really matter, but we need to define it to be able to `expect(0)`. + .respond_with(ResponseTemplate::new(400)) + .named("create_message") + // No requests should be made when the event type or app id are missing. + .expect(0); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), key.clone()); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &client, + &key, + &serde_json::to_string(&json!({ + "app_id": "app_1234", + "message": { + // No event type + "payload": { + "hi": "there", + } + }, + })) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. 
+ tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + handle.abort(); + + delete_test_stream(&client, &key).await +} + +/// Check that the plugin keeps running when it can't send a message to svix +#[tokio::test] +async fn test_consume_svix_503() { + let client = redis_connection().await; + let key = create_test_stream(&client).await; + + let mock_server = MockServer::start().await; + // The mock will make asserts on drop (i.e. when the body of the test is returning). + // The `expect` call should ensure we see exactly 1 POST request. + // + let mock = Mock::given(method("POST")) + .respond_with(ResponseTemplate::new(503)) + .named("create_message") + .expect(1); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), key.clone()); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &client, + &key, + &serde_json::to_string(&CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + assert!(!handle.is_finished()); + handle.abort(); + + delete_test_stream(&client, &key).await +} + +/// Check that the plugin keeps running when it can't send a message to svix because idk, the servers are all offline?? +#[tokio::test] +async fn test_consume_svix_offline() { + let client = redis_connection().await; + let key = create_test_stream(&client).await; + + let mock_server = MockServer::start().await; + + let plugin = get_test_plugin(mock_server.uri(), key.clone()); + + // bye-bye svix... 
+ drop(mock_server); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &client, + &key, + &serde_json::to_string(&CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + assert!(!handle.is_finished()); + handle.abort(); + + delete_test_stream(&client, &key).await +} diff --git a/webhook-bridge/svix-agent-plugin-generic/tests/sqs_consumer.rs b/webhook-bridge/svix-agent-plugin-generic/tests/sqs_consumer.rs new file mode 100644 index 000000000..719f325ec --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-generic/tests/sqs_consumer.rs @@ -0,0 +1,335 @@ +//! Use the `testing-docker-compose.yml` in the repo root to run the dependencies for testing, +//! including ElasticMQ. +//! +//! Use `run-tests.sh` to use the requisite environment for testing. 
+ +use std::time::Duration; + +use aws_sdk_sqs::Client; +use serde_json::json; +use svix::api::MessageIn; +use svix_agent_plugin_generic::{ + config::{OutputOpts, SvixOptions}, + CreateMessageRequest, SqsConsumerConfig, SqsConsumerPlugin, SqsInputOpts, +}; +use svix_agent_types::Plugin; +use wiremock::matchers::method; +use wiremock::{Mock, MockServer, ResponseTemplate}; + +const ROOT_URL: &str = "http://localhost:9324"; + +fn get_test_plugin(svix_url: String, queue_dsn: String) -> SqsConsumerPlugin { + SqsConsumerPlugin::new(SqsConsumerConfig { + input: SqsInputOpts { + queue_dsn, + override_endpoint: true, + }, + output: OutputOpts { + token: "xxxx".to_string(), + svix_options: Some(SvixOptions { + server_url: Some(svix_url), + ..Default::default() + }), + }, + }) +} + +async fn mq_connection() -> Client { + let config = aws_config::from_env().endpoint_url(ROOT_URL).load().await; + Client::new(&config) +} + +async fn create_test_queue(client: &Client) -> String { + let name: String = std::iter::repeat_with(fastrand::alphanumeric) + .take(8) + .collect(); + client + .create_queue() + .queue_name(&name) + .send() + .await + .unwrap(); + + name +} + +async fn publish(client: &Client, url: &str, payload: &str) { + client + .send_message() + .queue_url(url) + .message_body(payload) + .send() + .await + .unwrap(); +} + +/// General "pause while we wait for messages to travel" beat. If you're seeing flakes, bump this up. +const WAIT_MS: u64 = 100; + +/// Push a msg on the queue. +/// Check to see if the svix server sees a request. +#[tokio::test] +async fn test_consume_ok() { + let client = mq_connection().await; + let queue_name = create_test_queue(&client).await; + + let queue_url = format!("{ROOT_URL}/queue/{queue_name}"); + + let mock_server = MockServer::start().await; + // The mock will make asserts on drop (i.e. when the body of the test is returning). + // The `expect` call should ensure we see exactly 1 POST request. 
+ // + let mock = Mock::given(method("POST")) + .respond_with(ResponseTemplate::new(202).set_body_json(json!({ + "eventType": "testing.things", + "payload": { + "_SVIX_APP_ID": "app_1234", + "_SVIX_EVENT_TYPE": "testing.things", + "hi": "there", + }, + "id": "msg_xxxx", + "timestamp": "2023-04-25T00:00:00Z" + }))) + .named("create_message") + .expect(1); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), queue_url.clone()); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + let msg = CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }; + + publish(&client, &queue_url, &serde_json::to_string(&msg).unwrap()).await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + handle.abort(); + + client + .delete_queue() + .queue_url(&queue_url) + .send() + .await + .unwrap(); +} + +#[tokio::test] +async fn test_missing_app_id_nack() { + let client = mq_connection().await; + let queue_name = create_test_queue(&client).await; + + let queue_url = format!("{ROOT_URL}/queue/{queue_name}"); + + let mock_server = MockServer::start().await; + let mock = Mock::given(method("POST")) + // The response doesn't really matter, but we need to define it to be able to `expect(0)`. + .respond_with(ResponseTemplate::new(400)) + .named("create_message") + // No requests should be made when the event type or app id are missing. 
+ .expect(0); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), queue_url.clone()); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &client, + &queue_url, + &serde_json::to_string(&json!({ + // No app id + "message": { + "eventType": "testing.things", + "payload": { + "hi": "there", + } + }, + + })) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + handle.abort(); + + client + .delete_queue() + .queue_url(&queue_url) + .send() + .await + .unwrap(); +} + +#[tokio::test] +async fn test_missing_event_type_nack() { + let client = mq_connection().await; + let queue_name = create_test_queue(&client).await; + + let queue_url = format!("{ROOT_URL}/queue/{queue_name}"); + + let mock_server = MockServer::start().await; + let mock = Mock::given(method("POST")) + // The response doesn't really matter, but we need to define it to be able to `expect(0)`. + .respond_with(ResponseTemplate::new(400)) + .named("create_message") + // No requests should be made when the event type or app id are missing. + .expect(0); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), queue_url.clone()); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &client, + &queue_url, + &serde_json::to_string(&json!({ + "app_id": "app_1234", + "message": { + // No event type + "payload": { + "hi": "there", + } + }, + })) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. 
+ tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + handle.abort(); + + client + .delete_queue() + .queue_url(&queue_url) + .send() + .await + .unwrap(); +} + +/// Check that the plugin keeps running when it can't send a message to svix +#[tokio::test] +async fn test_consume_svix_503() { + let client = mq_connection().await; + let queue_name = create_test_queue(&client).await; + + let queue_url = format!("{ROOT_URL}/queue/{queue_name}"); + + let mock_server = MockServer::start().await; + // The mock will make asserts on drop (i.e. when the body of the test is returning). + // The `expect` call should ensure we see exactly 1 POST request. + // + let mock = Mock::given(method("POST")) + .respond_with(ResponseTemplate::new(503)) + .named("create_message") + .expect(1); + mock_server.register(mock).await; + + let plugin = get_test_plugin(mock_server.uri(), queue_url.clone()); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &client, + &queue_url, + &serde_json::to_string(&CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + assert!(!handle.is_finished()); + handle.abort(); + + client + .delete_queue() + .queue_url(&queue_url) + .send() + .await + .unwrap(); +} + +/// Check that the plugin keeps running when it can't send a message to svix because idk, the servers are all offline?? 
+#[tokio::test] +async fn test_consume_svix_offline() { + let client = mq_connection().await; + let queue_name = create_test_queue(&client).await; + + let queue_url = format!("{ROOT_URL}/queue/{queue_name}"); + + let mock_server = MockServer::start().await; + + let plugin = get_test_plugin(mock_server.uri(), queue_url.clone()); + + // bye-bye svix... + drop(mock_server); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + publish( + &client, + &queue_url, + &serde_json::to_string(&CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }) + .unwrap(), + ) + .await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + assert!(!handle.is_finished()); + handle.abort(); + + client + .delete_queue() + .queue_url(&queue_url) + .send() + .await + .unwrap(); +} diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/Cargo.toml b/webhook-bridge/svix-agent-plugin-webhook-receiver/Cargo.toml new file mode 100644 index 000000000..1facec549 --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-webhook-receiver/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "svix-agent-plugin-webhook-receiver" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +anyhow = "1" +axum = { version = "0.6", features = ["macros"] } +enum_dispatch = "0.3" +http = "0.2" +hyper = { version = "0.14", features = ["full"] } +serde = { version = "1", features = ["derive"] } +serde_json = "1" +smol = "1.3" +svix = "0.84.1" +svix-agent-types = { path = "../svix-agent-types" } +threadpool = "1" +tracing = "0.1" +tokio = { version = "1", features = ["full"] } +tower-http = { version = "0.3", features = ["trace"] } +serde_yaml 
= "0.9" +generic_queue = { path = "../generic-queue" } + +[dev-dependencies] +chrono = "0.4" diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/src/config.rs b/webhook-bridge/svix-agent-plugin-webhook-receiver/src/config.rs new file mode 100644 index 000000000..8eb49f3f8 --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-webhook-receiver/src/config.rs @@ -0,0 +1,175 @@ +use crate::types::IntegrationId; +use generic_queue::rabbitmq::{BasicProperties, BasicPublishOptions}; +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; + +/// The [`IntegrationConfig`] is the struct associated with a given [`IntegrationId`]. When the route +/// associated with an [`IntegrationId`] receives a webhook, or any other HTTP request, then it will +/// attempt to be validated with the specified [`VerificationScheme`]. Should the configured scheme +/// indicate that the webhook is valid, then the webhook will be forwarded verbatim to the configured +/// [`ForwardDestination`]. +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +pub struct IntegrationConfig { + pub name: IntegrationId, + pub verification: VerificationScheme, + pub destination: ForwardDestination, +} + +/// The [`VerificationScheme`] is an enum with variant for every method for verifying a webhook's +/// authenticity that is supported by this service. As of present, the [`VerificationScheme::Svix`] +/// variant and [`VerificationScheme::JavaScript`] variant are supported. +/// +/// Upon an [`IntegrationId`] receiving a webhook, then the configured [`VerificationScheme`]'s +/// associated method will be called returning a simple [`bool`] result on whether the HTTP request +/// received is a valid webhook. 
+#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
+#[serde(tag = "type", rename_all = "lowercase")]
+pub enum VerificationScheme {
+    /// The [`VerificationScheme::Svix`] variant is a simple scheme which uses the official
+    /// [`svix`] library and a configured secret to verify webhooks dispatched by Svix.
+    Svix {
+        #[serde(flatten)]
+        secret: SvixSecret,
+    },
+    None,
+}
+
+#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
+pub struct GCPPubSubOutputOpts {
+    pub topic: String,
+    pub credentials_file: Option<PathBuf>,
+}
+
+#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
+pub struct RabbitMqOutputOpts {
+    /// Connection string for RabbitMQ.
+    pub uri: String,
+    /// The exchange to publish messages to.
+    pub exchange: String,
+    /// The routing key to publish messages to.
+    pub routing_key: String,
+    #[serde(default)]
+    pub publish_options: BasicPublishOptions,
+    #[serde(default)]
+    pub publish_properties: BasicProperties,
+}
+
+#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
+pub struct RedisOutputOpts {
+    pub dsn: String,
+    pub max_connections: u16,
+    pub queue_key: String,
+}
+
+#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
+pub struct SqsOutputOpts {
+    pub queue_dsn: String,
+    #[serde(default)]
+    pub override_endpoint: bool,
+}
+
+/// The [`ForwardDestination`] is a part of the [`IntegrationConfig`] for every [`IntegrationId`]
+/// and defines where the webhook will be redirected upon the [`IntegrationConfig`]'s associated
+/// [`VerificationScheme`] deeming that an HTTP request is a valid, authentic webhook.
+#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[serde(tag = "type", rename_all = "lowercase")] +pub enum ForwardDestination { + // FIXME: HTTP forwarding + GCPPubSub(GCPPubSubOutputOpts), + RabbitMQ(RabbitMqOutputOpts), + Redis(RedisOutputOpts), + SQS(SqsOutputOpts), +} + +/// All webhooks dispatched by Svix and all associated metadata are signed before being sent. The +/// key for verifying this signature is associated with a given endpoint registered with Svix and +/// begins with `whsec_`. +/// +/// This enum has two variants -- [`SvixSecret::Secret`] which is meant to be the direct secret +/// value, while [`SvixSecret::EnvVar`] is meant to be a valid env var which contains the secret. +/// +/// They are distinguished in configuration via setting either `secret` or `secret_env`. +#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)] +#[serde(untagged)] +pub enum SvixSecret { + Secret { secret: String }, + EnvVar { secret_env: String }, +} + +impl SvixSecret { + pub fn to_secret(&self) -> anyhow::Result { + match self { + SvixSecret::Secret { secret } => Ok(secret.clone()), + SvixSecret::EnvVar { secret_env } => { + if let Ok(secret) = std::env::var(secret_env) { + Ok(secret) + } else { + anyhow::bail!("env var {secret_env} not set or invalid"); + } + } + } + } +} + +#[cfg(test)] +mod tests { + use super::{SvixSecret, VerificationScheme}; + use crate::WebhookReceiverPluginConfig; + + #[test] + fn test_svix_secret_parsing() { + // FIXME: destinations need fixups + let config_text = r#" + listen_addr: "0.0.0.0:1234" + routes: + - name: "path-1" + verification: + type: svix + secret: whsec_bm90IHJlYWw= + destination: + type: rabbitmq + uri: amqp://guest:guest@localhost:5672/%2f + exchange: "myexhange" + routing_key: "" + - name: "path_2" + verification: + type: svix + secret_env: SECRET_SVIX_TOKEN + destination: + type: sqs + queue_dsn: http://localhost:9324/queue/my-queue + "#; + + let config: WebhookReceiverPluginConfig = 
serde_yaml::from_str(config_text).unwrap(); + + let configured_secret = "whsec_bm90IHJlYWw=".to_owned(); + let set_secret_env = "whsec_invalid".to_owned(); + + assert!(matches!( + &config.routes[0].verification, + VerificationScheme::Svix { + secret: SvixSecret::Secret { .. } + } + )); + assert!(matches!( + &config.routes[1].verification, + VerificationScheme::Svix { + secret: SvixSecret::EnvVar { .. } + } + )); + + std::env::set_var("SECRET_SVIX_TOKEN", &set_secret_env); + + let secret = match &config.routes[0].verification { + VerificationScheme::Svix { secret } => secret, + _ => panic!("unexpected verification scheme"), + }; + assert_eq!(secret.to_secret().unwrap(), configured_secret); + + let secret = match &config.routes[1].verification { + VerificationScheme::Svix { secret } => secret, + _ => panic!("unexpected verification scheme"), + }; + assert_eq!(secret.to_secret().unwrap(), set_secret_env); + } +} diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/src/forwarding.rs b/webhook-bridge/svix-agent-plugin-webhook-receiver/src/forwarding.rs new file mode 100644 index 000000000..c929677c1 --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-webhook-receiver/src/forwarding.rs @@ -0,0 +1,121 @@ +use crate::config::{GCPPubSubOutputOpts, RabbitMqOutputOpts, RedisOutputOpts, SqsOutputOpts}; +use crate::types::{SerializablePayload, SerializableRequest, Validated}; +use anyhow::Result; +use axum::async_trait; +use enum_dispatch::enum_dispatch; +use generic_queue::gcp_pubsub::{GCPPubSubConfig, GCPPubSubQueueBackend}; +use generic_queue::rabbitmq::{RabbitMqBackend, RabbitMqConfig}; +use generic_queue::redis::{RedisConfig, RedisQueueBackend}; +use generic_queue::sqs::{SqsConfig, SqsQueueBackend}; +use generic_queue::{TaskQueueBackend, TaskQueueSend}; +use std::sync::Arc; + +#[async_trait] +#[enum_dispatch] +pub trait ForwardingMethod { + async fn forward(&self, req: SerializableRequest) -> Result; +} + +#[derive(Clone)] +pub struct GenericQueueForwarder { + 
// FIXME: if we retain things like the queue name we can show this in the Debug impl + sender: Arc>>, +} + +type Msg = serde_json::Value; + +impl GenericQueueForwarder { + pub async fn from_rabbitmq_cfg(cfg: RabbitMqOutputOpts) -> Result { + let sender = >::producing_half(RabbitMqConfig { + uri: cfg.uri, + // N.b the connection properties type is not serde-friendly. If we want to expose some + // of these settings we'll probably need to provide our own type and build the real one + // here from cfg. + connection_properties: Default::default(), + publish_exchange: cfg.exchange, + publish_routing_key: cfg.routing_key, + publish_options: cfg.publish_options, + publish_properites: cfg.publish_properties, + // consumer stuff we don't care about + consume_queue: "".to_string(), + consumer_tag: "".to_string(), + consume_options: Default::default(), + consume_arguments: Default::default(), + requeue_on_nack: false, + }) + .await?; + + Ok(Self { + sender: Arc::new(Box::new(sender)), + }) + } + + pub async fn from_redis_cfg(cfg: RedisOutputOpts) -> Result { + let sender = >::producing_half(RedisConfig { + dsn: cfg.dsn, + max_connections: cfg.max_connections, + queue_key: cfg.queue_key, + // consumer stuff we don't really care about + reinsert_on_nack: false, + consumer_group: "".to_string(), + consumer_name: "".to_string(), + }) + .await?; + + Ok(Self { + sender: Arc::new(Box::new(sender)), + }) + } + + pub async fn from_sqs_cfg(cfg: SqsOutputOpts) -> Result { + let sender = >::producing_half(SqsConfig { + queue_dsn: cfg.queue_dsn, + override_endpoint: cfg.override_endpoint, + }) + .await?; + + Ok(Self { + sender: Arc::new(Box::new(sender)), + }) + } + + pub async fn from_gcp_pupsub_cfg(cfg: GCPPubSubOutputOpts) -> Result { + let sender = + >::producing_half(GCPPubSubConfig { + topic: cfg.topic, + credentials_file: cfg.credentials_file, + // Don't need this. Subscriptions are for consumers only. 
+ subscription_id: String::new(), + }) + .await?; + + Ok(Self { + sender: Arc::new(Box::new(sender)), + }) + } +} +impl std::fmt::Debug for GenericQueueForwarder { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("GenericQueueForwarder").finish() + } +} +#[async_trait] +impl ForwardingMethod for GenericQueueForwarder { + async fn forward(&self, req: SerializableRequest) -> Result { + let payload = match req.payload() { + SerializablePayload::Standard(data) => serde_json::from_slice(data)?, + SerializablePayload::StringSerializable(s) => serde_json::from_str(s)?, + }; + + self.sender.send(payload).await?; + Ok(http::StatusCode::OK) + } +} + +// FIXME: HTTP Forwarder + +#[enum_dispatch(ForwardingMethod)] +#[derive(Clone, Debug)] +pub enum Forwarder { + GenericQueueForwarder, +} diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/src/lib.rs b/webhook-bridge/svix-agent-plugin-webhook-receiver/src/lib.rs new file mode 100644 index 000000000..be8be54c0 --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-webhook-receiver/src/lib.rs @@ -0,0 +1,106 @@ +use crate::config::IntegrationConfig; +use axum::{ + extract::{Path, State}, + routing::post, + Router, +}; +use serde::Deserialize; +use std::net::SocketAddr; +use svix_agent_types::{async_trait, Plugin}; +use tracing::instrument; +use types::{IntegrationId, IntegrationState, InternalState, SerializableRequest, Unvalidated}; + +pub mod config; +mod forwarding; +mod types; +mod verification; + +pub const PLUGIN_NAME: &str = env!("CARGO_PKG_NAME"); +pub const PLUGIN_VERS: &str = env!("CARGO_PKG_VERSION"); + +#[derive(Clone, Debug, Deserialize, PartialEq)] +pub struct WebhookReceiverPluginConfig { + pub listen_addr: SocketAddr, + pub routes: Vec, +} + +#[derive(Clone, Debug, Deserialize, PartialEq)] +pub struct WebhookReceiverPlugin { + cfg: WebhookReceiverPluginConfig, +} + +impl WebhookReceiverPlugin { + pub fn new(cfg: WebhookReceiverPluginConfig) -> Self { + Self { cfg } + 
} +} + +impl TryInto> for WebhookReceiverPluginConfig { + type Error = &'static str; + + fn try_into(self) -> Result, Self::Error> { + Ok(Box::new(WebhookReceiverPlugin::new(self))) + } +} + +#[async_trait] +impl Plugin for WebhookReceiverPlugin { + async fn run(&self) -> std::io::Result<()> { + let addr = &self.cfg.listen_addr; + let state = InternalState::from_routes(self.cfg.routes.as_slice()) + .await + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; + + let router = Router::new() + .route( + "/webhook/:integration_id", + post(route).put(route).get(route).patch(route), + ) + .route( + "/webhook/:integration_id/", + post(route).put(route).get(route).patch(route), + ) + .with_state(state); + + tracing::info!("Listening on: {addr}"); + axum::Server::bind(addr) + .serve(router.into_make_service()) + .await + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)) + } +} + +#[instrument( + skip_all, + level="error", + fields( + integration_id=integration_id.as_ref(), + svixagent_plugin.name = PLUGIN_NAME, + svixagent_plugin.vers = PLUGIN_VERS, + ) +)] +async fn route( + Path(integration_id): Path, + State(InternalState(id_map)): State, + req: SerializableRequest, +) -> http::StatusCode { + if let Some(IntegrationState { + verifier, + forwarder, + }) = id_map.get(&integration_id) + { + match req.validate(verifier).await { + Ok(req) => { + tracing::debug!("forwarding request"); + req.forward(forwarder).await + } + Err(code) => { + tracing::warn!("validation failed: {code}"); + code + } + } + } else { + tracing::trace!("integration not found"); + http::StatusCode::NOT_FOUND + } +} diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/src/runtime.rs b/webhook-bridge/svix-agent-plugin-webhook-receiver/src/runtime.rs new file mode 100644 index 000000000..8e858316c --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-webhook-receiver/src/runtime.rs @@ -0,0 +1,173 @@ +//! N.b. this module is not currently attached to the project. +//! 
Retained as-is from the original webhook-ingester for now since it seems adaptable for the +//! upcoming "transformations" work. +use std::{pin::Pin, rc::Rc, sync::Arc}; + +use anyhow::Result; +use deno_core::{ + futures::FutureExt, + resolve_import, serde_v8, + v8::{self, Global, Value}, + FsModuleLoader, ModuleLoader, ModuleSource, ModuleSourceFuture, ModuleSpecifier, ModuleType, +}; +use deno_runtime::{ + permissions::Permissions, + worker::{MainWorker, WorkerOptions}, +}; +use threadpool::ThreadPool; +use tokio::sync::{oneshot, Mutex}; + +use crate::types::{SerializableRequest, Unvalidated}; + +/// This [`ModuleLoader`] implementation loads the configured script when loading the "file" +/// `virt:///user/script`, but otherwise loads things from disk +pub struct ConfiguredModuleLoader(pub String); + +impl ModuleLoader for ConfiguredModuleLoader { + fn resolve(&self, specifier: &str, referrer: &str, _is_main: bool) -> Result { + Ok(resolve_import(specifier, referrer)?) + } + + fn load( + &self, + module_specifier: &ModuleSpecifier, + _maybe_referrer: Option, + _is_dynamic: bool, + ) -> Pin> { + let module_specifier = module_specifier.clone(); + let code = self.0.clone(); + + if Ok(module_specifier.clone()) == resolve_import("virt:///user/script", "") { + async move { + Ok(ModuleSource { + code: Box::from(code.as_bytes()), + module_type: ModuleType::JavaScript, + module_url_specified: module_specifier.to_string(), + module_url_found: module_specifier.to_string(), + }) + } + .boxed_local() + } else { + FsModuleLoader.load(&module_specifier, _maybe_referrer, _is_dynamic) + } + } +} + +/// In the context of this service, the only valid return value of the exported function run by the +/// Deno runtime is a `bool` value. Any other values are invalid and should become an error in the +/// [`VerificationMethod`] implementation. 
+pub enum JsReturn { + Bool(bool), + Invalid, +} + +// NOTE: The worker is in a struct to try and get around requirements for Send with `async`, but I +// think it can be eliminated with a little effort. +struct Worker { + pub worker: MainWorker, +} + +impl Worker { + pub async fn load_script(&mut self, script: String) -> Result<()> { + let loader = ConfiguredModuleLoader(script.clone()); + let worker_main_module = loader.resolve("virt:///user/script", "", true)?; + + let module_id = self + .worker + .js_runtime + .load_main_module(&worker_main_module, Some(script)) + .await?; + + let eval = self.worker.js_runtime.mod_evaluate(module_id); + self.worker.js_runtime.run_event_loop(true).await?; + eval.await??; + + Ok(()) + } + + pub fn run_script(&mut self, req: SerializableRequest) -> Result> { + // This defines the global `input` variable + self.worker + .execute_script("bootstrap", &define_global(&req)?)?; + + // And this calls the `handler` function in the main module + let out = self.worker.execute_script( + "run", + "import('virt:///user/script').then(module => module.default(input));", + )?; + + Ok(out) + } + + pub async fn resolve_value(&mut self, res: Global) -> Result { + let awaited = self.worker.js_runtime.resolve_value(res).await?; + + let scope = &mut self.worker.js_runtime.handle_scope(); + let local = v8::Local::new(scope, awaited); + + match serde_v8::from_v8::(scope, local) { + Ok(b) => Ok(JsReturn::Bool(b)), + Err(e) if e == deno_core::serde_v8::Error::ExpectedBoolean => Ok(JsReturn::Invalid), + Err(e) => Err(e)?, + } + } +} + +#[derive(Clone, Debug)] +pub struct TpHandle(pub Arc>); + +impl TpHandle { + pub fn new() -> Self { + Self(Arc::new(Mutex::new(ThreadPool::default()))) + } + + pub async fn run_script( + &self, + req: SerializableRequest, + script: String, + ) -> Result { + let (tx, rx) = oneshot::channel(); + + self.0.lock().await.execute(move || { + let _ = tx.send(run_script_inner(req, script)); + }); + + rx.await? 
+ } +} + +fn run_script_inner(req: SerializableRequest, script: String) -> Result { + let mut worker = { + let worker_main_module = deno_core::resolve_path("file://dne").unwrap(); + + let worker_options = WorkerOptions { + module_loader: Rc::new(ConfiguredModuleLoader(script.clone())), + ..Default::default() + }; + + let worker_permissions = Permissions::allow_all(); + + Worker { + worker: MainWorker::bootstrap_from_options( + worker_main_module, + worker_permissions, + worker_options, + ), + } + }; + + smol::block_on(async { worker.load_script(script).await })?; + let res = worker.run_script(req)?; + smol::block_on(async { worker.resolve_value(res).await }) +} + +fn define_global(req: &SerializableRequest) -> Result { + Ok(format!( + "Object.defineProperty(\ + globalThis,\ + 'input',\ + {{ value: {}, writable: true, enumerable: true, configurable: true }}\ + );", + serde_json::to_string(req)?, + )) +} diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/src/types.rs b/webhook-bridge/svix-agent-plugin-webhook-receiver/src/types.rs new file mode 100644 index 000000000..03337466b --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-webhook-receiver/src/types.rs @@ -0,0 +1,388 @@ +use crate::config::IntegrationConfig; +use crate::forwarding::GenericQueueForwarder; +use crate::{ + config::{ForwardDestination, VerificationScheme}, + forwarding::{Forwarder, ForwardingMethod}, + verification::{NoVerifier, SvixVerifier, VerificationMethod, Verifier}, +}; +use anyhow::Result; +use axum::{ + async_trait, + body::{Bytes, HttpBody}, + extract::FromRequest, + BoxError, +}; +use http::{HeaderMap, HeaderValue, Request}; +use serde::{Deserialize, Serialize}; +use std::{collections::HashMap, marker::PhantomData, sync::Arc}; + +#[derive(Clone, Debug)] +/// The [`InternalState`] is passed to the Axum route and is used to map the "IntegrationId" in the +/// URL to the configured [`Verifier`] and [`Forwarder`] variants. 
+pub struct InternalState(pub HashMap); +impl InternalState { + pub async fn from_routes(routes: &[IntegrationConfig]) -> Result { + let mut state_map = HashMap::new(); + + for cfg in routes { + let verifier = match &cfg.verification { + VerificationScheme::None => NoVerifier.into(), + VerificationScheme::Svix { secret } => SvixVerifier::new(Arc::new( + svix::webhooks::Webhook::new( + &secret.to_secret().expect("Error reading secret"), + ) + .expect("Invalid Svix secret"), + )) + .into(), + }; + + let forwarder = match &cfg.destination { + ForwardDestination::GCPPubSub(sender_cfg) => { + GenericQueueForwarder::from_gcp_pupsub_cfg(sender_cfg.clone()).await? + } + ForwardDestination::RabbitMQ(sender_cfg) => { + GenericQueueForwarder::from_rabbitmq_cfg(sender_cfg.clone()).await? + } + ForwardDestination::Redis(sender_cfg) => { + GenericQueueForwarder::from_redis_cfg(sender_cfg.clone()).await? + } + ForwardDestination::SQS(sender_cfg) => { + GenericQueueForwarder::from_sqs_cfg(sender_cfg.clone()).await? + } + } + .into(); + + state_map.insert( + cfg.name.clone(), + IntegrationState { + verifier, + forwarder, + }, + ); + } + + Ok(InternalState(state_map)) + } +} + +/// Each [`IntegrationId`] is a valid route for webhooks to be dispatched to managed by this server, +/// and each [`IntegrationId`] has an associated configuration which defines how the webhook is +/// verified (the [`VerificationScheme`]) and where the webhook is routed to once it is verified +/// (the [`ForwardDestination`]). +/// +/// Internally it is also associated with an [`IntegrationState`] which will contain the necessary +/// members to actually perform these actions eg. a handle to a [`FutureProducer`] instead of simply +/// the address(es) of the Kafka bootstrap server(s). +/// +/// This type is simply a wrapper for a [`String`] which *should* be safe to use in a URL. If it is +/// not a valid path component for a URL, then the [`IntegrationId`] will never receive any +/// webhooks. 
However, for simplicity, the inner [`String`] is not validated for URL-safety at this +/// time. +#[repr(transparent)] +#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)] +pub struct IntegrationId(String); + +impl AsRef for IntegrationId { + fn as_ref(&self) -> &str { + &self.0 + } +} + +/// The [`IntegrationState`] is a struct which is only able to be created via conversion from a +/// [`IntegrationConfig`]. This struct is what is associated with an [`IntegrationId`] internally +/// after the configuration has been read. +/// +/// What distinguishes it from the [`IntegrationConfig`] is that it contains the necessary members +/// for validating and forwarding a webhook instead of just containing the definition of how to +/// derive these necessary members. +#[derive(Clone, Debug)] +pub struct IntegrationState { + pub verifier: Verifier, + pub forwarder: Forwarder, +} + +/// Any arbitrary HTTP request which is not a webhook dispatched by Svix may also have arbitrary +/// validation associated with it by means of custom JavaScript. This JavaScript is evaluated by +/// the Deno JS runtime. +/// +/// The convention of the contained JavaScript is that it should include a function as a default +/// export which takes a single input. This input will be a JSON object including all headers that +/// came from the request in a map and the payload verbatim. This exported function must return a +/// `bool` for the associated [`IntegrationId`]'s route to function in any capacity. +/// +/// Should the `handler` function return `true`, then the request is deemed a valid webhook as per +/// the user's specifications and the webhook is then forwarded as with the Svix scheme via the +/// configured [`ForwardDestination`]. +/// +/// Should a `handler` return `false`, then the request is either silently discarded or logged at +/// the warning level depending on the value of `log_on_invalid` in the [`crate::config::Config`]. 
+/// +/// Should a `handler` throw an error or return a value that is not a `bool`, then an error will +/// be logged and the request is discarded. +#[repr(transparent)] +#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)] +pub struct JsCode(pub String); + +impl From for JsCode { + fn from(s: String) -> Self { + Self(s) + } +} + +/// The [`RequestFromParts`] is a structure consisting of all relevant parts of the HTTP request to +/// be validated by a [`Verifier`] implementor. This is to be immediately converted into the struct +/// [`SerializableRequest`] via its [`FromRequest`] implementation. +/// +/// NOTE: This struct is never to be used directly unless by proxy of the aforementioned impl of +/// [`FromRequest`]. It's simply used as an easy way to implement [`FromRequest`] via a macro. +#[derive(Clone, Debug, FromRequest)] +pub struct RequestFromParts { + headers: HeaderMap, + payload: Bytes, +} + +/// A simple marker trait to denote the state of a [`SerializableRequest`]. The only way to publicly +/// construct any [`SerializableRequest`]s is via the associated method on unvalidated +/// requests, [`SerializableRequest::validate`]. +pub trait RequestState {} + +#[derive(Clone, Copy, Debug)] +pub struct Unvalidated; +impl RequestState for Unvalidated {} + +#[derive(Clone, Copy, Debug)] +pub struct Validated; +impl RequestState for Validated {} + +/// This intermediary representation is necessary because it is preferable to serialize the headers +/// and/or body as a [`String`] over bytes when dealing with some [`VerificationMethod`]s and some +/// [`ForwardingMethod`]s. This struct represents both the headers and body as enums which allow for +/// either textual representations or byte representations when [`Serialize`]d via [`serde`]. 
+/// +/// On trying to convert a [`Standard`] variant into a [`StringSerializable`] variant, HTTP headers +/// will be represented textually if and only if they are completely ASCII, while any bodies will +/// attempt to be read as UTF-8 before falling back to bytes. +/// +/// NOTE: This conversion *should* be lazy. The [`String`] variant are only acceptable in a subset +/// of all cases, so lazy-conversion will prevent needless conversion back and forth. You may check +/// whether the conversion is required and/or helpful with [`VerificationMethod::want_string_rep`] +/// or [`VerificationMethod::need_string_rep`] plus the [`ForwardingMethod`] equivalents. +/// +/// The intended course of action is to attempt to convert to string-serializable variants of the +/// header map and the body immediately if either of the aforementioned methods are true -- but +/// only returning an [`Err`] response if it *needs* it. Then, if the validation is a success (see +/// [`SerializableRequest::validate`] and a validated equivalent is returned, then the +/// same checks are to be performed, but with the [`ForwardingMethod`] methods before being sent to +/// the appropriate [`ForwardingMethod`] implementor. 
+#[derive(Clone, Debug, Serialize)] +pub struct SerializableRequest { + headers: SerializableHeaderMap, + payload: SerializablePayload, + + #[serde(skip)] + _pd: PhantomData, +} + +impl SerializableRequest { + pub fn headers(&self) -> &SerializableHeaderMap { + &self.headers + } + + pub fn payload(&self) -> &SerializablePayload { + &self.payload + } +} + +impl From for SerializableRequest { + fn from(value: RequestFromParts) -> Self { + Self { + headers: SerializableHeaderMap::Standard(value.headers), + payload: SerializablePayload::Standard(value.payload.to_vec()), + + _pd: PhantomData, + } + } +} + +#[async_trait] +impl FromRequest for SerializableRequest +where + S: Send + Sync, + B: HttpBody + Send + Sync + 'static, + B::Data: Send, + B::Error: Into, +{ + type Rejection = >::Rejection; + + async fn from_request(req: Request, state: &S) -> Result { + RequestFromParts::from_request(req, state) + .await + .map(Into::into) + } +} + +impl SerializableRequest { + /// Given a specific validator + pub async fn validate( + mut self, + verifier: &V, + ) -> Result, http::StatusCode> { + // Do relevant conversions to [`String`] representaitons if wanted/needed + match (verifier.want_string_rep(), verifier.need_string_rep()) { + // Needed + (true, true) | (false, true) => { + self.headers = self + .headers + .try_to_string() + .map_err(|_| http::StatusCode::BAD_REQUEST)?; + self.payload = self + .payload + .try_to_string() + .map_err(|_| http::StatusCode::BAD_REQUEST)?; + } + + // Wanted, but not needed + (true, false) => { + self.headers = match self.headers.try_to_string() { + Ok(h) => h, + Err(h) => h, + }; + + self.payload = match self.payload.try_to_string() { + Ok(p) => p, + Err(p) => p, + }; + } + + // Not wanted + (false, false) => {} + }; + + // FIXME: No cloning + // Then actually use the [`VerificationMethod`] implementor. 
+ match verifier.validate(self.clone()).await { + Ok(true) => Ok(SerializableRequest:: { + headers: self.headers, + payload: self.payload, + + _pd: PhantomData, + }), + + Ok(false) => { + // FIXME: Read config to know whether to log + Err(http::StatusCode::BAD_REQUEST) + } + + Err(e) => { + tracing::error!("Error validating request: {}", e); + println!("Error validating request: {}", e); + Err(http::StatusCode::INTERNAL_SERVER_ERROR) + } + } + } +} + +impl SerializableRequest { + pub async fn forward(self, f: &F) -> http::StatusCode { + match f.forward(self).await { + Ok(c) => c, + Err(e) => { + tracing::error!("Error forwarding request: {}", e); + http::StatusCode::INTERNAL_SERVER_ERROR + } + } + } +} + +#[derive(Clone, Debug)] +pub enum SerializableHeaderMap { + Standard(HeaderMap), + StringSerializable(HashMap), +} + +impl<'a> IntoIterator for &'a SerializableHeaderMap { + type Item = (&'a str, &'a [u8]); + type IntoIter = SerializableHeaderMapIter<'a>; + + fn into_iter(self) -> Self::IntoIter { + match self { + SerializableHeaderMap::Standard(hm) => SerializableHeaderMapIter::HeaderMap(hm.iter()), + SerializableHeaderMap::StringSerializable(hm) => { + SerializableHeaderMapIter::HashMap(hm.iter()) + } + } + } +} + +impl SerializableHeaderMap { + pub fn try_to_string(self) -> Result { + match self { + Self::Standard(header_map) => Ok(Self::StringSerializable( + header_map + .iter() + .map(|(name, value)| Ok((name.as_str().to_owned(), value.to_str()?.to_owned()))) + .collect::>>() + .map_err(|_| Self::Standard(header_map))?, + )), + Self::StringSerializable(hash_map) => Ok(Self::StringSerializable(hash_map)), + } + } + + pub fn len(&self) -> usize { + match self { + Self::Standard(m) => m.len(), + Self::StringSerializable(m) => m.len(), + } + } +} + +/// Serialize is not implemented on [`HeaderMap`]s themselves, so custom serialization is required. 
+impl Serialize for SerializableHeaderMap { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + match self { + Self::Standard(header_map) => header_map + .iter() + .map(|(name, value)| (name.as_str().to_owned(), value.as_bytes().to_vec())) + .collect::>>() + .serialize(serializer), + Self::StringSerializable(hash_map) => hash_map.serialize(serializer), + } + } +} + +pub enum SerializableHeaderMapIter<'a> { + HeaderMap(http::header::Iter<'a, HeaderValue>), + HashMap(std::collections::hash_map::Iter<'a, String, String>), +} + +impl<'a> Iterator for SerializableHeaderMapIter<'a> { + type Item = (&'a str, &'a [u8]); + + fn next(&mut self) -> Option { + match self { + Self::HeaderMap(hm) => hm.next().map(|(k, v)| (k.as_str(), v.as_bytes())), + Self::HashMap(hm) => hm.next().map(|(k, v)| (k.as_str(), v.as_bytes())), + } + } +} + +#[derive(Clone, Debug, Serialize)] +#[serde(untagged)] +pub enum SerializablePayload { + Standard(Vec), + StringSerializable(String), +} + +impl SerializablePayload { + fn try_to_string(self) -> Result { + match self { + Self::Standard(v) => Ok(Self::StringSerializable( + String::from_utf8(v).map_err(|e| Self::Standard(e.into_bytes()))?, + )), + Self::StringSerializable(s) => Ok(Self::StringSerializable(s)), + } + } +} diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/src/verification.rs b/webhook-bridge/svix-agent-plugin-webhook-receiver/src/verification.rs new file mode 100644 index 000000000..7fb2b9904 --- /dev/null +++ b/webhook-bridge/svix-agent-plugin-webhook-receiver/src/verification.rs @@ -0,0 +1,125 @@ +use crate::types::{SerializableHeaderMap, SerializablePayload, SerializableRequest, Unvalidated}; +use anyhow::Result; +use axum::async_trait; +use enum_dispatch::enum_dispatch; +use std::sync::Arc; +use svix::webhooks::Webhook; + +#[async_trait] +#[enum_dispatch] +pub trait VerificationMethod { + async fn validate(&self, req: SerializableRequest) -> Result; + + fn want_string_rep(&self) -> 
bool { + false + } + fn need_string_rep(&self) -> bool { + false + } +} + +#[derive(Clone)] +pub struct SvixVerifier { + webhook: Arc, +} + +impl SvixVerifier { + pub fn new(webhook: Arc) -> Self { + Self { webhook } + } +} + +impl std::fmt::Debug for SvixVerifier { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("SvixVerifier").finish() + } +} + +#[async_trait] +impl VerificationMethod for SvixVerifier { + /// This [`VerificationMethod::validate`] implementation *requires* that the headers *and* payload + /// be in their byte representations due to the requirements of the [`svix`] library. Please lazily + /// convert these values such as to avoid pointless back-and-forth conversions. + async fn validate(&self, req: SerializableRequest) -> Result { + let headers = req.headers(); + let payload = req.payload(); + + match (headers, payload) { + (SerializableHeaderMap::Standard(headers), SerializablePayload::Standard(payload)) => { + if self.webhook.verify(payload, headers).is_ok() { + Ok(true) + } else { + Ok(false) + } + } + + _ => { + anyhow::bail!("`SvixVerifier::validate` given string representations") + } + } + } +} + +#[derive(Clone, Copy, Debug)] +pub struct NoVerifier; + +#[async_trait] +impl VerificationMethod for NoVerifier { + async fn validate(&self, _req: SerializableRequest) -> Result { + Ok(true) + } +} + +// Allowed due to restrictions by [`enum_dispatch`] on variant names matching the structure names +#[allow(clippy::enum_variant_names)] +#[enum_dispatch(VerificationMethod)] +#[derive(Clone, Debug)] +pub enum Verifier { + SvixVerifier, + NoVerifier, +} + +#[cfg(test)] +mod tests { + use super::{SvixVerifier, VerificationMethod}; + use crate::types::SerializableRequest; + use axum::extract::FromRequest; + use std::sync::Arc; + use svix::webhooks::Webhook; + + #[tokio::test] + async fn test_svix_verification() { + let secret = "whsec_C2FVsBQIhrscChlQIMV+b5sSYspob7oD".to_owned(); + let webhook = 
Arc::new(Webhook::new(&secret).unwrap()); + + let payload = "example payload".as_bytes(); + let timestamp = chrono::Utc::now().timestamp(); + let signature = webhook.sign("msg_valid", timestamp, payload).unwrap(); + + let sv = SvixVerifier { webhook }; + + let req = http::request::Request::builder() + .method("POST") + .uri("test.uri") + .header("svix-id", "msg_valid") + .header("svix-signature", signature.clone()) + .header("svix-timestamp", &format!("{timestamp}")) + .body(axum::body::Full::new(payload)) + .unwrap(); + + let sr = SerializableRequest::from_request(req, &()).await.unwrap(); + assert!(sv.validate(sr).await.unwrap()); + + let req = http::request::Request::builder() + .method("POST") + .uri("test.uri") + .header("svix-id", "msg_invalid") + .header("svix-signature", signature) + .header("svix-timestamp", &format!("{timestamp}")) + .body(axum::body::Full::new(payload)) + .unwrap(); + + let sr = SerializableRequest::from_request(req, &()).await.unwrap(); + assert!(!sv.validate(sr).await.unwrap()); + } +} diff --git a/webhook-bridge/svix-agent-types/Cargo.toml b/webhook-bridge/svix-agent-types/Cargo.toml new file mode 100644 index 000000000..47b3ccc0e --- /dev/null +++ b/webhook-bridge/svix-agent-types/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "svix-agent-types" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +async-trait = "0.1" \ No newline at end of file diff --git a/webhook-bridge/svix-agent-types/src/lib.rs b/webhook-bridge/svix-agent-types/src/lib.rs new file mode 100644 index 000000000..c02a90e61 --- /dev/null +++ b/webhook-bridge/svix-agent-types/src/lib.rs @@ -0,0 +1,9 @@ +pub use async_trait::async_trait; + +/// Effectively a black box to the supervisor. +/// Plugins should run until they are done, and likely they should not be "done" until the program +/// exits. 
+#[async_trait] +pub trait Plugin: Send { + async fn run(&self) -> std::io::Result<()>; +} diff --git a/webhook-bridge/svix-agent.example.yaml b/webhook-bridge/svix-agent.example.yaml new file mode 100644 index 000000000..14202fb89 --- /dev/null +++ b/webhook-bridge/svix-agent.example.yaml @@ -0,0 +1,125 @@ +# Set the log level for the service. Supported: error, info, warn, debug, trace. Default: info +#log_level: "debug" + +# The log format that all output will follow. Supported: default, json +#log_format: "json" + +# The OpenTelemetry address to send events to if given. +#opentelemetry_address: "http://localhost:1234" + +# The OpenTelemetry service name to use. Default: "svix-agent" +# If the OpenTelemetry address is not set, this will do nothing. +#opentelemetry_service_name: "my-agent" + +# The ratio at which to sample spans when sending to OpenTelemetry. When not given it defaults +# to always sending. +# If the OpenTelemetry address is not set, this will do nothing. +#opentelemetry_sample_ratio: 0.1 + +# A list of plugins to run. +# Note: the service requires at least 1 valid plugin to be defined or else it will exit immediately. +plugins: +# GCP Pub/Sub Consumer +- type: "gcppubsubconsumer" + input: + subscription_id: "my-subscription" + # Optional - will fallback to looking at env vars when left unset. 
+ credentials_file: "/path/to/credentials.json" + output: + # Required (the Svix token to use when creating messages with this consumer) + token: "XYZ" + +# RabbitMQ Consumer +- type: "rabbitmqconsumer" + input: + # Required + uri: "amqp://guest:guest@localhost:5672/%2f" + # Required + queue_name: "my-queue" + # Optional (default: unset, managed by rabbitmq) + consumer_tag: "my-consumer-001" + # Optional (default: false) + requeue_on_nack: true + output: + # Required (the Svix token to use when creating messages with this consumer) + token: "XYZ" + +# Redis Consumer +- type: "redisconsumer" + input: + # Required + dsn: "redis://localhost:6379/" + # Required + queue_key: "my_queue" + # Required + consumer_name: "my_consumer" + # Required + consumer_group: "my_group" + # Required + max_connections: 4 + # Optional (default: false) + requeue_on_nack: true + output: + # Required (the Svix token to use when creating messages with this consumer) + token: "XYZ" + +# SQS Consumer +# Also remember to set your AWS credentials in env vars to use this: +# - `AWS_DEFAULT_REGION` +# - `AWS_ACCESS_KEY_ID` +# - `AWS_SECRET_ACCESS_KEY` +- type: "sqsconsumer" + input: + # Required + queue_dsn: "http://localhost:19324/000000000000/local" + # Optional (default: false) + override_endpoint: true + output: + # Required (the Svix token to use when creating messages with this consumer) + token: "XYZ" + +# Webhook Receiver +- type: "webhookreceiver" + listen_addr: "0.0.0.0:5000" + routes: + - name: "goog-forwarder" + verification: + type: "svix" + secret: "whsec_zzYYxx=" + destination: + type: "gcppubsub" + topic: "my-topic" + # Optional - falls back to env otherwise, eg. 
+ # - `GOOGLE_APPLICATION_CREDENTIALS` + # - `GOOGLE_APPLICATION_CREDENTIALS_JSON` + credentials_file: "/path/to/creds.json" + + - name: "open-rabbit" + verification: + type: "none" + destination: + type: "rabbitmq" + uri: "amqp://guest:guest@localhost:5672/%2f" + exchange: "" + routing_key: "example" + + - name: "secure-rabbit" + verification: + type: "svix" + secret: "whsec_zzYYxx=" + destination: + type: "rabbitmq" + uri: "amqp://guest:guest@localhost:5672/%2f" + exchange: "" + routing_key: "example" + + - name: "amz" + verification: + type: "none" + destination: + # Note that the SQS forwarder requires credentials to be set as environment vars: + # - `AWS_DEFAULT_REGION` + # - `AWS_ACCESS_KEY_ID` + # - `AWS_SECRET_ACCESS_KEY` + type: "sqs" + queue_dsn: "https://example.aws.com/my-queue" diff --git a/webhook-bridge/svix-agent/Cargo.toml b/webhook-bridge/svix-agent/Cargo.toml new file mode 100644 index 000000000..723343ce1 --- /dev/null +++ b/webhook-bridge/svix-agent/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "svix-agent" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +clap = { version = "4.2.4", features = ["env", "derive"] } +lazy_static = "1.4" +opentelemetry = { version = "0.18.0", features = ["rt-tokio"] } +opentelemetry-http = "0.7.0" +opentelemetry-otlp = { version = "0.11.0", features = ["metrics", "grpc-tonic", "http-proto", "reqwest-client"] } +serde = { version = "1.0", features = ["derive"] } +serde_yaml = "0.9.21" +svix-agent-plugin-generic = { optional=true, path = "../svix-agent-plugin-generic" } +svix-agent-plugin-webhook-receiver = { optional=true, path = "../svix-agent-plugin-webhook-receiver" } +svix-agent-types = { path = "../svix-agent-types" } +svix-ksuid = "0.7.0" +tokio = { version = "1", features=["full"] } +tracing = "0.1" +tracing-opentelemetry = "0.18.0" +tracing-subscriber = { version="0.3", features=["env-filter", "fmt", 
"json"] } + +[features] +default = ["gcp-pubsub", "rabbitmq", "redis", "sqs", "webhook-receiver"] + +gcp-pubsub = ["generic-queue"] +generic-queue = ["dep:svix-agent-plugin-generic"] +rabbitmq = ["generic-queue"] +redis = ["generic-queue"] +sqs = ["generic-queue"] +webhook-receiver = ["dep:svix-agent-plugin-webhook-receiver"] diff --git a/webhook-bridge/svix-agent/src/config/mod.rs b/webhook-bridge/svix-agent/src/config/mod.rs new file mode 100644 index 000000000..aa1fb2960 --- /dev/null +++ b/webhook-bridge/svix-agent/src/config/mod.rs @@ -0,0 +1,101 @@ +use serde::Deserialize; +use svix_agent_types::Plugin; +use tracing::Level; + +#[derive(Deserialize)] +pub struct Config { + #[serde(default)] + pub plugins: Vec, + /// The log level to run the service with. Supported: info, debug, trace + #[serde(default)] + pub log_level: LogLevel, + /// The log format that all output will follow. Supported: default, json + #[serde(default)] + pub log_format: LogFormat, + /// The OpenTelemetry service name to use + pub opentelemetry_service_name: Option, + /// The OpenTelemetry address to send events to if given. + pub opentelemetry_address: Option, + /// The ratio at which to sample spans when sending to OpenTelemetry. When not given it defaults + /// to always sending. If the OpenTelemetry address is not set, this will do nothing. + pub opentelemetry_sample_ratio: Option, +} + +#[derive(Clone, Debug, Default, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum LogLevel { + #[default] + Info, + Debug, + Trace, +} + +impl ToString for LogLevel { + fn to_string(&self) -> String { + match self { + Self::Info => Level::INFO, + Self::Debug => Level::DEBUG, + Self::Trace => Level::TRACE, + } + .to_string() + } +} + +#[derive(Clone, Debug, Default, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum LogFormat { + #[default] + Default, + Json, +} + +// FIXME: ideally we wouldn't need to modify `ConsumerConfig` when adding new plugins. 
+// Possibly we could codegen this type via macro that look at a data/cfg file, or in +// a build script. +#[derive(Deserialize)] +#[serde(tag = "type")] +#[serde(rename_all = "lowercase")] +pub enum PluginConfig { + #[cfg(feature = "gcp-pubsub")] + GCPPubSubConsumer(svix_agent_plugin_generic::GCPPubSubConsumerConfig), + + #[cfg(feature = "rabbitmq")] + RabbitMQConsumer(svix_agent_plugin_generic::RabbitMqConsumerConfig), + + #[cfg(feature = "redis")] + RedisConsumer(svix_agent_plugin_generic::RedisConsumerConfig), + + #[cfg(feature = "sqs")] + SqsConsumer(svix_agent_plugin_generic::SqsConsumerConfig), + + #[cfg(feature = "webhook-receiver")] + WebhookReceiver(svix_agent_plugin_webhook_receiver::WebhookReceiverPluginConfig), + + #[serde(other)] + Unknown, +} + +impl TryInto> for PluginConfig { + type Error = &'static str; + + fn try_into(self) -> Result, Self::Error> { + match self { + #[cfg(feature = "gcp-pubsub")] + PluginConfig::GCPPubSubConsumer(cc) => cc.try_into(), + + #[cfg(feature = "rabbitmq")] + PluginConfig::RabbitMQConsumer(cc) => cc.try_into(), + + #[cfg(feature = "redis")] + PluginConfig::RedisConsumer(cc) => cc.try_into(), + + #[cfg(feature = "sqs")] + PluginConfig::SqsConsumer(cc) => cc.try_into(), + + #[cfg(feature = "webhook-receiver")] + PluginConfig::WebhookReceiver(cc) => cc.try_into(), + + PluginConfig::Unknown => Err("unknown plugin"), + } + } +} diff --git a/webhook-bridge/svix-agent/src/main.rs b/webhook-bridge/svix-agent/src/main.rs new file mode 100644 index 000000000..3c54ab521 --- /dev/null +++ b/webhook-bridge/svix-agent/src/main.rs @@ -0,0 +1,186 @@ +use self::config::Config; +use clap::Parser; +use lazy_static::lazy_static; +use opentelemetry::runtime::Tokio; +use opentelemetry_otlp::WithExportConfig; +use std::path::PathBuf; +use svix_agent_types::Plugin; +use svix_ksuid::{KsuidLike as _, KsuidMs}; +use tracing_subscriber::prelude::*; + +mod config; + +lazy_static! { + // Seems like it would be useful to be able to configure this. 
+ // In some docker setups, hostname is sometimes the container id, and advertising this can be + // helpful. + pub static ref INSTANCE_ID: String = KsuidMs::new(None, None).to_string(); +} + +fn get_svc_identifiers(cfg: &Config) -> opentelemetry::sdk::Resource { + opentelemetry::sdk::Resource::new(vec![ + opentelemetry::KeyValue::new( + "service.name", + cfg.opentelemetry_service_name + .as_deref() + // FIXME: can we do something better? + .unwrap_or("svix-agent") + .to_owned(), + ), + opentelemetry::KeyValue::new("instance_id", INSTANCE_ID.to_owned()), + ]) +} + +fn setup_tracing(cfg: &Config) { + if std::env::var_os("RUST_LOG").is_none() { + const CRATE_NAME: &str = env!("CARGO_CRATE_NAME"); + let level = cfg.log_level.to_string(); + let var = vec![ + format!("{CRATE_NAME}={level}"), + // XXX: Assuming this applies to the Producer side (aka `og-ingester`) when we fold it back in. + format!("tower_http={level}"), + ]; + std::env::set_var("RUST_LOG", var.join(",")); + } + + let otel_layer = cfg.opentelemetry_address.as_ref().map(|addr| { + // Configure the OpenTelemetry tracing layer + opentelemetry::global::set_text_map_propagator( + opentelemetry::sdk::propagation::TraceContextPropagator::new(), + ); + + let exporter = opentelemetry_otlp::new_exporter() + .tonic() + .with_endpoint(addr); + + let tracer = opentelemetry_otlp::new_pipeline() + .tracing() + .with_exporter(exporter) + .with_trace_config( + opentelemetry::sdk::trace::config() + .with_sampler( + cfg.opentelemetry_sample_ratio + .map(opentelemetry::sdk::trace::Sampler::TraceIdRatioBased) + .unwrap_or(opentelemetry::sdk::trace::Sampler::AlwaysOn), + ) + .with_resource(get_svc_identifiers(cfg)), + ) + .install_batch(Tokio) + .unwrap(); + + tracing_opentelemetry::layer().with_tracer(tracer) + }); + + // Then initialize logging with an additional layer printing to stdout. 
This additional layer is + // either formatted normally or in JSON format + // Fails if the subscriber was already initialized, which we can safely and silently ignore. + let _ = match cfg.log_format { + config::LogFormat::Default => { + let stdout_layer = tracing_subscriber::fmt::layer(); + tracing_subscriber::Registry::default() + .with(otel_layer) + .with(stdout_layer) + .with(tracing_subscriber::EnvFilter::from_default_env()) + .try_init() + } + config::LogFormat::Json => { + let fmt = tracing_subscriber::fmt::format().json().flatten_event(true); + let json_fields = tracing_subscriber::fmt::format::JsonFields::new(); + + let stdout_layer = tracing_subscriber::fmt::layer() + .event_format(fmt) + .fmt_fields(json_fields); + + tracing_subscriber::Registry::default() + .with(otel_layer) + .with(stdout_layer) + .with(tracing_subscriber::EnvFilter::from_default_env()) + .try_init() + } + }; +} + +async fn supervise(consumers: Vec>) -> std::io::Result<()> { + let mut set = tokio::task::JoinSet::new(); + for consumer in consumers { + set.spawn(async move { + // FIXME: needs much better signaling for termination + loop { + let fut = consumer.run(); + // If this future returns, the consumer terminated unexpectedly. + if let Err(e) = fut.await { + tracing::warn!("plugin unexpectedly terminated: {}", e); + } else { + tracing::warn!("plugin unexpectedly terminated"); + } + } + }); + } + + // FIXME: add signal handling to trigger a (intentional) graceful shutdown. + + // FIXME: when a plugin exits unexpectedly, what do? + // Most consumers are probably stateful/brittle and may disconnect from time to time. + // Ideally none of these tasks would ever return Ok or Err. They'd run forever. + // Having the tasks themselves try to recover means if we see a task finish here, something + // must be really wrong, so maybe we trigger a shutdown of the rest when one stops here. 
+ while let Some(_res) = set.join_next().await { + // In order for plugins to coordinate a shutdown, maybe they could: + // - have a shutdown method and handle their own internal signalling, or maybe + // - take a oneshot channel as an arg to `run()` + // Basically we need something that formalizes the shutdown flow in a cross-crate + // friendly way. + todo!("graceful shutdown"); + } + Ok(()) +} + +#[derive(Parser)] +pub struct Args { + #[arg(short, long, env = "SVIX_AGENT_CFG")] + cfg: Option, +} + +#[tokio::main] +async fn main() -> std::io::Result<()> { + let args = Args::parse(); + + let config = args.cfg.unwrap_or_else(|| { + std::env::current_dir() + .expect("current dir") + .join("svix-agent.yaml") + }); + let cfg: Config = serde_yaml::from_str(&std::fs::read_to_string(&config).map_err(|e| { + let p = config.into_os_string().into_string().expect("config path"); + std::io::Error::new( + std::io::ErrorKind::Other, + format!("Failed to read {p}: {e}"), + ) + })?) + .map_err(|e| { + std::io::Error::new( + std::io::ErrorKind::Other, + format!("Failed to parse config: {}", e), + ) + })?; + setup_tracing(&cfg); + + tracing::info!("starting"); + + let mut consumers = Vec::with_capacity(cfg.plugins.len()); + for cc in cfg.plugins { + let consumer = cc.try_into().map_err(|e| { + std::io::Error::new( + std::io::ErrorKind::Other, + format!("Failed to configure consumer plugin: {}", e), + ) + })?; + consumers.push(consumer); + } + if consumers.is_empty() { + tracing::warn!("No consumers configured.") + } + supervise(consumers).await?; + tracing::info!("exiting..."); + Ok(()) +} diff --git a/webhook-bridge/testing-docker-compose.yml b/webhook-bridge/testing-docker-compose.yml new file mode 100644 index 000000000..f2ea049e6 --- /dev/null +++ b/webhook-bridge/testing-docker-compose.yml @@ -0,0 +1,28 @@ +version: "3.7" +services: + mq: + image: rabbitmq:3.11.11-management-alpine + ports: + - "5672:5672" + - "15672:15672" + + elasticmq: # Drop-in SQS replacement + image: 
softwaremill/elasticmq:1.3.14 + ports: + - "9324:9324" + - "9325:9325" + + redis: + image: redis:7 + ports: + - "6379:6379" + + gcp-pubsub: + image: gcr.io/google.com/cloudsdktool/google-cloud-cli:emulators + ports: + - "8085:8085" + command: [ + "gcloud", "beta", "emulators", "pubsub", "start", + "--project", "local-project", + "--host-port", "0.0.0.0:8085" + ] From cbe04b74717f7ca339537034acb6257602433440 Mon Sep 17 00:00:00 2001 From: Owen Nelson Date: Mon, 8 May 2023 14:30:58 -0700 Subject: [PATCH 2/8] renaming svix-agent to svix-webhook-bridge --- webhook-bridge/Cargo.lock | 42 +++++++++---------- webhook-bridge/Cargo.toml | 8 ++-- webhook-bridge/Dockerfile | 39 ++++++++--------- webhook-bridge/README.md | 22 +++++----- .../generic-queue/src/gcp_pubsub.rs | 2 +- .../Cargo.toml | 4 +- .../src/config.rs | 0 .../src/error.rs | 0 .../src/gcp_pubsub/mod.rs | 2 +- .../src/lib.rs | 2 +- .../tests/gcp_pubsub_consumer.rs | 6 +-- .../tests/rabbitmq_consumer.rs | 4 +- .../tests/redis_stream_consumer.rs | 4 +- .../tests/sqs_consumer.rs | 4 +- .../Cargo.toml | 4 +- .../src/config.rs | 0 .../src/forwarding.rs | 0 .../src/lib.rs | 2 +- .../src/runtime.rs | 0 .../src/types.rs | 0 .../src/verification.rs | 0 .../Cargo.toml | 2 +- .../src/lib.rs | 0 ....yaml => svix-webhook-bridge.example.yaml} | 2 +- .../Cargo.toml | 12 +++--- .../src/config/mod.rs | 12 +++--- .../src/main.rs | 28 +++++++------ webhook-bridge/testing-docker-compose.yml | 2 +- 28 files changed, 103 insertions(+), 100 deletions(-) rename webhook-bridge/{svix-agent-plugin-generic => svix-webhook-bridge-plugin-queue-consumer}/Cargo.toml (87%) rename webhook-bridge/{svix-agent-plugin-generic => svix-webhook-bridge-plugin-queue-consumer}/src/config.rs (100%) rename webhook-bridge/{svix-agent-plugin-generic => svix-webhook-bridge-plugin-queue-consumer}/src/error.rs (100%) rename webhook-bridge/{svix-agent-plugin-generic => svix-webhook-bridge-plugin-queue-consumer}/src/gcp_pubsub/mod.rs (98%) rename 
webhook-bridge/{svix-agent-plugin-generic => svix-webhook-bridge-plugin-queue-consumer}/src/lib.rs (99%) rename webhook-bridge/{svix-agent-plugin-generic => svix-webhook-bridge-plugin-queue-consumer}/tests/gcp_pubsub_consumer.rs (98%) rename webhook-bridge/{svix-agent-plugin-generic => svix-webhook-bridge-plugin-queue-consumer}/tests/rabbitmq_consumer.rs (99%) rename webhook-bridge/{svix-agent-plugin-generic => svix-webhook-bridge-plugin-queue-consumer}/tests/redis_stream_consumer.rs (99%) rename webhook-bridge/{svix-agent-plugin-generic => svix-webhook-bridge-plugin-queue-consumer}/tests/sqs_consumer.rs (99%) rename webhook-bridge/{svix-agent-plugin-webhook-receiver => svix-webhook-bridge-plugin-webhook-receiver}/Cargo.toml (83%) rename webhook-bridge/{svix-agent-plugin-webhook-receiver => svix-webhook-bridge-plugin-webhook-receiver}/src/config.rs (100%) rename webhook-bridge/{svix-agent-plugin-webhook-receiver => svix-webhook-bridge-plugin-webhook-receiver}/src/forwarding.rs (100%) rename webhook-bridge/{svix-agent-plugin-webhook-receiver => svix-webhook-bridge-plugin-webhook-receiver}/src/lib.rs (98%) rename webhook-bridge/{svix-agent-plugin-webhook-receiver => svix-webhook-bridge-plugin-webhook-receiver}/src/runtime.rs (100%) rename webhook-bridge/{svix-agent-plugin-webhook-receiver => svix-webhook-bridge-plugin-webhook-receiver}/src/types.rs (100%) rename webhook-bridge/{svix-agent-plugin-webhook-receiver => svix-webhook-bridge-plugin-webhook-receiver}/src/verification.rs (100%) rename webhook-bridge/{svix-agent-types => svix-webhook-bridge-types}/Cargo.toml (83%) rename webhook-bridge/{svix-agent-types => svix-webhook-bridge-types}/src/lib.rs (100%) rename webhook-bridge/{svix-agent.example.yaml => svix-webhook-bridge.example.yaml} (98%) rename webhook-bridge/{svix-agent => svix-webhook-bridge}/Cargo.toml (65%) rename webhook-bridge/{svix-agent => svix-webhook-bridge}/src/config/mod.rs (84%) rename webhook-bridge/{svix-agent => 
svix-webhook-bridge}/src/main.rs (90%) diff --git a/webhook-bridge/Cargo.lock b/webhook-bridge/Cargo.lock index 8bed90a54..1da8cdffe 100644 --- a/webhook-bridge/Cargo.lock +++ b/webhook-bridge/Cargo.lock @@ -3125,7 +3125,19 @@ dependencies = [ ] [[package]] -name = "svix-agent" +name = "svix-ksuid" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75d773122e48817eb6eb74605cf799574a855bf4c7eb0c1bb06c005067123b13" +dependencies = [ + "base-encode", + "byteorder", + "getrandom 0.2.9", + "time 0.3.21", +] + +[[package]] +name = "svix-webhook-bridge" version = "0.1.0" dependencies = [ "clap", @@ -3135,10 +3147,10 @@ dependencies = [ "opentelemetry-otlp", "serde", "serde_yaml", - "svix-agent-plugin-generic", - "svix-agent-plugin-webhook-receiver", - "svix-agent-types", "svix-ksuid", + "svix-webhook-bridge-plugin-queue-consumer", + "svix-webhook-bridge-plugin-webhook-receiver", + "svix-webhook-bridge-types", "tokio", "tracing", "tracing-opentelemetry", @@ -3146,7 +3158,7 @@ dependencies = [ ] [[package]] -name = "svix-agent-plugin-generic" +name = "svix-webhook-bridge-plugin-queue-consumer" version = "0.1.0" dependencies = [ "aws-config", @@ -3164,7 +3176,7 @@ dependencies = [ "serde", "serde_json", "svix", - "svix-agent-types", + "svix-webhook-bridge-types", "tokio", "tokio-executor-trait", "tokio-reactor-trait", @@ -3174,7 +3186,7 @@ dependencies = [ ] [[package]] -name = "svix-agent-plugin-webhook-receiver" +name = "svix-webhook-bridge-plugin-webhook-receiver" version = "0.1.0" dependencies = [ "anyhow", @@ -3189,7 +3201,7 @@ dependencies = [ "serde_yaml", "smol", "svix", - "svix-agent-types", + "svix-webhook-bridge-types", "threadpool", "tokio", "tower-http", @@ -3197,24 +3209,12 @@ dependencies = [ ] [[package]] -name = "svix-agent-types" +name = "svix-webhook-bridge-types" version = "0.1.0" dependencies = [ "async-trait", ] -[[package]] -name = "svix-ksuid" -version = "0.7.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "75d773122e48817eb6eb74605cf799574a855bf4c7eb0c1bb06c005067123b13" -dependencies = [ - "base-encode", - "byteorder", - "getrandom 0.2.9", - "time 0.3.21", -] - [[package]] name = "syn" version = "1.0.109" diff --git a/webhook-bridge/Cargo.toml b/webhook-bridge/Cargo.toml index 5ab4dc496..1543ef6ea 100644 --- a/webhook-bridge/Cargo.toml +++ b/webhook-bridge/Cargo.toml @@ -1,8 +1,8 @@ [workspace] members = [ "generic-queue", - "svix-agent-types", - "svix-agent", - "svix-agent-plugin-generic", - "svix-agent-plugin-webhook-receiver", + "svix-webhook-bridge-types", + "svix-webhook-bridge", + "svix-webhook-bridge-plugin-queue-consumer", + "svix-webhook-bridge-plugin-webhook-receiver", ] diff --git a/webhook-bridge/Dockerfile b/webhook-bridge/Dockerfile index 9ad43a9a9..8526e4c22 100644 --- a/webhook-bridge/Dockerfile +++ b/webhook-bridge/Dockerfile @@ -23,28 +23,28 @@ WORKDIR /app COPY Cargo.toml . COPY Cargo.lock . COPY generic-queue/Cargo.toml generic-queue/ -COPY svix-agent-types/Cargo.toml svix-agent-types/ -COPY svix-agent-plugin-generic/Cargo.toml svix-agent-plugin-generic/ -COPY svix-agent-plugin-webhook-receiver/Cargo.toml svix-agent-plugin-webhook-receiver/ -COPY svix-agent/Cargo.toml svix-agent/ +COPY svix-webhook-bridge-types/Cargo.toml svix-webhook-bridge-types/ +COPY svix-webhook-bridge-plugin-queue-consumer/Cargo.toml svix-webhook-bridge-plugin-queue-consumer/ +COPY svix-webhook-bridge-plugin-webhook-receiver/Cargo.toml svix-webhook-bridge-plugin-webhook-receiver/ +COPY svix-webhook-bridge/Cargo.toml svix-webhook-bridge/ RUN set -ex ;\ mkdir generic-queue/src ;\ - mkdir svix-agent-plugin-generic/src ;\ - mkdir svix-agent-plugin-webhook-receiver/src ;\ - mkdir svix-agent-types/src ;\ - mkdir svix-agent/src ;\ + mkdir svix-webhook-bridge-plugin-queue-consumer/src ;\ + mkdir svix-webhook-bridge-plugin-webhook-receiver/src ;\ + mkdir svix-webhook-bridge-types/src ;\ + mkdir 
svix-webhook-bridge/src ;\ echo '' > generic-queue/src/lib.rs ;\ - echo '' > svix-agent-plugin-generic/src/lib.rs ;\ - echo '' > svix-agent-plugin-webhook-receiver/src/lib.rs ;\ - echo '' > svix-agent-types/src/lib.rs ;\ - echo 'fn main() { println!("Dummy!"); }' > svix-agent/src/main.rs ;\ + echo '' > svix-webhook-bridge-plugin-queue-consumer/src/lib.rs ;\ + echo '' > svix-webhook-bridge-plugin-webhook-receiver/src/lib.rs ;\ + echo '' > svix-webhook-bridge-types/src/lib.rs ;\ + echo 'fn main() { println!("Dummy!"); }' > svix-webhook-bridge/src/main.rs ;\ cargo build --release ;\ rm -rf \ generic-queue/src \ - svix-agent-plugin-generic/src \ - svix-agent-plugin-webhook-receiver/src \ - svix-agent-types/src \ - svix-agent/src + svix-webhook-bridge-plugin-queue-consumer/src \ + svix-webhook-bridge-plugin-webhook-receiver/src \ + svix-webhook-bridge-types/src \ + svix-webhook-bridge/src COPY . . # touching the lib.rs/main.rs ensures cargo rebuilds them instead of considering them already built. @@ -68,7 +68,8 @@ RUN apt-get update ;\ USER appuser -COPY --from=build /app/target/release/svix-agent /usr/local/bin/svix-agent +COPY --from=build /app/target/release/svix-webhook-bridge /usr/local/bin/svix-webhook-bridge -# Will fail if there's no `svix-agent.yaml` in the CWD or `SVIX_AGENT_CFG` is not set to a valid path to a config -CMD ["svix-agent"] +# Will fail if there's no `svix-webhook-bridge.yaml` in the CWD or `SVIX_WEBHOOK_BRIDGE_CFG` is not set to a valid +# path to a config. +CMD ["svix-webhook-bridge"] diff --git a/webhook-bridge/README.md b/webhook-bridge/README.md index 5ed3ae73d..53f3ea9a6 100644 --- a/webhook-bridge/README.md +++ b/webhook-bridge/README.md @@ -1,23 +1,23 @@ -# Svix Agent +# Svix Webhook Bridge This service subscribes to a queue or stream and forwards each item to Svix when a valid message is found. 
## Usage ``` -svix-agent -c path/to/svix-agent.yaml +svix-webhook-bridge -c path/to/svix-webhook-bridge.yaml ``` ## Configuration -> For an annotated sample configuration see [the example config](svix-agent.example.yaml). +> For an annotated sample configuration see [the example config](svix-webhook-bridge.example.yaml). -`svix-agent` is organized in terms of "plugins" which are tasks that run in tandem. -Each plugin represents a unit of work performed while the agent while it runs. +`svix-webhook-bridge` is organized in terms of "plugins" which are tasks that run in tandem. +Each plugin represents a unit of work performed while the service runs. -Presently there are 2 "plugins" available for `svix-agent`. +Presently there are 2 "plugins" available for `svix-webhook-bridge`. -### svix-agent-plugin-generic +### svix-webhook-bridge-plugin-queue-consumer This plugin consumes messages from message queues to and forwards them to Svix to create messages. @@ -56,7 +56,7 @@ Messages received by these consumers must follow an expected format: For detail on the `message` field, see: Important to note that queues, exchanges, topics, or what have you, should be created and configured independently, -prior to using the agent plugin. There's nothing in place to automatically create these resources. +prior to using the plugin. There's nothing in place to automatically create these resources. The plugin will only try (and fail) to read from the stream in such a case. @@ -129,7 +129,7 @@ Note that the SQS consumer requires credentials to be set as environment vars: - `AWS_ACCESS_KEY_ID` - `AWS_SECRET_ACCESS_KEY` -> This incidentally means all SQS consumers configured for a given `svix-agent` will need to share these details. +> This incidentally means all SQS consumers configured for a given `svix-webhook-bridge` will need to share these details. 
```yaml plugins: @@ -145,12 +145,12 @@ plugins: ``` -### svix-agent-plugin-webhook-receiver +### svix-webhook-bridge-plugin-webhook-receiver This plugin starts an HTTP server which accepts webhooks and forwards them to one of the supported messaging systems. -Again, same as with `svix-agent-plugin-generic`, the supported systems are: +Again, same as with `svix-webhook-bridge-plugin-queue-consumer`, the supported systems are: - GCP Pub/Sub - RabbitMQ diff --git a/webhook-bridge/generic-queue/src/gcp_pubsub.rs b/webhook-bridge/generic-queue/src/gcp_pubsub.rs index 45e2daffb..cb1ea30a0 100644 --- a/webhook-bridge/generic-queue/src/gcp_pubsub.rs +++ b/webhook-bridge/generic-queue/src/gcp_pubsub.rs @@ -222,7 +222,7 @@ impl TaskQueueSend for GCPPubSubQueuePr let topic = self.client.topic(&self.topic); // Publishing to a non-existent topic will cause the publisher to wait (forever?) - // Giving this error will allow dependents like `svix-agent-plugin-webhook-receiver` to + // Giving this error will allow dependents like `svix-webhook-bridge-plugin-webhook-receiver` to // respond 500 immediately when this happens, instead of holding the connection open // indefinitely. if !topic.exists(None).await.map_err(QueueError::generic)? 
{ diff --git a/webhook-bridge/svix-agent-plugin-generic/Cargo.toml b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/Cargo.toml similarity index 87% rename from webhook-bridge/svix-agent-plugin-generic/Cargo.toml rename to webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/Cargo.toml index f9560b9b6..415b93eae 100644 --- a/webhook-bridge/svix-agent-plugin-generic/Cargo.toml +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "svix-agent-plugin-generic" +name = "svix-webhook-bridge-plugin-queue-consumer" version = "0.1.0" edition = "2021" @@ -11,7 +11,7 @@ generic_queue = { path = "../generic-queue" } serde_json = "1.0" serde = { version = "1.0", features = ["derive"] } svix = "0.84.1" -svix-agent-types = { path = "../svix-agent-types" } +svix-webhook-bridge-types = { path = "../svix-webhook-bridge-types" } tokio = { version = "1", features = ["full"] } tokio-executor-trait = "2.1" tokio-reactor-trait = "1.1" diff --git a/webhook-bridge/svix-agent-plugin-generic/src/config.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/config.rs similarity index 100% rename from webhook-bridge/svix-agent-plugin-generic/src/config.rs rename to webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/config.rs diff --git a/webhook-bridge/svix-agent-plugin-generic/src/error.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/error.rs similarity index 100% rename from webhook-bridge/svix-agent-plugin-generic/src/error.rs rename to webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/error.rs diff --git a/webhook-bridge/svix-agent-plugin-generic/src/gcp_pubsub/mod.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/gcp_pubsub/mod.rs similarity index 98% rename from webhook-bridge/svix-agent-plugin-generic/src/gcp_pubsub/mod.rs rename to webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/gcp_pubsub/mod.rs index 0e60a7a82..4209504b3 100644 --- 
a/webhook-bridge/svix-agent-plugin-generic/src/gcp_pubsub/mod.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/gcp_pubsub/mod.rs @@ -9,7 +9,7 @@ use generic_queue::gcp_pubsub::{ use generic_queue::{Delivery, TaskQueueBackend, TaskQueueReceive}; use std::time::{Duration, Instant}; use svix::api::Svix; -use svix_agent_types::{async_trait, Plugin}; +use svix_webhook_bridge_types::{async_trait, Plugin}; use tracing::instrument; pub struct GCPPubSubConsumerPlugin { diff --git a/webhook-bridge/svix-agent-plugin-generic/src/lib.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/lib.rs similarity index 99% rename from webhook-bridge/svix-agent-plugin-generic/src/lib.rs rename to webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/lib.rs index 199c51f6a..ae53a24fa 100644 --- a/webhook-bridge/svix-agent-plugin-generic/src/lib.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/lib.rs @@ -10,7 +10,7 @@ use generic_queue::{ }; use serde::{Deserialize, Serialize}; use svix::api::{MessageIn, PostOptions as PostOptions_, Svix}; -use svix_agent_types::{async_trait, Plugin}; +use svix_webhook_bridge_types::{async_trait, Plugin}; pub mod config; pub use config::{ diff --git a/webhook-bridge/svix-agent-plugin-generic/tests/gcp_pubsub_consumer.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/gcp_pubsub_consumer.rs similarity index 98% rename from webhook-bridge/svix-agent-plugin-generic/tests/gcp_pubsub_consumer.rs rename to webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/gcp_pubsub_consumer.rs index 366661302..2147dcd4c 100644 --- a/webhook-bridge/svix-agent-plugin-generic/tests/gcp_pubsub_consumer.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/gcp_pubsub_consumer.rs @@ -11,12 +11,12 @@ use std::time::Duration; use serde_json::json; use svix::api::MessageIn; -use svix_agent_plugin_generic::config::GCPPubSubInputOpts; -use svix_agent_plugin_generic::{ +use 
svix_webhook_bridge_plugin_queue_consumer::config::GCPPubSubInputOpts; +use svix_webhook_bridge_plugin_queue_consumer::{ config::{OutputOpts, SvixOptions}, CreateMessageRequest, GCPPubSubConsumerConfig, GCPPubSubConsumerPlugin, }; -use svix_agent_types::Plugin; +use svix_webhook_bridge_types::Plugin; use wiremock::matchers::method; use wiremock::{Mock, MockServer, ResponseTemplate}; diff --git a/webhook-bridge/svix-agent-plugin-generic/tests/rabbitmq_consumer.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/rabbitmq_consumer.rs similarity index 99% rename from webhook-bridge/svix-agent-plugin-generic/tests/rabbitmq_consumer.rs rename to webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/rabbitmq_consumer.rs index dacf471a2..54952f196 100644 --- a/webhook-bridge/svix-agent-plugin-generic/tests/rabbitmq_consumer.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/rabbitmq_consumer.rs @@ -7,11 +7,11 @@ use lapin::{options::QueueDeclareOptions, Channel, Connection, ConnectionPropert use serde_json::json; use std::time::Duration; use svix::api::MessageIn; -use svix_agent_plugin_generic::{ +use svix_webhook_bridge_plugin_queue_consumer::{ config::{OutputOpts, RabbitMqInputOpts, SvixOptions}, CreateMessageRequest, RabbitMqConsumerConfig, RabbitMqConsumerPlugin, }; -use svix_agent_types::Plugin; +use svix_webhook_bridge_types::Plugin; use wiremock::matchers::method; use wiremock::{Mock, MockServer, ResponseTemplate}; diff --git a/webhook-bridge/svix-agent-plugin-generic/tests/redis_stream_consumer.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/redis_stream_consumer.rs similarity index 99% rename from webhook-bridge/svix-agent-plugin-generic/tests/redis_stream_consumer.rs rename to webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/redis_stream_consumer.rs index 0349aea23..1e2e492fd 100644 --- a/webhook-bridge/svix-agent-plugin-generic/tests/redis_stream_consumer.rs +++ 
b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/redis_stream_consumer.rs @@ -6,11 +6,11 @@ use std::time::Duration; use redis::{AsyncCommands, Client}; use serde_json::json; use svix::api::MessageIn; -use svix_agent_plugin_generic::{ +use svix_webhook_bridge_plugin_queue_consumer::{ config::{OutputOpts, SvixOptions}, CreateMessageRequest, RedisConsumerConfig, RedisConsumerPlugin, RedisInputOpts, }; -use svix_agent_types::Plugin; +use svix_webhook_bridge_types::Plugin; use wiremock::matchers::method; use wiremock::{Mock, MockServer, ResponseTemplate}; diff --git a/webhook-bridge/svix-agent-plugin-generic/tests/sqs_consumer.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/sqs_consumer.rs similarity index 99% rename from webhook-bridge/svix-agent-plugin-generic/tests/sqs_consumer.rs rename to webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/sqs_consumer.rs index 719f325ec..ed654bc4b 100644 --- a/webhook-bridge/svix-agent-plugin-generic/tests/sqs_consumer.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/sqs_consumer.rs @@ -8,11 +8,11 @@ use std::time::Duration; use aws_sdk_sqs::Client; use serde_json::json; use svix::api::MessageIn; -use svix_agent_plugin_generic::{ +use svix_webhook_bridge_plugin_queue_consumer::{ config::{OutputOpts, SvixOptions}, CreateMessageRequest, SqsConsumerConfig, SqsConsumerPlugin, SqsInputOpts, }; -use svix_agent_types::Plugin; +use svix_webhook_bridge_types::Plugin; use wiremock::matchers::method; use wiremock::{Mock, MockServer, ResponseTemplate}; diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/Cargo.toml b/webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/Cargo.toml similarity index 83% rename from webhook-bridge/svix-agent-plugin-webhook-receiver/Cargo.toml rename to webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/Cargo.toml index 1facec549..969d2cece 100644 --- a/webhook-bridge/svix-agent-plugin-webhook-receiver/Cargo.toml +++ 
b/webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "svix-agent-plugin-webhook-receiver" +name = "svix-webhook-bridge-plugin-webhook-receiver" version = "0.1.0" edition = "2021" @@ -15,7 +15,7 @@ serde = { version = "1", features = ["derive"] } serde_json = "1" smol = "1.3" svix = "0.84.1" -svix-agent-types = { path = "../svix-agent-types" } +svix-webhook-bridge-types = { path = "../svix-webhook-bridge-types" } threadpool = "1" tracing = "0.1" tokio = { version = "1", features = ["full"] } diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/src/config.rs b/webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/config.rs similarity index 100% rename from webhook-bridge/svix-agent-plugin-webhook-receiver/src/config.rs rename to webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/config.rs diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/src/forwarding.rs b/webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/forwarding.rs similarity index 100% rename from webhook-bridge/svix-agent-plugin-webhook-receiver/src/forwarding.rs rename to webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/forwarding.rs diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/src/lib.rs b/webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/lib.rs similarity index 98% rename from webhook-bridge/svix-agent-plugin-webhook-receiver/src/lib.rs rename to webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/lib.rs index be8be54c0..6cc2986c5 100644 --- a/webhook-bridge/svix-agent-plugin-webhook-receiver/src/lib.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/lib.rs @@ -6,7 +6,7 @@ use axum::{ }; use serde::Deserialize; use std::net::SocketAddr; -use svix_agent_types::{async_trait, Plugin}; +use svix_webhook_bridge_types::{async_trait, Plugin}; use tracing::instrument; use types::{IntegrationId, IntegrationState, InternalState, 
SerializableRequest, Unvalidated}; diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/src/runtime.rs b/webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/runtime.rs similarity index 100% rename from webhook-bridge/svix-agent-plugin-webhook-receiver/src/runtime.rs rename to webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/runtime.rs diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/src/types.rs b/webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/types.rs similarity index 100% rename from webhook-bridge/svix-agent-plugin-webhook-receiver/src/types.rs rename to webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/types.rs diff --git a/webhook-bridge/svix-agent-plugin-webhook-receiver/src/verification.rs b/webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/verification.rs similarity index 100% rename from webhook-bridge/svix-agent-plugin-webhook-receiver/src/verification.rs rename to webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/verification.rs diff --git a/webhook-bridge/svix-agent-types/Cargo.toml b/webhook-bridge/svix-webhook-bridge-types/Cargo.toml similarity index 83% rename from webhook-bridge/svix-agent-types/Cargo.toml rename to webhook-bridge/svix-webhook-bridge-types/Cargo.toml index 47b3ccc0e..25dfc4c36 100644 --- a/webhook-bridge/svix-agent-types/Cargo.toml +++ b/webhook-bridge/svix-webhook-bridge-types/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "svix-agent-types" +name = "svix-webhook-bridge-types" version = "0.1.0" edition = "2021" diff --git a/webhook-bridge/svix-agent-types/src/lib.rs b/webhook-bridge/svix-webhook-bridge-types/src/lib.rs similarity index 100% rename from webhook-bridge/svix-agent-types/src/lib.rs rename to webhook-bridge/svix-webhook-bridge-types/src/lib.rs diff --git a/webhook-bridge/svix-agent.example.yaml b/webhook-bridge/svix-webhook-bridge.example.yaml similarity index 98% rename from webhook-bridge/svix-agent.example.yaml 
rename to webhook-bridge/svix-webhook-bridge.example.yaml index 14202fb89..6bc3ba2ea 100644 --- a/webhook-bridge/svix-agent.example.yaml +++ b/webhook-bridge/svix-webhook-bridge.example.yaml @@ -7,7 +7,7 @@ # The OpenTelemetry address to send events to if given. #opentelemetry_address: "http://localhost:1234" -# The OpenTelemetry service name to use. Default: "svix-agent" +# The OpenTelemetry service name to use. Default: "svix-webhook-bridge" # If the OpenTelemetry address is not set, this will do nothing. #opentelemetry_service_name: "my-agent" diff --git a/webhook-bridge/svix-agent/Cargo.toml b/webhook-bridge/svix-webhook-bridge/Cargo.toml similarity index 65% rename from webhook-bridge/svix-agent/Cargo.toml rename to webhook-bridge/svix-webhook-bridge/Cargo.toml index 723343ce1..eec8a87e9 100644 --- a/webhook-bridge/svix-agent/Cargo.toml +++ b/webhook-bridge/svix-webhook-bridge/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "svix-agent" +name = "svix-webhook-bridge" version = "0.1.0" edition = "2021" @@ -13,9 +13,9 @@ opentelemetry-http = "0.7.0" opentelemetry-otlp = { version = "0.11.0", features = ["metrics", "grpc-tonic", "http-proto", "reqwest-client"] } serde = { version = "1.0", features = ["derive"] } serde_yaml = "0.9.21" -svix-agent-plugin-generic = { optional=true, path = "../svix-agent-plugin-generic" } -svix-agent-plugin-webhook-receiver = { optional=true, path = "../svix-agent-plugin-webhook-receiver" } -svix-agent-types = { path = "../svix-agent-types" } +svix-webhook-bridge-plugin-queue-consumer = { optional=true, path = "../svix-webhook-bridge-plugin-queue-consumer" } +svix-webhook-bridge-plugin-webhook-receiver = { optional=true, path = "../svix-webhook-bridge-plugin-webhook-receiver" } +svix-webhook-bridge-types = { path = "../svix-webhook-bridge-types" } svix-ksuid = "0.7.0" tokio = { version = "1", features=["full"] } tracing = "0.1" @@ -26,8 +26,8 @@ tracing-subscriber = { version="0.3", features=["env-filter", "fmt", "json"] } default = 
["gcp-pubsub", "rabbitmq", "redis", "sqs", "webhook-receiver"] gcp-pubsub = ["generic-queue"] -generic-queue = ["dep:svix-agent-plugin-generic"] +generic-queue = ["dep:svix-webhook-bridge-plugin-queue-consumer"] rabbitmq = ["generic-queue"] redis = ["generic-queue"] sqs = ["generic-queue"] -webhook-receiver = ["dep:svix-agent-plugin-webhook-receiver"] +webhook-receiver = ["dep:svix-webhook-bridge-plugin-webhook-receiver"] diff --git a/webhook-bridge/svix-agent/src/config/mod.rs b/webhook-bridge/svix-webhook-bridge/src/config/mod.rs similarity index 84% rename from webhook-bridge/svix-agent/src/config/mod.rs rename to webhook-bridge/svix-webhook-bridge/src/config/mod.rs index aa1fb2960..967d3762a 100644 --- a/webhook-bridge/svix-agent/src/config/mod.rs +++ b/webhook-bridge/svix-webhook-bridge/src/config/mod.rs @@ -1,5 +1,5 @@ use serde::Deserialize; -use svix_agent_types::Plugin; +use svix_webhook_bridge_types::Plugin; use tracing::Level; #[derive(Deserialize)] @@ -57,19 +57,19 @@ pub enum LogFormat { #[serde(rename_all = "lowercase")] pub enum PluginConfig { #[cfg(feature = "gcp-pubsub")] - GCPPubSubConsumer(svix_agent_plugin_generic::GCPPubSubConsumerConfig), + GCPPubSubConsumer(svix_webhook_bridge_plugin_queue_consumer::GCPPubSubConsumerConfig), #[cfg(feature = "rabbitmq")] - RabbitMQConsumer(svix_agent_plugin_generic::RabbitMqConsumerConfig), + RabbitMQConsumer(svix_webhook_bridge_plugin_queue_consumer::RabbitMqConsumerConfig), #[cfg(feature = "redis")] - RedisConsumer(svix_agent_plugin_generic::RedisConsumerConfig), + RedisConsumer(svix_webhook_bridge_plugin_queue_consumer::RedisConsumerConfig), #[cfg(feature = "sqs")] - SqsConsumer(svix_agent_plugin_generic::SqsConsumerConfig), + SqsConsumer(svix_webhook_bridge_plugin_queue_consumer::SqsConsumerConfig), #[cfg(feature = "webhook-receiver")] - WebhookReceiver(svix_agent_plugin_webhook_receiver::WebhookReceiverPluginConfig), + 
WebhookReceiver(svix_webhook_bridge_plugin_webhook_receiver::WebhookReceiverPluginConfig), #[serde(other)] Unknown, diff --git a/webhook-bridge/svix-agent/src/main.rs b/webhook-bridge/svix-webhook-bridge/src/main.rs similarity index 90% rename from webhook-bridge/svix-agent/src/main.rs rename to webhook-bridge/svix-webhook-bridge/src/main.rs index 3c54ab521..4f79c60de 100644 --- a/webhook-bridge/svix-agent/src/main.rs +++ b/webhook-bridge/svix-webhook-bridge/src/main.rs @@ -4,8 +4,9 @@ use lazy_static::lazy_static; use opentelemetry::runtime::Tokio; use opentelemetry_otlp::WithExportConfig; use std::path::PathBuf; -use svix_agent_types::Plugin; +use std::time::Duration; use svix_ksuid::{KsuidLike as _, KsuidMs}; +use svix_webhook_bridge_types::Plugin; use tracing_subscriber::prelude::*; mod config; @@ -24,7 +25,7 @@ fn get_svc_identifiers(cfg: &Config) -> opentelemetry::sdk::Resource { cfg.opentelemetry_service_name .as_deref() // FIXME: can we do something better? - .unwrap_or("svix-agent") + .unwrap_or("svix-webhook-bridge") .to_owned(), ), opentelemetry::KeyValue::new("instance_id", INSTANCE_ID.to_owned()), @@ -100,19 +101,20 @@ fn setup_tracing(cfg: &Config) { }; } -async fn supervise(consumers: Vec>) -> std::io::Result<()> { +async fn supervise(plugins: Vec>) -> std::io::Result<()> { let mut set = tokio::task::JoinSet::new(); - for consumer in consumers { + for plugin in plugins { set.spawn(async move { // FIXME: needs much better signaling for termination loop { - let fut = consumer.run(); + let fut = plugin.run(); // If this future returns, the consumer terminated unexpectedly. 
if let Err(e) = fut.await { tracing::warn!("plugin unexpectedly terminated: {}", e); } else { tracing::warn!("plugin unexpectedly terminated"); } + tokio::time::sleep(Duration::from_secs(1)).await; } }); } @@ -137,7 +139,7 @@ async fn supervise(consumers: Vec>) -> std::io::Result<()> { #[derive(Parser)] pub struct Args { - #[arg(short, long, env = "SVIX_AGENT_CFG")] + #[arg(short, long, env = "SVIX_WEBHOOK_BRIDGE_CFG")] cfg: Option, } @@ -148,7 +150,7 @@ async fn main() -> std::io::Result<()> { let config = args.cfg.unwrap_or_else(|| { std::env::current_dir() .expect("current dir") - .join("svix-agent.yaml") + .join("svix-webhook-bridge.yaml") }); let cfg: Config = serde_yaml::from_str(&std::fs::read_to_string(&config).map_err(|e| { let p = config.into_os_string().into_string().expect("config path"); @@ -167,20 +169,20 @@ async fn main() -> std::io::Result<()> { tracing::info!("starting"); - let mut consumers = Vec::with_capacity(cfg.plugins.len()); + let mut plugins = Vec::with_capacity(cfg.plugins.len()); for cc in cfg.plugins { let consumer = cc.try_into().map_err(|e| { std::io::Error::new( std::io::ErrorKind::Other, - format!("Failed to configure consumer plugin: {}", e), + format!("Failed to configure plugin: {}", e), ) })?; - consumers.push(consumer); + plugins.push(consumer); } - if consumers.is_empty() { - tracing::warn!("No consumers configured.") + if plugins.is_empty() { + tracing::warn!("No plugins configured.") } - supervise(consumers).await?; + supervise(plugins).await?; tracing::info!("exiting..."); Ok(()) } diff --git a/webhook-bridge/testing-docker-compose.yml b/webhook-bridge/testing-docker-compose.yml index f2ea049e6..d49930795 100644 --- a/webhook-bridge/testing-docker-compose.yml +++ b/webhook-bridge/testing-docker-compose.yml @@ -1,6 +1,6 @@ version: "3.7" services: - mq: + rabbitmq: image: rabbitmq:3.11.11-management-alpine ports: - "5672:5672" From 0c0e14b1cc1a0aeb0dc016f7261ed2625f6ac61c Mon Sep 17 00:00:00 2001 From: Owen Nelson Date: Mon, 
8 May 2023 14:54:08 -0700 Subject: [PATCH 3/8] add docker publish for webhook-bridge --- .github/workflows/webhook-bridge-release.yml | 38 ++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 .github/workflows/webhook-bridge-release.yml diff --git a/.github/workflows/webhook-bridge-release.yml b/.github/workflows/webhook-bridge-release.yml new file mode 100644 index 000000000..9a35fa681 --- /dev/null +++ b/.github/workflows/webhook-bridge-release.yml @@ -0,0 +1,38 @@ +name: Webhook Bridge Release + +on: + release: + types: [published] + +jobs: + docker: + name: release docker + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@master + + - name: Setup QEMU + uses: docker/setup-qemu-action@v2 + + - name: Login Docker + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Derive Version Numbers + run: | + export REPO="${{ secrets.DOCKERHUB_USERNAME }}/svix-webhook-bridge" + echo DOCKER_TAGS="$(echo "${{ github.event.release.tag_name }}" | sed -E "s#v([0-9]+)\.([0-9]+)\.([0-9]+)#${REPO}:latest,${REPO}:v\1.\2.\3,${REPO}:v\1.\2,${REPO}:v\1#")" >> "$GITHUB_ENV" + + - name: Build and push Docker image + uses: docker/build-push-action@v2 + with: + context: ./webhook-bridge + file: ./webhook-bridge/Dockerfile + push: true + tags: ${{ env.DOCKER_TAGS }} + platforms: linux/amd64 From de54faf41b2169869158e2d5b4e39dcbad06257f Mon Sep 17 00:00:00 2001 From: Owen Nelson Date: Mon, 8 May 2023 14:59:28 -0700 Subject: [PATCH 4/8] switch schedule for action to run on branch push --- .github/workflows/webhook-bridge-release.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/webhook-bridge-release.yml b/.github/workflows/webhook-bridge-release.yml index 9a35fa681..ff58577da 100644 --- a/.github/workflows/webhook-bridge-release.yml +++ 
b/.github/workflows/webhook-bridge-release.yml @@ -1,8 +1,9 @@ name: Webhook Bridge Release on: - release: - types: [published] + push: + branches: + - onelson/bridge jobs: docker: From ddb3656ad84b46181aee882a452cb8dcefdc2909 Mon Sep 17 00:00:00 2001 From: Owen Nelson Date: Mon, 8 May 2023 15:04:34 -0700 Subject: [PATCH 5/8] workaround not having a release to read the version from --- .github/workflows/webhook-bridge-release.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/webhook-bridge-release.yml b/.github/workflows/webhook-bridge-release.yml index ff58577da..8afddede9 100644 --- a/.github/workflows/webhook-bridge-release.yml +++ b/.github/workflows/webhook-bridge-release.yml @@ -25,9 +25,10 @@ jobs: uses: docker/setup-buildx-action@v2 - name: Derive Version Numbers + # FIXME: don't have a release name when running on a branch push - hardcode to 0.0.0 for now run: | export REPO="${{ secrets.DOCKERHUB_USERNAME }}/svix-webhook-bridge" - echo DOCKER_TAGS="$(echo "${{ github.event.release.tag_name }}" | sed -E "s#v([0-9]+)\.([0-9]+)\.([0-9]+)#${REPO}:latest,${REPO}:v\1.\2.\3,${REPO}:v\1.\2,${REPO}:v\1#")" >> "$GITHUB_ENV" + echo DOCKER_TAGS="$(echo "v0.0.0" | sed -E "s#v([0-9]+)\.([0-9]+)\.([0-9]+)#${REPO}:latest,${REPO}:v\1.\2.\3,${REPO}:v\1.\2,${REPO}:v\1#")" >> "$GITHUB_ENV" - name: Build and push Docker image uses: docker/build-push-action@v2 From ad7e151b741f6401e7ff54eb184fbff54612ddc0 Mon Sep 17 00:00:00 2001 From: Owen Nelson Date: Mon, 8 May 2023 16:00:49 -0700 Subject: [PATCH 6/8] add remarks about SVIX_WEBHOOK_BRIDGE_CFG in README --- webhook-bridge/README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/webhook-bridge/README.md b/webhook-bridge/README.md index 53f3ea9a6..bac105fb6 100644 --- a/webhook-bridge/README.md +++ b/webhook-bridge/README.md @@ -10,6 +10,12 @@ svix-webhook-bridge -c path/to/svix-webhook-bridge.yaml ## Configuration +The CLI itself exposes only a single flag (`-c`, `--cfg`) used 
to set the path for the config file. +The location of the config file can also be set with the `SVIX_WEBHOOK_BRIDGE_CFG` env var. +The config file itself does the heavy lifting. + +When unset, the current working directory is checked for a file named `svix-webhook-bridge.yaml`. + > For an annotated sample configuration see [the example config](svix-webhook-bridge.example.yaml). `svix-webhook-bridge` is organized in terms of "plugins" which are tasks that run in tandem. From a8d2ca1045279ca0a0d732658ad63ae2baec96a3 Mon Sep 17 00:00:00 2001 From: svix-onelson <123012825+svix-onelson@users.noreply.github.com> Date: Fri, 12 May 2023 07:47:39 -0700 Subject: [PATCH 7/8] add js transforms to bridge (#917) Embeds a deno runtime in the bridge binary to allow ad hoc reshaping of messages read from queues before they are sent to Svix. ## Motivation When consuming JSON from messaging systems to generate new webhooks, it's common to need to transform the payload before making the Create Message request to Svix. ## Solution Embedding a JS runtime into the bridge process allows us to run user-defined functions on the payloads as they move from input to output. With this diff, plugin instances provided by `svix-webhook-bridge-plugin-queue-consumer` can now evaluate js included in the plugin config: ```yaml plugins: - type: "rabbitmqconsumer" input: uri: "amqp://guest:guest@localhost:5672/%2f" queue_name: "local" requeue_on_nack: false transformation: | function handler(input) { return { app_id: input.key, message: { eventType: input.event_type, payload: input.data } }; } output: token: "***************" ``` The `transformation` key can now be set on any of these consumers. The JS fragment should include a default export of a function that accepts an object and returns an object. These functions can be used to reshape the payload as necessary. 
--- webhook-bridge/Cargo.lock | 252 ++++++++++++++--- webhook-bridge/Cargo.toml | 6 + webhook-bridge/README.md | 31 +- webhook-bridge/generic-queue/Cargo.toml | 2 +- .../src/config.rs | 4 + .../src/error.rs | 8 + .../src/gcp_pubsub/mod.rs | 78 ++--- .../src/lib.rs | 266 ++++++++++++------ .../tests/gcp_pubsub_consumer.rs | 98 ++++++- .../tests/rabbitmq_consumer.rs | 101 ++++++- .../tests/redis_stream_consumer.rs | 96 ++++++- .../tests/sqs_consumer.rs | 103 ++++++- .../src/runtime.rs | 173 ------------ .../svix-webhook-bridge-types/Cargo.toml | 4 +- .../svix-webhook-bridge-types/src/lib.rs | 49 ++++ .../svix-webhook-bridge.example.yaml | 44 +++ webhook-bridge/svix-webhook-bridge/Cargo.toml | 40 +++ .../svix-webhook-bridge/src/main.rs | 43 ++- .../svix-webhook-bridge/src/runtime/mod.rs | 49 ++++ .../svix-webhook-bridge/src/runtime/tests.rs | 56 ++++ 20 files changed, 1130 insertions(+), 373 deletions(-) delete mode 100644 webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/runtime.rs create mode 100644 webhook-bridge/svix-webhook-bridge/src/runtime/mod.rs create mode 100644 webhook-bridge/svix-webhook-bridge/src/runtime/tests.rs diff --git a/webhook-bridge/Cargo.lock b/webhook-bridge/Cargo.lock index 1da8cdffe..958cd454d 100644 --- a/webhook-bridge/Cargo.lock +++ b/webhook-bridge/Cargo.lock @@ -639,7 +639,7 @@ dependencies = [ "aws-smithy-http", "aws-smithy-types", "http", - "rustc_version", + "rustc_version 0.4.0", "tracing", ] @@ -711,6 +711,12 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a17bd29f7c70f32e9387f4d4acfa5ea7b7749ef784fb78cf382df97069337b8c" +[[package]] +name = "base64" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" + [[package]] name = "base64" version = "0.13.1" @@ -798,9 +804,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.12.1" +version = "3.12.2" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b1ce199063694f33ffb7dd4e0ee620741495c32833cde5aa08f02a0bf96f0c8" +checksum = "3c6ed94e98ecff0c12dd1b04c15ec0d7d9458ca8fe806cea6f12954efe74c63b" [[package]] name = "byteorder" @@ -810,9 +816,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -version = "1.4.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +checksum = "ec8a7b6a70fde80372154c65702f00a0f56f3e1c36abbc6c440484be248856db" [[package]] name = "bytes-utils" @@ -951,6 +957,12 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "cookie-factory" version = "0.3.2" @@ -1102,6 +1114,56 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1" +[[package]] +name = "deno_core" +version = "0.142.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65c902448001f76f4112341c226456d20ecffe4266051495c2181ad60144b38c" +dependencies = [ + "anyhow", + "deno_ops", + "futures", + "indexmap", + "libc", + "log", + "once_cell", + "parking_lot 0.12.1", + "pin-project", + "serde", + "serde_json", + "serde_v8", + "sourcemap", + "url", + "v8", +] + +[[package]] +name = "deno_ops" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a66c12cd4ed52c7a96b4ab4663d4b2a0098489986316bb2e36dcdaffe7ae6e3d" +dependencies = [ + "once_cell", + "proc-macro-crate", + "proc-macro2", + "quote", + "regex", + "syn 1.0.109", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version 0.4.0", + "syn 1.0.109", +] + [[package]] name = "des" version = "0.8.1" @@ -1258,6 +1320,16 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fslock" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57eafdd0c16f57161105ae1b98a1238f97645f2f588438b2949c99a2af9616bf" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "futures" version = "0.3.28" @@ -1761,6 +1833,12 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "if_chain" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb56e1aa765b4b4f3aadfab769793b7087bb03a4ea4920644a6d238e2df5b9ed" + [[package]] name = "indexmap" version = "1.9.3" @@ -1842,9 +1920,9 @@ checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" [[package]] name = "js-sys" -version = "0.3.61" +version = "0.3.62" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +checksum = "68c16e1bfd491478ab155fd8b4896b86f9ede344949b641e61501e07c2b8b4d5" dependencies = [ "wasm-bindgen", ] @@ -2414,6 +2492,16 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit", +] + [[package]] name = "proc-macro2" version = "1.0.56" @@ -2722,13 +2810,22 @@ dependencies = [ "winapi", ] +[[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" 
+dependencies = [ + "semver 0.9.0", +] + [[package]] name = "rustc_version" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver", + "semver 1.0.17", ] [[package]] @@ -2878,26 +2975,41 @@ dependencies = [ "libc", ] +[[package]] +name = "semver" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +dependencies = [ + "semver-parser", +] + [[package]] name = "semver" version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" + [[package]] name = "serde" -version = "1.0.162" +version = "1.0.163" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71b2f6e1ab5c2b98c05f0f35b236b22e8df7ead6ffbf51d7808da7f8817e7ab6" +checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.162" +version = "1.0.163" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2a0814352fd64b58489904a44ea8d90cb1a91dcb6b4f5ebabc32c8318e93cb6" +checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e" dependencies = [ "proc-macro2", "quote", @@ -2910,6 +3022,7 @@ version = "1.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" dependencies = [ + "indexmap", "itoa", "ryu", "serde", @@ -2947,6 +3060,19 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_v8" +version = "0.53.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b0c0792ac64702a8aba4f6520b190ea5651db42266136636ad2a6d04811686f" +dependencies = [ + "bytes", + "derive_more", + "serde", + "smallvec", + "v8", +] + [[package]] name = "serde_yaml" version = "0.9.21" @@ -3079,6 +3205,22 @@ dependencies = [ "winapi", ] +[[package]] +name = "sourcemap" +version = "6.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e031f2463ecbdd5f34c950f89f5c1e1032f22c0f8e3dc4bdb2e8b6658cf61eb" +dependencies = [ + "base64 0.11.0", + "if_chain", + "lazy_static", + "regex", + "rustc_version 0.2.3", + "serde", + "serde_json", + "url", +] + [[package]] name = "spin" version = "0.5.2" @@ -3140,17 +3282,22 @@ dependencies = [ name = "svix-webhook-bridge" version = "0.1.0" dependencies = [ + "anyhow", "clap", + "deno_core", "lazy_static", "opentelemetry", "opentelemetry-http", "opentelemetry-otlp", "serde", + "serde_json", "serde_yaml", + "smol", "svix-ksuid", "svix-webhook-bridge-plugin-queue-consumer", "svix-webhook-bridge-plugin-webhook-receiver", "svix-webhook-bridge-types", + "threadpool", "tokio", "tracing", "tracing-opentelemetry", @@ -3213,6 +3360,8 @@ name = "svix-webhook-bridge-types" version = "0.1.0" dependencies = [ "async-trait", + "serde_json", + "tokio", ] [[package]] @@ -3371,9 +3520,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.28.0" +version = "1.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3c786bf8134e5a3a166db9b29ab8f48134739014a3eca7bc6bfa95d673b136f" +checksum = "0aa32867d44e6f2ce3385e89dceb990188b8bb0fb25b0cf576647a6f98ac5105" dependencies = [ "autocfg", "bytes", @@ -3491,6 +3640,23 @@ dependencies = [ "tracing", ] +[[package]] +name = "toml_datetime" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622" + 
+[[package]] +name = "toml_edit" +version = "0.19.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "239410c8609e8125456927e6707163a3b1fdb40561e4b803bc041f466ccfdc13" +dependencies = [ + "indexmap", + "toml_datetime", + "winnow", +] + [[package]] name = "tonic" version = "0.8.3" @@ -3617,9 +3783,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.30" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" dependencies = [ "once_cell", "valuable", @@ -3775,6 +3941,19 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +[[package]] +name = "v8" +version = "0.44.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3f92c29dd66c7342443280695afc5bb79d773c3aa3eb02978cf24f058ae2b3d" +dependencies = [ + "bitflags", + "fslock", + "lazy_static", + "libc", + "which", +] + [[package]] name = "valuable" version = "0.1.0" @@ -3835,9 +4014,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.84" +version = "0.2.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +checksum = "5b6cb788c4e39112fbe1822277ef6fb3c55cd86b95cb3d3c4c1c9597e4ac74b4" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -3845,24 +4024,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.84" +version = "0.2.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +checksum = "35e522ed4105a9d626d885b35d62501b30d9666283a5c8be12c14a8bdafe7822" 
dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.15", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.34" +version = "0.4.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454" +checksum = "083abe15c5d88556b77bdf7aef403625be9e327ad37c62c4e4129af740168163" dependencies = [ "cfg-if", "js-sys", @@ -3872,9 +4051,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.84" +version = "0.2.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +checksum = "358a79a0cb89d21db8120cbfb91392335913e4890665b1a7981d9e956903b434" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3882,28 +4061,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.84" +version = "0.2.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +checksum = "4783ce29f09b9d93134d41297aded3a712b7b979e9c6f28c32cb88c973a94869" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.15", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.84" +version = "0.2.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" +checksum = "a901d592cafaa4d711bc324edfaff879ac700b19c3dfd60058d2b445be2691eb" [[package]] name = "web-sys" -version = "0.3.61" +version = "0.3.62" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +checksum = "16b5f940c7edfdc6d12126d98c9ef4d1b3d470011c47c76a6581df47ad9ba721" dependencies = [ "js-sys", "wasm-bindgen", @@ -4126,6 
+4305,15 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +[[package]] +name = "winnow" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61de7bac303dc551fe038e2b3cef0f571087a47571ea6e79a87692ac99b99699" +dependencies = [ + "memchr", +] + [[package]] name = "winreg" version = "0.10.1" diff --git a/webhook-bridge/Cargo.toml b/webhook-bridge/Cargo.toml index 1543ef6ea..139b98333 100644 --- a/webhook-bridge/Cargo.toml +++ b/webhook-bridge/Cargo.toml @@ -1,4 +1,10 @@ [workspace] +# Earlier versions of deno fail to compile in a workspace because of wgpu-hal +# The "fix" is to enable resolver 2 at the workspace. Crates with edition 2021 +# use this by default, but workspaces are set independently for some reason. +# +resolver = "2" + members = [ "generic-queue", "svix-webhook-bridge-types", diff --git a/webhook-bridge/README.md b/webhook-bridge/README.md index bac105fb6..3de8ef1f1 100644 --- a/webhook-bridge/README.md +++ b/webhook-bridge/README.md @@ -33,12 +33,39 @@ Currently this supports the following messaging systems: - Redis - SQS -Generally instances of this plugin are configured in terms of inputs and outputs, where the input configuration varies -by the messaging system. +Generally instances of this plugin are configured in terms of inputs, _optional transformations_, and outputs, where +the input configuration varies by the messaging system. The output options control how the Svix client is built and configured. The sole required field is `token`. +The optional _transformation_ can be set to a JavaScript fragment which can be used to reshape the messages as they flow through. + +```yaml + +plugins: +- type: ... + input: + # ... snip ... 
+ + # Reshape the messages we get from the queue before they get sent to Svix + transformation: | + function handler(input) { + return { + app_id: input.key, + message: { + eventType: input.event_type, + payload: input.data + } + }; + } + + output: + # ... snip ... +``` + +Transformations should define a function called `handler` that accepts an object and returns an object. + Messages received by these consumers must follow an expected format: ``` diff --git a/webhook-bridge/generic-queue/Cargo.toml b/webhook-bridge/generic-queue/Cargo.toml index d34538595..ebb51b838 100644 --- a/webhook-bridge/generic-queue/Cargo.toml +++ b/webhook-bridge/generic-queue/Cargo.toml @@ -24,7 +24,7 @@ serde = { version = "1", features = ["derive", "rc"] } serde_json = "1" thiserror = "1" tokio = { version = "1", features = ["full"] } -tokio-util = { version = "0.7.8", optional = true } +tokio-util = { version = "0.7", optional = true } futures-util = { version = "0.3.28", optional = true } tracing = "0.1" diff --git a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/config.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/config.rs index a316319eb..4171a8f26 100644 --- a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/config.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/config.rs @@ -6,24 +6,28 @@ use svix::api::SvixOptions as _SvixOptions; #[derive(Debug, Default, Deserialize)] pub struct RabbitMqConsumerConfig { pub input: RabbitMqInputOpts, + pub transformation: Option, pub output: OutputOpts, } #[derive(Debug, Default, Deserialize)] pub struct RedisConsumerConfig { pub input: RedisInputOpts, + pub transformation: Option, pub output: OutputOpts, } #[derive(Debug, Default, Deserialize)] pub struct SqsConsumerConfig { pub input: SqsInputOpts, + pub transformation: Option, pub output: OutputOpts, } #[derive(Debug, Default, Deserialize)] pub struct GCPPubSubConsumerConfig { pub input: GCPPubSubInputOpts, + pub 
transformation: Option, pub output: OutputOpts, } diff --git a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/error.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/error.rs index 3b7812a9d..d43b93484 100644 --- a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/error.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/error.rs @@ -5,6 +5,7 @@ pub enum Error { Json(serde_json::Error), Queue(QueueError), Svix(svix::error::Error), + Generic(String), } impl From for Error { @@ -25,6 +26,12 @@ impl From for Error { } } +impl From for Error { + fn from(value: String) -> Self { + Self::Generic(value) + } +} + impl From for std::io::Error { fn from(value: Error) -> Self { match value { @@ -32,6 +39,7 @@ impl From for std::io::Error { Error::Json(e) => std::io::Error::new(std::io::ErrorKind::Other, e), Error::Queue(e) => std::io::Error::new(std::io::ErrorKind::Other, e), Error::Svix(e) => std::io::Error::new(std::io::ErrorKind::Other, e), + Error::Generic(e) => std::io::Error::new(std::io::ErrorKind::Other, e), } } } diff --git a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/gcp_pubsub/mod.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/gcp_pubsub/mod.rs index 4209504b3..ad2b1aa45 100644 --- a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/gcp_pubsub/mod.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/gcp_pubsub/mod.rs @@ -1,27 +1,37 @@ use crate::config::{GCPPubSubConsumerConfig, GCPPubSubInputOpts}; use crate::error::Error; -use crate::PLUGIN_NAME; use crate::PLUGIN_VERS; use crate::{create_svix_message, CreateMessageRequest}; +use crate::{run_inner, Consumer, PLUGIN_NAME}; use generic_queue::gcp_pubsub::{ GCPPubSubConfig, GCPPubSubDelivery, GCPPubSubQueueBackend, GCPPubSubQueueConsumer, }; use generic_queue::{Delivery, TaskQueueBackend, TaskQueueReceive}; -use std::time::{Duration, Instant}; +use std::time::Duration; use svix::api::Svix; -use 
svix_webhook_bridge_types::{async_trait, Plugin}; +use svix_webhook_bridge_types::{async_trait, JsObject, Plugin, TransformerTx}; use tracing::instrument; pub struct GCPPubSubConsumerPlugin { input_options: GCPPubSubInputOpts, svix_client: Svix, + transformer_tx: Option, + transformation: Option, } impl GCPPubSubConsumerPlugin { - pub fn new(GCPPubSubConsumerConfig { input, output }: GCPPubSubConsumerConfig) -> Self { + pub fn new( + GCPPubSubConsumerConfig { + input, + transformation, + output, + }: GCPPubSubConsumerConfig, + ) -> Self { Self { input_options: input, svix_client: Svix::new(output.token, output.svix_options.map(Into::into)), + transformer_tx: None, + transformation, } } @@ -49,10 +59,10 @@ impl GCPPubSubConsumerPlugin { } /// Parses the delivery as JSON and feeds it into [`create_svix_message`]. - /// Will nack the delivery if either the JSON parse step, or the request to svix fails. + /// Will nack the delivery if either the JSON parse, transformation, or the request to svix fails. 
#[instrument(skip_all, fields(messaging.operation = "process"))] - async fn process(&self, delivery: GCPPubSubDelivery) -> std::io::Result<()> { - let payload = match Delivery::::payload(&delivery) { + async fn process(&self, delivery: GCPPubSubDelivery) -> std::io::Result<()> { + let payload = match Delivery::::payload(&delivery) { Ok(p) => p, Err(e) => { tracing::warn!("{e}"); @@ -61,6 +71,19 @@ impl GCPPubSubConsumerPlugin { } }; + let payload = if let Some(script) = &self.transformation { + match self.transform(script.clone(), payload).await { + Err(e) => { + tracing::error!("nack: {e}"); + delivery.nack().await.map_err(Error::from)?; + return Ok(()); + } + Ok(x) => x, + } + } else { + payload + }; + match create_svix_message(&self.svix_client, payload).await { Ok(_) => { tracing::trace!("ack"); @@ -73,7 +96,13 @@ impl GCPPubSubConsumerPlugin { } Ok(()) } +} +#[async_trait] +impl Consumer for GCPPubSubConsumerPlugin { + fn transformer_tx(&self) -> Option<&TransformerTx> { + self.transformer_tx.as_ref() + } async fn consume(&self) -> std::io::Result<()> { let mut consumer = >::consuming_half( @@ -86,7 +115,6 @@ impl GCPPubSubConsumerPlugin { ) .await .map_err(Error::from)?; - tracing::debug!( "gcp pubsub consuming: {}", &self.input_options.subscription_id @@ -100,7 +128,6 @@ impl GCPPubSubConsumerPlugin { impl TryInto> for GCPPubSubConsumerConfig { type Error = &'static str; - fn try_into(self) -> Result, Self::Error> { Ok(Box::new(GCPPubSubConsumerPlugin::new(self))) } @@ -108,35 +135,10 @@ impl TryInto> for GCPPubSubConsumerConfig { #[async_trait] impl Plugin for GCPPubSubConsumerPlugin { + fn set_transformer(&mut self, tx: Option) { + self.transformer_tx = tx; + } async fn run(&self) -> std::io::Result<()> { - let mut fails: u64 = 0; - let mut last_fail = Instant::now(); - - tracing::info!( - "gcp pubsub starting: {}", - &self.input_options.subscription_id - ); - - loop { - if let Err(e) = self.consume().await { - tracing::error!("{e}"); - } - - 
tracing::error!( - "gcp pubsub disconnected: {}", - &self.input_options.subscription_id - ); - - if last_fail.elapsed() > Duration::from_secs(10) { - // reset the fail count if we didn't have a hiccup in the past short while. - tracing::trace!("been a while since last fail, resetting count"); - fails = 0; - } else { - fails += 1; - } - - last_fail = Instant::now(); - tokio::time::sleep(Duration::from_millis((300 * fails).min(3000))).await; - } + run_inner(self, "gcp pubsub", &self.input_options.subscription_id).await } } diff --git a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/lib.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/lib.rs index ae53a24fa..04dbe2dd1 100644 --- a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/lib.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/src/lib.rs @@ -1,5 +1,3 @@ -use std::time::{Duration, Instant}; - use generic_queue::{ rabbitmq::{ BasicProperties, BasicPublishOptions, ConnectionProperties, RabbitMqBackend, RabbitMqConfig, @@ -9,8 +7,11 @@ use generic_queue::{ Delivery, TaskQueueBackend, TaskQueueReceive, }; use serde::{Deserialize, Serialize}; +use std::time::{Duration, Instant}; use svix::api::{MessageIn, PostOptions as PostOptions_, Svix}; -use svix_webhook_bridge_types::{async_trait, Plugin}; +use svix_webhook_bridge_types::{ + async_trait, JsObject, JsReturn, Plugin, TransformerJob, TransformerTx, +}; pub mod config; pub use config::{ @@ -25,19 +26,52 @@ pub use gcp_pubsub::GCPPubSubConsumerPlugin; pub const PLUGIN_NAME: &str = env!("CARGO_PKG_NAME"); pub const PLUGIN_VERS: &str = env!("CARGO_PKG_VERSION"); +#[async_trait] +trait Consumer { + fn transformer_tx(&self) -> Option<&TransformerTx>; + async fn transform(&self, script: String, payload: JsObject) -> std::io::Result { + let (job, rx) = TransformerJob::new(script.clone(), payload); + self.transformer_tx() + .expect("transformations not configured") + .send(job) + .map_err(|e| 
Error::Generic(e.to_string()))?; + + let ret = rx + .await + .map_err(|_e| Error::Generic("transformation rx failed".to_string())) + .and_then(|x| { + x.map_err(|_e| Error::Generic("transformation execution failed".to_string())) + })?; + + match ret { + JsReturn::Object(v) => Ok(v), + JsReturn::Invalid => { + Err(Error::Generic("transformation produced unexpected value".to_string()).into()) + } + } + } + async fn consume(&self) -> std::io::Result<()>; +} + pub struct RabbitMqConsumerPlugin { input_options: RabbitMqInputOpts, svix_client: Svix, + transformer_tx: Option, + transformation: Option, } pub struct RedisConsumerPlugin { input_options: RedisInputOpts, svix_client: Svix, + transformer_tx: Option, + transformation: Option, } pub struct SqsConsumerPlugin { input_options: SqsInputOpts, svix_client: Svix, + transformer_tx: Option, + transformation: Option, } impl TryInto> for RabbitMqConsumerConfig { @@ -65,30 +99,42 @@ impl TryInto> for SqsConsumerConfig { } impl RabbitMqConsumerPlugin { - pub fn new(RabbitMqConsumerConfig { input, output }: RabbitMqConsumerConfig) -> Self { + pub fn new( + RabbitMqConsumerConfig { + input, + transformation, + output, + }: RabbitMqConsumerConfig, + ) -> Self { Self { input_options: input, svix_client: Svix::new(output.token, output.svix_options.map(Into::into)), + transformer_tx: None, + transformation, } } +} +#[async_trait] +impl Consumer for RabbitMqConsumerPlugin { + fn transformer_tx(&self) -> Option<&TransformerTx> { + self.transformer_tx.as_ref() + } async fn consume(&self) -> std::io::Result<()> { let mut consumer = - >::consuming_half( - RabbitMqConfig { - uri: self.input_options.uri.clone(), - connection_properties: ConnectionProperties::default(), - publish_exchange: String::new(), - publish_routing_key: String::new(), - publish_options: BasicPublishOptions::default(), - publish_properites: BasicProperties::default(), - consume_queue: self.input_options.queue_name.clone(), - consumer_tag: 
self.input_options.consumer_tag.clone().unwrap_or_default(), - consume_options: self.input_options.consume_opts.unwrap_or_default(), - consume_arguments: self.input_options.consume_args.clone().unwrap_or_default(), - requeue_on_nack: self.input_options.requeue_on_nack, - }, - ) + >::consuming_half(RabbitMqConfig { + uri: self.input_options.uri.clone(), + connection_properties: ConnectionProperties::default(), + publish_exchange: String::new(), + publish_routing_key: String::new(), + publish_options: BasicPublishOptions::default(), + publish_properites: BasicProperties::default(), + consume_queue: self.input_options.queue_name.clone(), + consumer_tag: self.input_options.consumer_tag.clone().unwrap_or_default(), + consume_options: self.input_options.consume_opts.unwrap_or_default(), + consume_arguments: self.input_options.consume_args.clone().unwrap_or_default(), + requeue_on_nack: self.input_options.requeue_on_nack, + }) .await .map_err(Error::from)?; @@ -112,7 +158,7 @@ impl RabbitMqConsumerPlugin { let span = tracing::error_span!("process", messaging.operation = "process"); let _enter = span.enter(); - let payload = match Delivery::::payload(&delivery) { + let payload = match Delivery::::payload(&delivery) { Ok(p) => p, Err(e) => { tracing::warn!("nack: {e}"); @@ -121,12 +167,24 @@ impl RabbitMqConsumerPlugin { } }; + let payload = if let Some(script) = &self.transformation { + match self.transform(script.clone(), payload).await { + Err(e) => { + tracing::error!("nack: {e}"); + delivery.nack().await.map_err(Error::from)?; + continue; + } + Ok(x) => x, + } + } else { + payload + }; + match create_svix_message(&self.svix_client, payload).await { Ok(_) => { tracing::trace!("ack"); delivery.ack().await.map_err(Error::from)? } - Err(e) => { tracing::error!("nack: {e}"); delivery.nack().await.map_err(Error::from)? 
@@ -134,47 +192,42 @@ impl RabbitMqConsumerPlugin { } } } - Ok(()) } } #[async_trait] impl Plugin for RabbitMqConsumerPlugin { + fn set_transformer(&mut self, tx: Option) { + self.transformer_tx = tx; + } async fn run(&self) -> std::io::Result<()> { - let mut fails: u64 = 0; - let mut last_fail = Instant::now(); - - tracing::info!("rabbitmq starting: {}", &self.input_options.queue_name); - - loop { - if let Err(e) = self.consume().await { - tracing::error!("{e}"); - } - tracing::error!("rabbitmq disconnected: {}", &self.input_options.queue_name); - - if last_fail.elapsed() > Duration::from_secs(10) { - // reset the fail count if we didn't have a hiccup in the past short while. - tracing::trace!("been a while since last fail, resetting count"); - fails = 0; - } else { - fails += 1; - } - - last_fail = Instant::now(); - tokio::time::sleep(Duration::from_millis((300 * fails).min(3000))).await; - } + run_inner(self, "rabbitmq", &self.input_options.queue_name).await } } impl RedisConsumerPlugin { - pub fn new(RedisConsumerConfig { input, output }: RedisConsumerConfig) -> Self { + pub fn new( + RedisConsumerConfig { + input, + transformation, + output, + }: RedisConsumerConfig, + ) -> Self { Self { input_options: input, svix_client: Svix::new(output.token, output.svix_options.map(Into::into)), + transformer_tx: None, + transformation, } } +} +#[async_trait] +impl Consumer for RedisConsumerPlugin { + fn transformer_tx(&self) -> Option<&TransformerTx> { + self.transformer_tx.as_ref() + } async fn consume(&self) -> std::io::Result<()> { let mut consumer = >::consuming_half( @@ -209,7 +262,7 @@ impl RedisConsumerPlugin { let span = tracing::error_span!("process", messaging.operation = "process"); let _enter = span.enter(); - let payload = match Delivery::::payload(&delivery) { + let payload = match Delivery::::payload(&delivery) { Ok(p) => p, Err(e) => { tracing::warn!("nack: {e}"); @@ -218,6 +271,19 @@ impl RedisConsumerPlugin { } }; + let payload = if let Some(script) = 
&self.transformation { + match self.transform(script.clone(), payload).await { + Err(e) => { + tracing::error!("nack: {e}"); + delivery.nack().await.map_err(Error::from)?; + continue; + } + Ok(x) => x, + } + } else { + payload + }; + match create_svix_message(&self.svix_client, payload).await { Ok(_) => { tracing::trace!("ack"); @@ -230,47 +296,42 @@ impl RedisConsumerPlugin { } } } - Ok(()) } } #[async_trait] impl Plugin for RedisConsumerPlugin { + fn set_transformer(&mut self, tx: Option) { + self.transformer_tx = tx; + } async fn run(&self) -> std::io::Result<()> { - let mut fails: u64 = 0; - let mut last_fail = Instant::now(); - - tracing::info!("redis starting: {}", &self.input_options.queue_key); - - loop { - if let Err(e) = self.consume().await { - tracing::error!("{e}"); - } - - tracing::error!("redis disconnected: {}", &self.input_options.queue_key); - if last_fail.elapsed() > Duration::from_secs(10) { - // reset the fail count if we didn't have a hiccup in the past short while. 
- tracing::trace!("been a while since last fail, resetting count"); - fails = 0; - } else { - fails += 1; - } - - last_fail = Instant::now(); - tokio::time::sleep(Duration::from_millis((300 * fails).min(3000))).await; - } + run_inner(self, "redis", &self.input_options.queue_key).await } } impl SqsConsumerPlugin { - pub fn new(SqsConsumerConfig { input, output }: SqsConsumerConfig) -> Self { + pub fn new( + SqsConsumerConfig { + input, + transformation, + output, + }: SqsConsumerConfig, + ) -> Self { Self { input_options: input, svix_client: Svix::new(output.token, output.svix_options.map(Into::into)), + transformer_tx: None, + transformation, } } +} +#[async_trait] +impl Consumer for SqsConsumerPlugin { + fn transformer_tx(&self) -> Option<&TransformerTx> { + self.transformer_tx.as_ref() + } async fn consume(&self) -> std::io::Result<()> { let mut consumer = >::consuming_half( @@ -301,7 +362,7 @@ impl SqsConsumerPlugin { let span = tracing::error_span!("process", messaging.operation = "process"); let _enter = span.enter(); - let payload = match Delivery::::payload(&delivery) { + let payload = match Delivery::::payload(&delivery) { Ok(p) => p, Err(e) => { tracing::warn!("nack: {e}"); @@ -310,6 +371,19 @@ impl SqsConsumerPlugin { } }; + let payload = if let Some(script) = &self.transformation { + match self.transform(script.clone(), payload).await { + Err(e) => { + tracing::error!("nack: {e}"); + delivery.nack().await.map_err(Error::from)?; + continue; + } + Ok(x) => x, + } + } else { + payload + }; + match create_svix_message(&self.svix_client, payload).await { Ok(_) => { tracing::trace!("ack"); @@ -329,32 +403,44 @@ impl SqsConsumerPlugin { #[async_trait] impl Plugin for SqsConsumerPlugin { + fn set_transformer(&mut self, tx: Option) { + self.transformer_tx = tx; + } async fn run(&self) -> std::io::Result<()> { - let mut fails: u64 = 0; - let mut last_fail = Instant::now(); + run_inner(self, "sqs", &self.input_options.queue_dsn).await + } +} - tracing::info!("sqs 
starting: {}", &self.input_options.queue_dsn); +async fn run_inner( + consumer: &impl Consumer, + system_name: &str, + source: &str, +) -> std::io::Result<()> { + let mut fails: u64 = 0; + let mut last_fail = Instant::now(); - loop { - if let Err(e) = self.consume().await { - tracing::error!("{e}"); - } + tracing::info!("{system_name} starting: {source}"); - tracing::error!("sqs disconnected: {}", &self.input_options.queue_dsn); + loop { + if let Err(e) = consumer.consume().await { + tracing::error!("{e}"); + } - if last_fail.elapsed() > Duration::from_secs(10) { - // reset the fail count if we didn't have a hiccup in the past short while. - tracing::trace!("been a while since last fail, resetting count"); - fails = 0; - } else { - fails += 1; - } + tracing::error!("{system_name} disconnected: {source}"); - last_fail = Instant::now(); - tokio::time::sleep(Duration::from_millis((300 * fails).min(3000))).await; + if last_fail.elapsed() > Duration::from_secs(10) { + // reset the fail count if we didn't have a hiccup in the past short while. 
+ tracing::trace!("been a while since last fail, resetting count"); + fails = 0; + } else { + fails += 1; } + + last_fail = Instant::now(); + tokio::time::sleep(Duration::from_millis((300 * fails).min(3000))).await; } } + #[derive(Clone, Default, Deserialize, Serialize)] pub struct PostOptions { idempotency_key: Option, @@ -376,12 +462,12 @@ pub struct CreateMessageRequest { pub post_options: Option, } -async fn create_svix_message(svix: &Svix, value: serde_json::Value) -> std::io::Result<()> { +async fn create_svix_message(svix: &Svix, value: JsObject) -> std::io::Result<()> { let CreateMessageRequest { app_id, message, post_options, - }: CreateMessageRequest = serde_json::from_value(value)?; + }: CreateMessageRequest = serde_json::from_value(value.into())?; let span = tracing::error_span!( "create_svix_message", app_id = app_id, diff --git a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/gcp_pubsub_consumer.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/gcp_pubsub_consumer.rs index 2147dcd4c..ffeb40ecd 100644 --- a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/gcp_pubsub_consumer.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/gcp_pubsub_consumer.rs @@ -16,18 +16,29 @@ use svix_webhook_bridge_plugin_queue_consumer::{ config::{OutputOpts, SvixOptions}, CreateMessageRequest, GCPPubSubConsumerConfig, GCPPubSubConsumerPlugin, }; -use svix_webhook_bridge_types::Plugin; -use wiremock::matchers::method; +use svix_webhook_bridge_types::{JsReturn, Plugin, TransformerJob}; +use wiremock::matchers::{body_partial_json, method}; use wiremock::{Mock, MockServer, ResponseTemplate}; const DEFAULT_PUBSUB_EMULATOR_HOST: &str = "localhost:8085"; -fn get_test_plugin(svix_url: String, subscription_id: String) -> GCPPubSubConsumerPlugin { +fn get_test_plugin( + svix_url: String, + subscription_id: String, + use_transformation: bool, +) -> GCPPubSubConsumerPlugin { 
GCPPubSubConsumerPlugin::new(GCPPubSubConsumerConfig { input: GCPPubSubInputOpts { subscription_id, credentials_file: None, }, + transformation: if use_transformation { + // The actual script doesn't matter since the test case will be performing the + // transformation, not the actual JS executor. + Some(String::from("export default function (x) { return x; }")) + } else { + None + }, output: OutputOpts { token: "xxxx".to_string(), svix_options: Some(SvixOptions { @@ -130,7 +141,78 @@ async fn test_consume_ok() { .expect(1); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), subscription.id()); + let plugin = get_test_plugin(mock_server.uri(), subscription.id(), false); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + let msg = CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }; + + publish(&topic, &serde_json::to_string(&msg).unwrap()).await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + handle.abort(); + + subscription.delete(None).await.ok(); + topic.delete(None).await.ok(); +} + +/// Push a msg on the queue. +/// Check to see if the svix server sees a request, but this time transform the payload. +#[tokio::test] +async fn test_consume_transformed_ok() { + let client = mq_connection().await; + let (topic, subscription) = create_test_queue(&client).await; + + let mock_server = MockServer::start().await; + // The mock will make asserts on drop (i.e. when the body of the test is returning). + // The `expect` call should ensure we see exactly 1 POST request. 
+ // + let mock = Mock::given(method("POST")) + // The transformed bit of the payload + .and(body_partial_json(json!({ "payload": { "good": "bye" } }))) + .respond_with(ResponseTemplate::new(202).set_body_json(json!({ + "eventType": "testing.things", + "payload": { + "_SVIX_APP_ID": "app_1234", + "_SVIX_EVENT_TYPE": "testing.things", + // The adjustment made via the transformation... + "good": "bye", + }, + "id": "msg_xxxx", + "timestamp": "2023-04-25T00:00:00Z" + }))) + .named("create_message") + .expect(1); + mock_server.register(mock).await; + + let mut plugin = get_test_plugin(mock_server.uri(), subscription.id(), true); + let (transformer_tx, mut transformer_rx) = + tokio::sync::mpsc::unbounded_channel::(); + let _handle = tokio::spawn(async move { + while let Some(x) = transformer_rx.recv().await { + let mut out = x.payload; + // Prune out the "hi" key. + out["message"]["payload"] + .as_object_mut() + .unwrap() + .remove("hi"); + // Add the "good" key. + out["message"]["payload"]["good"] = json!("bye"); + x.callback_tx.send(Ok(JsReturn::Object(out))).ok(); + } + }); + plugin.set_transformer(Some(transformer_tx)); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -170,7 +252,7 @@ async fn test_missing_app_id_nack() { .expect(0); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), subscription.id()); + let plugin = get_test_plugin(mock_server.uri(), subscription.id(), false); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -218,7 +300,7 @@ async fn test_missing_event_type_nack() { .expect(0); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), subscription.id()); + let plugin = get_test_plugin(mock_server.uri(), subscription.id(), false); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -271,7 +353,7 @@ async fn test_consume_svix_503() { .expect(1..); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), 
subscription.id()); + let plugin = get_test_plugin(mock_server.uri(), subscription.id(), false); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -309,7 +391,7 @@ async fn test_consume_svix_offline() { let mock_server = MockServer::start().await; - let plugin = get_test_plugin(mock_server.uri(), subscription.id()); + let plugin = get_test_plugin(mock_server.uri(), subscription.id(), false); // bye-bye svix... drop(mock_server); diff --git a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/rabbitmq_consumer.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/rabbitmq_consumer.rs index 54952f196..bf2cdb42a 100644 --- a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/rabbitmq_consumer.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/rabbitmq_consumer.rs @@ -11,17 +11,29 @@ use svix_webhook_bridge_plugin_queue_consumer::{ config::{OutputOpts, RabbitMqInputOpts, SvixOptions}, CreateMessageRequest, RabbitMqConsumerConfig, RabbitMqConsumerPlugin, }; -use svix_webhook_bridge_types::Plugin; -use wiremock::matchers::method; +use svix_webhook_bridge_types::{JsReturn, Plugin, TransformerJob}; +use wiremock::matchers::{body_partial_json, method}; use wiremock::{Mock, MockServer, ResponseTemplate}; -fn get_test_plugin(svix_url: String, mq_uri: &str, queue_name: &str) -> RabbitMqConsumerPlugin { +fn get_test_plugin( + svix_url: String, + mq_uri: &str, + queue_name: &str, + use_transformation: bool, +) -> RabbitMqConsumerPlugin { RabbitMqConsumerPlugin::new(RabbitMqConsumerConfig { input: RabbitMqInputOpts { uri: mq_uri.to_string(), queue_name: queue_name.to_string(), ..Default::default() }, + transformation: if use_transformation { + // The actual script doesn't matter since the test case will be performing the + // transformation, not the actual JS executor. 
+ Some(String::from("export default function (x) { return x; }")) + } else { + None + }, output: OutputOpts { token: "xxxx".to_string(), svix_options: Some(SvixOptions { @@ -102,7 +114,80 @@ async fn test_consume_ok() { .expect(1); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name); + let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name, false); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + let msg = CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }; + + publish(&channel, queue_name, &serde_json::to_vec(&msg).unwrap()).await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + handle.abort(); + channel + .queue_delete(queue_name, Default::default()) + .await + .ok(); +} +/// Push a msg on the queue. +/// Check to see if the svix server sees a request, but this time transform the payload. +#[tokio::test] +async fn test_consume_transformed_ok() { + let mq_conn = mq_connection(MQ_URI).await; + let channel = mq_conn.create_channel().await.unwrap(); + // setup the queue before running the consumer or the consumer will error out + let queue = declare_queue("", &channel).await; + let queue_name = queue.name().as_str(); + + let mock_server = MockServer::start().await; + // The mock will make asserts on drop (i.e. when the body of the test is returning). + // The `expect` call should ensure we see exactly 1 POST request. 
+ // + let mock = Mock::given(method("POST")) + .and(body_partial_json(json!({ "payload": { "good": "bye" } }))) + .respond_with(ResponseTemplate::new(202).set_body_json(json!({ + "eventType": "testing.things", + "payload": { + "_SVIX_APP_ID": "app_1234", + "_SVIX_EVENT_TYPE": "testing.things", + // The adjustment made via the transformation... + "good": "bye", + }, + "id": "msg_xxxx", + "timestamp": "2023-04-25T00:00:00Z" + }))) + .named("create_message") + .expect(1); + mock_server.register(mock).await; + + let mut plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name, true); + let (transformer_tx, mut transformer_rx) = + tokio::sync::mpsc::unbounded_channel::(); + let _handle = tokio::spawn(async move { + while let Some(x) = transformer_rx.recv().await { + let mut out = x.payload; + // Prune out the "hi" key. + out["message"]["payload"] + .as_object_mut() + .unwrap() + .remove("hi"); + // Add the "good" key. + out["message"]["payload"]["good"] = json!("bye"); + x.callback_tx.send(Ok(JsReturn::Object(out))).ok(); + } + }); + plugin.set_transformer(Some(transformer_tx)); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -146,7 +231,7 @@ async fn test_missing_app_id_nack() { .expect(0); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name); + let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name, false); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -199,7 +284,7 @@ async fn test_missing_event_type_nack() { .expect(0); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name); + let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name, false); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -253,7 +338,7 @@ async fn test_consume_svix_503() { .expect(1); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name); + let plugin = 
get_test_plugin(mock_server.uri(), MQ_URI, queue_name, false); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -296,7 +381,7 @@ async fn test_consume_svix_offline() { let mock_server = MockServer::start().await; - let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name); + let plugin = get_test_plugin(mock_server.uri(), MQ_URI, queue_name, false); // bye-bye svix... drop(mock_server); diff --git a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/redis_stream_consumer.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/redis_stream_consumer.rs index 1e2e492fd..16c1e966d 100644 --- a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/redis_stream_consumer.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/redis_stream_consumer.rs @@ -10,11 +10,15 @@ use svix_webhook_bridge_plugin_queue_consumer::{ config::{OutputOpts, SvixOptions}, CreateMessageRequest, RedisConsumerConfig, RedisConsumerPlugin, RedisInputOpts, }; -use svix_webhook_bridge_types::Plugin; -use wiremock::matchers::method; +use svix_webhook_bridge_types::{JsReturn, Plugin, TransformerJob}; +use wiremock::matchers::{body_partial_json, method}; use wiremock::{Mock, MockServer, ResponseTemplate}; -fn get_test_plugin(svix_url: String, queue_key: String) -> RedisConsumerPlugin { +fn get_test_plugin( + svix_url: String, + queue_key: String, + use_transformation: bool, +) -> RedisConsumerPlugin { RedisConsumerPlugin::new(RedisConsumerConfig { input: RedisInputOpts { dsn: "redis://localhost/".to_owned(), @@ -24,6 +28,13 @@ fn get_test_plugin(svix_url: String, queue_key: String) -> RedisConsumerPlugin { consumer_group: "test_cg".to_owned(), consumer_name: "test_cn".to_owned(), }, + transformation: if use_transformation { + // The actual script doesn't matter since the test case will be performing the + // transformation, not the actual JS executor. 
+ Some(String::from("export default function (x) { return x; }")) + } else { + None + }, output: OutputOpts { token: "xxxx".to_string(), svix_options: Some(SvixOptions { @@ -93,7 +104,76 @@ async fn test_consume_ok() { .expect(1); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), key.clone()); + let plugin = get_test_plugin(mock_server.uri(), key.clone(), false); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + let msg = CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }; + + publish(&client, &key, &serde_json::to_string(&msg).unwrap()).await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + handle.abort(); + + delete_test_stream(&client, &key).await; +} + +/// Push a msg on the queue. +/// Check to see if the svix server sees a request, but this time transform the payload. +#[tokio::test] +async fn test_consume_transformed_ok() { + let client = redis_connection().await; + let key = create_test_stream(&client).await; + + let mock_server = MockServer::start().await; + // The mock will make asserts on drop (i.e. when the body of the test is returning). + // The `expect` call should ensure we see exactly 1 POST request. + // + let mock = Mock::given(method("POST")) + .and(body_partial_json(json!({ "payload": { "good": "bye" } }))) + .respond_with(ResponseTemplate::new(202).set_body_json(json!({ + "eventType": "testing.things", + "payload": { + "_SVIX_APP_ID": "app_1234", + "_SVIX_EVENT_TYPE": "testing.things", + // The adjustment made via the transformation... 
+ "good": "bye", + }, + "id": "msg_xxxx", + "timestamp": "2023-04-25T00:00:00Z" + }))) + .named("create_message") + .expect(1); + mock_server.register(mock).await; + + let mut plugin = get_test_plugin(mock_server.uri(), key.clone(), true); + let (transformer_tx, mut transformer_rx) = + tokio::sync::mpsc::unbounded_channel::(); + let _handle = tokio::spawn(async move { + while let Some(x) = transformer_rx.recv().await { + let mut out = x.payload; + // Prune out the "hi" key. + out["message"]["payload"] + .as_object_mut() + .unwrap() + .remove("hi"); + // Add the "good" key. + out["message"]["payload"]["good"] = json!("bye"); + x.callback_tx.send(Ok(JsReturn::Object(out))).ok(); + } + }); + plugin.set_transformer(Some(transformer_tx)); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -132,7 +212,7 @@ async fn test_missing_app_id_nack() { .expect(0); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), key.clone()); + let plugin = get_test_plugin(mock_server.uri(), key.clone(), false); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -180,7 +260,7 @@ async fn test_missing_event_type_nack() { .expect(0); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), key.clone()); + let plugin = get_test_plugin(mock_server.uri(), key.clone(), false); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -229,7 +309,7 @@ async fn test_consume_svix_503() { .expect(1); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), key.clone()); + let plugin = get_test_plugin(mock_server.uri(), key.clone(), false); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -267,7 +347,7 @@ async fn test_consume_svix_offline() { let mock_server = MockServer::start().await; - let plugin = get_test_plugin(mock_server.uri(), key.clone()); + let plugin = get_test_plugin(mock_server.uri(), key.clone(), false); // bye-bye svix... 
drop(mock_server); diff --git a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/sqs_consumer.rs b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/sqs_consumer.rs index ed654bc4b..e79036600 100644 --- a/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/sqs_consumer.rs +++ b/webhook-bridge/svix-webhook-bridge-plugin-queue-consumer/tests/sqs_consumer.rs @@ -12,18 +12,29 @@ use svix_webhook_bridge_plugin_queue_consumer::{ config::{OutputOpts, SvixOptions}, CreateMessageRequest, SqsConsumerConfig, SqsConsumerPlugin, SqsInputOpts, }; -use svix_webhook_bridge_types::Plugin; -use wiremock::matchers::method; +use svix_webhook_bridge_types::{JsReturn, Plugin, TransformerJob}; +use wiremock::matchers::{body_partial_json, method}; use wiremock::{Mock, MockServer, ResponseTemplate}; const ROOT_URL: &str = "http://localhost:9324"; -fn get_test_plugin(svix_url: String, queue_dsn: String) -> SqsConsumerPlugin { +fn get_test_plugin( + svix_url: String, + queue_dsn: String, + use_transformation: bool, +) -> SqsConsumerPlugin { SqsConsumerPlugin::new(SqsConsumerConfig { input: SqsInputOpts { queue_dsn, override_endpoint: true, }, + transformation: if use_transformation { + // The actual script doesn't matter since the test case will be performing the + // transformation, not the actual JS executor. 
+ Some(String::from("export default function (x) { return x; }")) + } else { + None + }, output: OutputOpts { token: "xxxx".to_string(), svix_options: Some(SvixOptions { @@ -94,7 +105,83 @@ async fn test_consume_ok() { .expect(1); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), queue_url.clone()); + let plugin = get_test_plugin(mock_server.uri(), queue_url.clone(), false); + + let handle = tokio::spawn(async move { + let fut = plugin.run(); + fut.await + }); + // Wait for the consumer to connect + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + let msg = CreateMessageRequest { + app_id: "app_1234".into(), + message: MessageIn::new("testing.things".into(), json!({"hi": "there"})), + post_options: None, + }; + + publish(&client, &queue_url, &serde_json::to_string(&msg).unwrap()).await; + + // Wait for the consumer to consume. + tokio::time::sleep(Duration::from_millis(WAIT_MS)).await; + + handle.abort(); + + client + .delete_queue() + .queue_url(&queue_url) + .send() + .await + .unwrap(); +} + +/// Push a msg on the queue. +/// Check to see if the svix server sees a request, but this time transform the payload. +#[tokio::test] +async fn test_consume_transformed_ok() { + let client = mq_connection().await; + let queue_name = create_test_queue(&client).await; + + let queue_url = format!("{ROOT_URL}/queue/{queue_name}"); + + let mock_server = MockServer::start().await; + // The mock will make asserts on drop (i.e. when the body of the test is returning). + // The `expect` call should ensure we see exactly 1 POST request. + // + let mock = Mock::given(method("POST")) + .and(body_partial_json(json!({ "payload": { "good": "bye" } }))) + .respond_with(ResponseTemplate::new(202).set_body_json(json!({ + "eventType": "testing.things", + "payload": { + "_SVIX_APP_ID": "app_1234", + "_SVIX_EVENT_TYPE": "testing.things", + // The adjustment made via the transformation... 
+ "good": "bye", + }, + "id": "msg_xxxx", + "timestamp": "2023-04-25T00:00:00Z" + }))) + .named("create_message") + .expect(1); + mock_server.register(mock).await; + + let mut plugin = get_test_plugin(mock_server.uri(), queue_url.clone(), true); + let (transformer_tx, mut transformer_rx) = + tokio::sync::mpsc::unbounded_channel::(); + let _handle = tokio::spawn(async move { + while let Some(x) = transformer_rx.recv().await { + let mut out = x.payload; + // Prune out the "hi" key. + out["message"]["payload"] + .as_object_mut() + .unwrap() + .remove("hi"); + // Add the "good" key. + out["message"]["payload"]["good"] = json!("bye"); + x.callback_tx.send(Ok(JsReturn::Object(out))).ok(); + } + }); + plugin.set_transformer(Some(transformer_tx)); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -140,7 +227,7 @@ async fn test_missing_app_id_nack() { .expect(0); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), queue_url.clone()); + let plugin = get_test_plugin(mock_server.uri(), queue_url.clone(), false); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -195,7 +282,7 @@ async fn test_missing_event_type_nack() { .expect(0); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), queue_url.clone()); + let plugin = get_test_plugin(mock_server.uri(), queue_url.clone(), false); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -251,7 +338,7 @@ async fn test_consume_svix_503() { .expect(1); mock_server.register(mock).await; - let plugin = get_test_plugin(mock_server.uri(), queue_url.clone()); + let plugin = get_test_plugin(mock_server.uri(), queue_url.clone(), false); let handle = tokio::spawn(async move { let fut = plugin.run(); @@ -296,7 +383,7 @@ async fn test_consume_svix_offline() { let mock_server = MockServer::start().await; - let plugin = get_test_plugin(mock_server.uri(), queue_url.clone()); + let plugin = get_test_plugin(mock_server.uri(), queue_url.clone(), 
false); // bye-bye svix... drop(mock_server); diff --git a/webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/runtime.rs b/webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/runtime.rs deleted file mode 100644 index 8e858316c..000000000 --- a/webhook-bridge/svix-webhook-bridge-plugin-webhook-receiver/src/runtime.rs +++ /dev/null @@ -1,173 +0,0 @@ -//! N.b. this module is not currently attached to the project. -//! Retained as-is from the original webhook-ingester for now since it seems adaptable for the -//! upcoming "transformations" work. -use std::{pin::Pin, rc::Rc, sync::Arc}; - -use anyhow::Result; -use deno_core::{ - futures::FutureExt, - resolve_import, serde_v8, - v8::{self, Global, Value}, - FsModuleLoader, ModuleLoader, ModuleSource, ModuleSourceFuture, ModuleSpecifier, ModuleType, -}; -use deno_runtime::{ - permissions::Permissions, - worker::{MainWorker, WorkerOptions}, -}; -use threadpool::ThreadPool; -use tokio::sync::{oneshot, Mutex}; - -use crate::types::{SerializableRequest, Unvalidated}; - -/// This [`ModuleLoader`] implementation loads the configured script when loading the "file" -/// `virt:///user/script`, but otherwise loads things from disk -pub struct ConfiguredModuleLoader(pub String); - -impl ModuleLoader for ConfiguredModuleLoader { - fn resolve(&self, specifier: &str, referrer: &str, _is_main: bool) -> Result { - Ok(resolve_import(specifier, referrer)?) 
- } - - fn load( - &self, - module_specifier: &ModuleSpecifier, - _maybe_referrer: Option, - _is_dynamic: bool, - ) -> Pin> { - let module_specifier = module_specifier.clone(); - let code = self.0.clone(); - - if Ok(module_specifier.clone()) == resolve_import("virt:///user/script", "") { - async move { - Ok(ModuleSource { - code: Box::from(code.as_bytes()), - module_type: ModuleType::JavaScript, - module_url_specified: module_specifier.to_string(), - module_url_found: module_specifier.to_string(), - }) - } - .boxed_local() - } else { - FsModuleLoader.load(&module_specifier, _maybe_referrer, _is_dynamic) - } - } -} - -/// In the context of this service, the only valid return value of the exported function run by the -/// Deno runtime is a `bool` value. Any other values are invalid and should become an error in the -/// [`VerificationMethod`] implementation. -pub enum JsReturn { - Bool(bool), - Invalid, -} - -// NOTE: The worker is in a struct to try and get around requirements for Send with `async`, but I -// think it can be eliminated with a little effort. 
-struct Worker { - pub worker: MainWorker, -} - -impl Worker { - pub async fn load_script(&mut self, script: String) -> Result<()> { - let loader = ConfiguredModuleLoader(script.clone()); - let worker_main_module = loader.resolve("virt:///user/script", "", true)?; - - let module_id = self - .worker - .js_runtime - .load_main_module(&worker_main_module, Some(script)) - .await?; - - let eval = self.worker.js_runtime.mod_evaluate(module_id); - self.worker.js_runtime.run_event_loop(true).await?; - eval.await??; - - Ok(()) - } - - pub fn run_script(&mut self, req: SerializableRequest) -> Result> { - // This defines the global `input` variable - self.worker - .execute_script("bootstrap", &define_global(&req)?)?; - - // And this calls the `handler` function in the main module - let out = self.worker.execute_script( - "run", - "import('virt:///user/script').then(module => module.default(input));", - )?; - - Ok(out) - } - - pub async fn resolve_value(&mut self, res: Global) -> Result { - let awaited = self.worker.js_runtime.resolve_value(res).await?; - - let scope = &mut self.worker.js_runtime.handle_scope(); - let local = v8::Local::new(scope, awaited); - - match serde_v8::from_v8::(scope, local) { - Ok(b) => Ok(JsReturn::Bool(b)), - Err(e) if e == deno_core::serde_v8::Error::ExpectedBoolean => Ok(JsReturn::Invalid), - Err(e) => Err(e)?, - } - } -} - -#[derive(Clone, Debug)] -pub struct TpHandle(pub Arc>); - -impl TpHandle { - pub fn new() -> Self { - Self(Arc::new(Mutex::new(ThreadPool::default()))) - } - - pub async fn run_script( - &self, - req: SerializableRequest, - script: String, - ) -> Result { - let (tx, rx) = oneshot::channel(); - - self.0.lock().await.execute(move || { - let _ = tx.send(run_script_inner(req, script)); - }); - - rx.await? 
- } -} - -fn run_script_inner(req: SerializableRequest, script: String) -> Result { - let mut worker = { - let worker_main_module = deno_core::resolve_path("file://dne").unwrap(); - - let worker_options = WorkerOptions { - module_loader: Rc::new(ConfiguredModuleLoader(script.clone())), - ..Default::default() - }; - - let worker_permissions = Permissions::allow_all(); - - Worker { - worker: MainWorker::bootstrap_from_options( - worker_main_module, - worker_permissions, - worker_options, - ), - } - }; - - smol::block_on(async { worker.load_script(script).await })?; - let res = worker.run_script(req)?; - smol::block_on(async { worker.resolve_value(res).await }) -} - -fn define_global(req: &SerializableRequest) -> Result { - Ok(format!( - "Object.defineProperty(\ - globalThis,\ - 'input',\ - {{ value: {}, writable: true, enumerable: true, configurable: true }}\ - );", - serde_json::to_string(req)?, - )) -} diff --git a/webhook-bridge/svix-webhook-bridge-types/Cargo.toml b/webhook-bridge/svix-webhook-bridge-types/Cargo.toml index 25dfc4c36..fa1771937 100644 --- a/webhook-bridge/svix-webhook-bridge-types/Cargo.toml +++ b/webhook-bridge/svix-webhook-bridge-types/Cargo.toml @@ -6,4 +6,6 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -async-trait = "0.1" \ No newline at end of file +async-trait = "0.1" +tokio = { version = "1", features = ["full"] } +serde_json = "1" \ No newline at end of file diff --git a/webhook-bridge/svix-webhook-bridge-types/src/lib.rs b/webhook-bridge/svix-webhook-bridge-types/src/lib.rs index c02a90e61..0b236fc0c 100644 --- a/webhook-bridge/svix-webhook-bridge-types/src/lib.rs +++ b/webhook-bridge/svix-webhook-bridge-types/src/lib.rs @@ -1,9 +1,58 @@ pub use async_trait::async_trait; +use tokio::sync::mpsc; +use tokio::sync::oneshot; + +/// Plain old JSON objects are what the transformations expect to receive and produce. 
+pub type JsObject = serde_json::Map; +/// A channel for plugins to send payloads/scripts to for execution. +pub type TransformerTx = mpsc::UnboundedSender; +/// The receiver side for transformations. The JS executor reads from this. +pub type TransformerRx = mpsc::UnboundedReceiver; +/// A oneshot channel for the JS executor to "publish" return values to once complete. +// FIXME: better error type? +pub type TransformerCallbackTx = oneshot::Sender>; +/// Used by the caller of the transformer to await the execution's output. +// FIXME: better error type? +pub type TransformerCallbackRx = oneshot::Receiver>; + +/// A transformation job sent to the JS executor. +/// Once the script has been run on the payload, the transformed payload is sent back through the +/// callback channel. +pub struct TransformerJob { + pub callback_tx: TransformerCallbackTx, + pub payload: JsObject, + pub script: String, +} + +pub enum JsReturn { + /// A successfully transformed payload. + // XXX: not sure if there's a cheaper way to deserialize the output while requiring an Object. + Object(JsObject), + /// For cases where the JS script executes successfully but produces an unexpected output. + Invalid, +} + +impl TransformerJob { + pub fn new(script: String, payload: JsObject) -> (Self, TransformerCallbackRx) { + let (callback_tx, callback_rx) = oneshot::channel(); + ( + Self { + payload, + script, + callback_tx, + }, + callback_rx, + ) + } +} /// Effectively a black box to the supervisor. /// Plugins should run until they are done, and likely they should not be "done" until the program /// exits. #[async_trait] pub trait Plugin: Send { + /// For plugins that want to run JS transformations on payloads. + /// Giving them a sender lets them pass messages to the JS executor. 
+ fn set_transformer(&mut self, _tx: Option) {} async fn run(&self) -> std::io::Result<()>; } diff --git a/webhook-bridge/svix-webhook-bridge.example.yaml b/webhook-bridge/svix-webhook-bridge.example.yaml index 6bc3ba2ea..908c37c02 100644 --- a/webhook-bridge/svix-webhook-bridge.example.yaml +++ b/webhook-bridge/svix-webhook-bridge.example.yaml @@ -25,6 +25,17 @@ plugins: subscription_id: "my-subscription" # Optional - will fallback to looking at env vars when left unset. credentials_file: "/path/to/credentials.json" + # Optional - when unset, messages from the queue will be sent to Svix as-is. + transformation: | + function handler(input) { + return { + app_id: input.key, + message: { + eventType: input.event_type, + payload: input.data + } + }; + } output: # Required (the Svix token to use when creating messages with this consumer) token: "XYZ" @@ -40,6 +51,17 @@ plugins: consumer_tag: "my-consumer-001" # Optional (default: false) requeue_on_nack: true + # Optional - when unset, messages from the queue will be sent to Svix as-is. + transformation: | + function handler(input) { + return { + app_id: input.key, + message: { + eventType: input.event_type, + payload: input.data + } + }; + } output: # Required (the Svix token to use when creating messages with this consumer) token: "XYZ" @@ -59,6 +81,17 @@ plugins: max_connections: 4 # Optional (default: false) requeue_on_nack: true + # Optional - when unset, messages from the queue will be sent to Svix as-is. + transformation: | + function handler(input) { + return { + app_id: input.key, + message: { + eventType: input.event_type, + payload: input.data + } + }; + } output: # Required (the Svix token to use when creating messages with this consumer) token: "XYZ" @@ -74,6 +107,17 @@ plugins: queue_dsn: "http://localhost:19324/000000000000/local" # Optional (default: false) override_endpoint: true + # Optional - when unset, messages from the queue will be sent to Svix as-is. 
+ transformation: | + function handler(input) { + return { + app_id: input.key, + message: { + eventType: input.event_type, + payload: input.data + } + }; + } output: # Required (the Svix token to use when creating messages with this consumer) token: "XYZ" diff --git a/webhook-bridge/svix-webhook-bridge/Cargo.toml b/webhook-bridge/svix-webhook-bridge/Cargo.toml index eec8a87e9..08c51856b 100644 --- a/webhook-bridge/svix-webhook-bridge/Cargo.toml +++ b/webhook-bridge/svix-webhook-bridge/Cargo.toml @@ -6,17 +6,57 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +anyhow = "1" clap = { version = "4.2.4", features = ["env", "derive"] } + +# Latest deno: leaks. +#deno_core = "0.185.0" +#deno_runtime = "0.111.0" + +# This is just after what og-ingester was using (tagged 1.29.4). +# The actuals: 0.165 and 0.91 (tagged 1.29.2). These fail to resolve deps - conflicts on tokio and related. +# Still leaks! +#deno_core = "0.167" +#deno_runtime = "0.93" + +# These are "old" deno - about a year old, v1.22.0. +# Worker API is different here. Not clear on how to get values back from a script execution in a worker; returns only unit. 
+#deno_core = "0.135.0" +#deno_runtime = "0.61.0" + +# This is v1.30.3 - the bug reports of leaks started in 1.31.x +#deno_core = "0.171.0" +#deno_runtime = "0.97.0" + +# tagged 1.27.2 (leaks) +#deno_core = "0.158.0" + +# tagged 1.26.2 (leaks) +#deno_core = "0.155.0" + +# tagged 1.25.4 +#deno_core = "0.152.0" + +# tagged 1.24.3 (leaks, but not quite as badly as later releases - backtrace showed v8 GC frames) +#deno_core = "0.147.0" + +# tagged 1.23.3 (does not seem to leak) +deno_core = "0.142.0" + lazy_static = "1.4" opentelemetry = { version = "0.18.0", features = ["rt-tokio"] } opentelemetry-http = "0.7.0" opentelemetry-otlp = { version = "0.11.0", features = ["metrics", "grpc-tonic", "http-proto", "reqwest-client"] } serde = { version = "1.0", features = ["derive"] } +serde_json = "1" serde_yaml = "0.9.21" +# FIXME: Used in deno execution - can we just use tokio? +smol = "1.3.0" svix-webhook-bridge-plugin-queue-consumer = { optional=true, path = "../svix-webhook-bridge-plugin-queue-consumer" } svix-webhook-bridge-plugin-webhook-receiver = { optional=true, path = "../svix-webhook-bridge-plugin-webhook-receiver" } svix-webhook-bridge-types = { path = "../svix-webhook-bridge-types" } svix-ksuid = "0.7.0" +threadpool = "1.8" tokio = { version = "1", features=["full"] } tracing = "0.1" tracing-opentelemetry = "0.18.0" diff --git a/webhook-bridge/svix-webhook-bridge/src/main.rs b/webhook-bridge/svix-webhook-bridge/src/main.rs index 4f79c60de..03c0e2114 100644 --- a/webhook-bridge/svix-webhook-bridge/src/main.rs +++ b/webhook-bridge/svix-webhook-bridge/src/main.rs @@ -6,10 +6,11 @@ use opentelemetry_otlp::WithExportConfig; use std::path::PathBuf; use std::time::Duration; use svix_ksuid::{KsuidLike as _, KsuidMs}; -use svix_webhook_bridge_types::Plugin; +use svix_webhook_bridge_types::{Plugin, TransformerJob}; use tracing_subscriber::prelude::*; mod config; +mod runtime; lazy_static! { // Seems like it would be useful to be able to configure this. 
@@ -24,7 +25,6 @@ fn get_svc_identifiers(cfg: &Config) -> opentelemetry::sdk::Resource { "service.name", cfg.opentelemetry_service_name .as_deref() - // FIXME: can we do something better? .unwrap_or("svix-webhook-bridge") .to_owned(), ), @@ -169,15 +169,50 @@ async fn main() -> std::io::Result<()> { tracing::info!("starting"); + let (xform_tx, mut xform_rx) = tokio::sync::mpsc::unbounded_channel::(); + + // XXX: this is a bit nasty, but might be okay to start. + // The nested spawns are needed to make sure we can saturate the + // threadpool (otherwise we'd run each job serially). + // + // Another approach would be to do what og-ingester did: give each plugin a clone of the + // `TpHandle`, but this would likely mean moving the runtime module over to the `-types` crate. + // I'd rather not do this, mostly to help keep things more unit test friendly; channels can + // help keep the coupling more loose, with less stateful baggage. + // Starting with this just to keep the JS executor stuff here in the binary. + tokio::spawn(async move { + let tp = runtime::TpHandle::new(); + while let Some(TransformerJob { + payload, + script, + callback_tx, + }) = xform_rx.recv().await + { + let tp = tp.clone(); + tokio::spawn(async move { + let out = tp.run_script(payload.into(), script).await; + if callback_tx + .send(out.map_err(|e| tracing::error!("{}", e))) + .is_err() + { + // If the callback fails, the plugin is likely unwinding/dropping. + // Not a whole lot we can do about that. 
+ tracing::error!("failed to send js output back to caller"); + } + }); + } + }); + let mut plugins = Vec::with_capacity(cfg.plugins.len()); for cc in cfg.plugins { - let consumer = cc.try_into().map_err(|e| { + let mut plugin: Box = cc.try_into().map_err(|e| { std::io::Error::new( std::io::ErrorKind::Other, format!("Failed to configure plugin: {}", e), ) })?; - plugins.push(consumer); + plugin.set_transformer(Some(xform_tx.clone())); + plugins.push(plugin); } if plugins.is_empty() { tracing::warn!("No plugins configured.") diff --git a/webhook-bridge/svix-webhook-bridge/src/runtime/mod.rs b/webhook-bridge/svix-webhook-bridge/src/runtime/mod.rs new file mode 100644 index 000000000..41d7db05e --- /dev/null +++ b/webhook-bridge/svix-webhook-bridge/src/runtime/mod.rs @@ -0,0 +1,49 @@ +use anyhow::Result; +use std::sync::Arc; + +use deno_core::{anyhow, serde_v8, v8, JsRuntime}; +use svix_webhook_bridge_types::{JsObject, JsReturn}; +use threadpool::ThreadPool; +use tokio::sync::{oneshot, Mutex}; + +#[derive(Clone, Debug)] +pub struct TpHandle(pub Arc>); + +impl TpHandle { + pub fn new() -> Self { + Self(Arc::new(Mutex::new(ThreadPool::default()))) + } + + pub async fn run_script(&self, input: serde_json::Value, script: String) -> Result { + let (tx, rx) = oneshot::channel(); + + self.0.lock().await.execute(move || { + let _ = tx.send(run_script_inner(&input, script)); + }); + + rx.await? 
+ } +} + +fn run_script_inner(input: &serde_json::Value, script: String) -> Result { + let mut runtime = JsRuntime::new(Default::default()); + let res = runtime.execute_script( + "", + &format!("{script}\nhandler({})", serde_json::to_string(input)?), + ); + match res { + Ok(global) => { + let scope = &mut runtime.handle_scope(); + let local = v8::Local::new(scope, global); + match serde_v8::from_v8::(scope, local) { + Ok(v) => Ok(JsReturn::Object(v)), + Err(serde_v8::Error::ExpectedObject) => Ok(JsReturn::Invalid), + Err(e) => Err(e)?, + } + } + Err(err) => Err(anyhow::format_err!("Evaling error: {:?}", err)), + } +} + +#[cfg(test)] +mod tests; diff --git a/webhook-bridge/svix-webhook-bridge/src/runtime/tests.rs b/webhook-bridge/svix-webhook-bridge/src/runtime/tests.rs new file mode 100644 index 000000000..7bb528372 --- /dev/null +++ b/webhook-bridge/svix-webhook-bridge/src/runtime/tests.rs @@ -0,0 +1,56 @@ +use super::run_script_inner; +use serde_json::json; +use svix_webhook_bridge_types::JsReturn; + +// Really just trying to figure out if the deno runtime is working the way I hope. +#[test] +fn test_happy_fn() { + let src = r#" + function handler(input) { + return { "x": 123, ...input }; + } + "# + .to_string(); + let res = run_script_inner(&json!({ "y": 456 }), src).unwrap(); + match res { + JsReturn::Object(v) => { + assert_eq!(v["x"].as_i64(), Some(123)); + assert_eq!(v["y"].as_i64(), Some(456)); + } + JsReturn::Invalid => panic!("got unexpected return value"), + } +} + +#[test] +fn test_invalid_output_bool() { + let src = r#" + function handler(input) { + return false; + } + "# + .to_string(); + let res = run_script_inner(&json!({}), src).unwrap(); + match res { + JsReturn::Invalid => (), + JsReturn::Object(_) => panic!("got unexpected return value"), + } +} + +#[test] +// FIXME: serde decodes arrays with keys like "0", "1"... in this situation, failing the test. 
+#[ignore] +fn test_invalid_output_array() { + let src = r#" + function handler(input) { + return [1, 2]; + } + "# + .to_string(); + let res = run_script_inner(&json!({}), src).unwrap(); + match res { + JsReturn::Invalid => (), + JsReturn::Object(_) => { + panic!("got unexpected return value"); + } + } +} From aa702cc40a8a9aa74ef15bb7ca6b39aab3e5c2ea Mon Sep 17 00:00:00 2001 From: Owen Nelson Date: Mon, 15 May 2023 10:09:41 -0700 Subject: [PATCH 8/8] WIP: add kinesis support for bridge Very incomplete/untested. The sdk doesn't seem to have APIs for consuming. Follow for updates on the sdk side. --- webhook-bridge/Cargo.lock | 26 +++ webhook-bridge/generic-queue/Cargo.toml | 2 + webhook-bridge/generic-queue/src/kinesis.rs | 202 ++++++++++++++++++++ webhook-bridge/generic-queue/src/lib.rs | 2 + webhook-bridge/testing-docker-compose.yml | 7 + 5 files changed, 239 insertions(+) create mode 100644 webhook-bridge/generic-queue/src/kinesis.rs diff --git a/webhook-bridge/Cargo.lock b/webhook-bridge/Cargo.lock index 958cd454d..969e61118 100644 --- a/webhook-bridge/Cargo.lock +++ b/webhook-bridge/Cargo.lock @@ -401,6 +401,31 @@ dependencies = [ "tracing", ] +[[package]] +name = "aws-sdk-kinesis" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fda5450344773e1c2a5e5ff42d3364a38f4a7303c202c4d81da56887cf08d485" +dependencies = [ + "aws-credential-types", + "aws-endpoint", + "aws-http", + "aws-sig-auth", + "aws-smithy-async", + "aws-smithy-client", + "aws-smithy-http", + "aws-smithy-http-tower", + "aws-smithy-json", + "aws-smithy-types", + "aws-types", + "bytes", + "http", + "regex", + "tokio-stream", + "tower", + "tracing", +] + [[package]] name = "aws-sdk-sqs" version = "0.25.1" @@ -1456,6 +1481,7 @@ version = "0.1.0" dependencies = [ "async-trait", "aws-config", + "aws-sdk-kinesis", "aws-sdk-sqs", "bb8", "bb8-redis", diff --git a/webhook-bridge/generic-queue/Cargo.toml b/webhook-bridge/generic-queue/Cargo.toml index 
ebb51b838..e31045b3c 100644 --- a/webhook-bridge/generic-queue/Cargo.toml +++ b/webhook-bridge/generic-queue/Cargo.toml @@ -8,6 +8,7 @@ edition = "2021" [dependencies] async-trait = "0.1" aws-config = { version = "0.55", optional = true } +aws-sdk-kinesis = { version = "0.27.0", optional = true } aws-sdk-sqs = { version = "0.25", optional = true } bb8 = { version = "0.7.1", optional = true } bb8-redis = { version = "0.10.1", optional = true } @@ -30,6 +31,7 @@ tracing = "0.1" [features] default = ["gcp_pubsub", "memory_queue", "rabbitmq", "redis", "redis_cluster", "sqs"] +kinesis = ["dep:aws-config", "dep:aws-sdk-kinesis"] gcp_pubsub = ["dep:google-cloud-pubsub", "dep:google-cloud-googleapis", "dep:google-cloud-gax", "dep:tokio-util", "dep:futures-util", "dep:google-cloud-default", "dep:google-cloud-auth"] memory_queue = [] rabbitmq = ["dep:lapin"] diff --git a/webhook-bridge/generic-queue/src/kinesis.rs b/webhook-bridge/generic-queue/src/kinesis.rs new file mode 100644 index 000000000..044014e90 --- /dev/null +++ b/webhook-bridge/generic-queue/src/kinesis.rs @@ -0,0 +1,202 @@ +// Sharding -- how to represent this ? +// Docs indicate pulling records needs to visit shard after shard. +// Maybe we can hide this from the caller. 
+ +use std::{marker::PhantomData, time::Duration}; + +use async_trait::async_trait; +use aws_sdk_kinesis::primitives::Blob; +use aws_sdk_kinesis::Client; +use serde::{de::DeserializeOwned, Serialize}; + +use crate::{Delivery, QueueError, TaskQueueBackend, TaskQueueReceive, TaskQueueSend}; + +pub struct KinesisConfig { + // Consuming + consumer_arn: String, + // Producing + stream_name: String, + partition_key: String, +} + +pub struct KinesisQueueBackend; + +#[async_trait] +impl TaskQueueBackend + for KinesisQueueBackend +{ + type PairConfig = KinesisConfig; + type Delivery = KinesisDelivery; + type Producer = KinesisQueueProducer; + type Consumer = KinesisQueueConsumer; + + async fn new_pair( + cfg: KinesisConfig, + ) -> Result<(KinesisQueueProducer, KinesisQueueConsumer), QueueError> { + let aws_cfg = aws_config::load_from_env().await; + + let client = Client::new(&aws_cfg); + + let producer = KinesisQueueProducer { + client: client.clone(), + partition_key: cfg.partition_key, + stream_name: cfg.stream_name, + }; + let consumer = KinesisQueueConsumer { + client, + consumer_arn: cfg.consumer_arn, + }; + + Ok((producer, consumer)) + } + + async fn producing_half(cfg: KinesisConfig) -> Result { + let aws_cfg = aws_config::load_from_env().await; + + let client = Client::new(&aws_cfg); + + let producer = KinesisQueueProducer { + client, + queue_dsn: cfg.queue_dsn, + }; + + Ok(producer) + } + + async fn consuming_half(cfg: KinesisConfig) -> Result { + let aws_cfg = if cfg.override_endpoint { + aws_config::from_env() + .endpoint_url(&cfg.queue_dsn) + .load() + .await + } else { + aws_config::load_from_env().await + }; + + let client = Client::new(&aws_cfg); + + let consumer = KinesisQueueConsumer { + client, + queue_dsn: cfg.queue_dsn, + }; + + Ok(consumer) + } +} + +pub struct KinesisDelivery { + ack_client: Client, + // FIXME: Cow/Arc this stuff? 
+ queue_dsn: String, + body: String, + receipt_handle: Option, + _pd: PhantomData, +} + +#[async_trait] +impl Delivery for KinesisDelivery { + fn payload(&self) -> Result { + serde_json::from_str(&self.body).map_err(Into::into) + } + + async fn ack(self) -> Result<(), QueueError> { + if let Some(receipt_handle) = self.receipt_handle { + self.ack_client + .delete_message() + .queue_url(&self.queue_dsn) + .receipt_handle(receipt_handle) + .send() + .await + .map_err(QueueError::generic)?; + + Ok(()) + } else { + Err(QueueError::generic( + DeleteMessageError::ReceiptHandleIsInvalid( + ReceiptHandleIsInvalid::builder() + .message("receipt handle must be Some to be acked") + .build(), + ), + )) + } + } + + async fn nack(self) -> Result<(), QueueError> { + Ok(()) + } +} + +pub struct KinesisQueueProducer { + client: Client, + partition_key: String, + stream_name: String, +} + +#[async_trait] +impl TaskQueueSend for KinesisQueueProducer { + async fn send(&self, payload: T) -> Result<(), QueueError> { + let data = Blob::new(serde_json::to_string(&payload)?); + self.client + .put_record() + .data(data) + .partition_key(&self.partition_key) + .stream_name(&self.stream_name) + .send() + .await + .map_err(QueueError::generic)?; + + Ok(()) + } +} + +pub struct KinesisQueueConsumer { + client: Client, + consumer_arn: String, +} + +#[async_trait] +impl + TaskQueueReceive> for KinesisQueueConsumer +{ + async fn receive_all( + &mut self, + max_batch_size: usize, + timeout: Duration, + ) -> Result>, QueueError> { + // Ensure that there's at least one message before returning regardless of timeout + let out = loop { + let out = self + .client + .receive_message() + .set_wait_time_seconds(Some( + timeout.as_secs().try_into().map_err(QueueError::generic)?, + )) + .set_max_number_of_messages(Some( + max_batch_size.try_into().map_err(QueueError::generic)?, + )) + .queue_url(&self.queue_dsn) + .send() + .await + .map_err(QueueError::generic)?; + + if 
!out.messages().unwrap_or_default().is_empty() { + break out; + } + }; + + Ok(out + .messages() + .unwrap_or_default() + .iter() + .map(|message| -> Result, QueueError> { + Ok(KinesisDelivery { + ack_client: self.client.clone(), + queue_dsn: self.queue_dsn.clone(), + body: message.body().unwrap_or_default().to_owned(), + receipt_handle: message.receipt_handle().map(ToOwned::to_owned), + _pd: PhantomData, + }) + }) + .collect::, _>>()?) + } +} diff --git a/webhook-bridge/generic-queue/src/lib.rs b/webhook-bridge/generic-queue/src/lib.rs index 0229b1d10..b132723e4 100644 --- a/webhook-bridge/generic-queue/src/lib.rs +++ b/webhook-bridge/generic-queue/src/lib.rs @@ -8,6 +8,8 @@ use thiserror::Error; #[cfg(feature = "gcp_pubsub")] pub mod gcp_pubsub; +#[cfg(feature = "kinesis")] +pub mod kinesis; #[cfg(feature = "memory_queue")] pub mod memory_queue; #[cfg(feature = "rabbitmq")] diff --git a/webhook-bridge/testing-docker-compose.yml b/webhook-bridge/testing-docker-compose.yml index d49930795..4da1560e2 100644 --- a/webhook-bridge/testing-docker-compose.yml +++ b/webhook-bridge/testing-docker-compose.yml @@ -26,3 +26,10 @@ services: "--project", "local-project", "--host-port", "0.0.0.0:8085" ] + + kinesis: + # They don't seem to tag releases... might need to find a different image or build one of our own using the npm + # package. + image: instructure/kinesalite:latest + ports: + - "4567:4567"