From eed1ee053191ecaca21f82749da9fe443712206f Mon Sep 17 00:00:00 2001 From: Simon Schrottner Date: Thu, 6 Feb 2025 17:28:27 +0100 Subject: [PATCH] feat(flagd): migrate to new provider mode file and update e2e tests (#121) * feat(flagd-rpc): add caching with tests Signed-off-by: Simon Schrottner * fixup: using new test-harness Signed-off-by: Simon Schrottner * fixup(flagd): remove merge conflict error as stated by warber Signed-off-by: Simon Schrottner * feat(flagd): add graceful attempts Signed-off-by: Simon Schrottner * feat(flagd): add graceful attempts Signed-off-by: Simon Schrottner * fixup: rename method Signed-off-by: Simon Schrottner * fixup: naming linting Signed-off-by: Simon Schrottner * feat: better reconnect gherkins Signed-off-by: Simon Schrottner --------- Signed-off-by: Simon Schrottner --- CONTRIBUTING.md | 20 +- .../openfeature/test-harness | 2 +- .../openfeature-provider-flagd/pyproject.toml | 4 +- .../openfeature-provider-flagd/pytest.ini | 14 +- .../contrib/provider/flagd/config.py | 12 + .../contrib/provider/flagd/provider.py | 29 +- .../contrib/provider/flagd/resolvers/grpc.py | 11 +- .../provider/flagd/resolvers/in_process.py | 36 +- .../resolvers/process/connector/__init__.py | 11 + .../process/connector/file_watcher.py | 106 +++ .../flagd/resolvers/process/file_watcher.py | 91 --- .../provider/flagd/resolvers/process/flags.py | 36 + .../provider/flagd/resolvers/protocol.py | 47 ++ .../tests/e2e/__init__.py | 0 .../tests/e2e/conftest.py | 18 +- .../tests/e2e/file/__init__.py | 0 .../tests/e2e/file/conftest.py | 27 + .../tests/e2e/file/test_flaqd.py | 10 + .../tests/e2e/flagd_container.py | 28 +- .../tests/e2e/paths.py | 2 + .../tests/e2e/rpc/__init__.py | 0 .../tests/e2e/rpc/conftest.py | 19 + .../tests/e2e/rpc/test_flaqd.py | 10 + .../tests/e2e/step/_utils.py | 30 + .../{test_config.py => step/config_steps.py} | 43 +- .../tests/e2e/step/context_steps.py | 73 ++ .../tests/e2e/step/event_steps.py | 90 +++ .../tests/e2e/step/flag_step.py | 98 
+++ .../tests/e2e/step/provider_steps.py | 142 ++++ .../tests/e2e/steps.py | 721 ------------------ .../tests/e2e/test_in_process_file.py | 99 --- .../tests/e2e/test_rpc.py | 33 - .../tests/e2e/test_rpc_reconnect.py | 31 - .../tests/e2e/test_rpc_ssl.py | 68 -- .../tests/e2e/testfilter.py | 125 +++ .../tests/flags/.gitignore | 1 + .../tests/test_config.py | 6 +- .../tests/test_file_store.py | 31 +- 38 files changed, 996 insertions(+), 1128 deletions(-) create mode 100644 providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/__init__.py create mode 100644 providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/file_watcher.py delete mode 100644 providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/file_watcher.py create mode 100644 providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/protocol.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/__init__.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/file/__init__.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/file/conftest.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/file/test_flaqd.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/paths.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/rpc/__init__.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/rpc/conftest.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/rpc/test_flaqd.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/step/_utils.py rename providers/openfeature-provider-flagd/tests/e2e/{test_config.py => step/config_steps.py} (72%) create mode 100644 providers/openfeature-provider-flagd/tests/e2e/step/context_steps.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/step/event_steps.py create mode 
100644 providers/openfeature-provider-flagd/tests/e2e/step/flag_step.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/step/provider_steps.py delete mode 100644 providers/openfeature-provider-flagd/tests/e2e/steps.py delete mode 100644 providers/openfeature-provider-flagd/tests/e2e/test_in_process_file.py delete mode 100644 providers/openfeature-provider-flagd/tests/e2e/test_rpc.py delete mode 100644 providers/openfeature-provider-flagd/tests/e2e/test_rpc_reconnect.py delete mode 100644 providers/openfeature-provider-flagd/tests/e2e/test_rpc_ssl.py create mode 100644 providers/openfeature-provider-flagd/tests/e2e/testfilter.py create mode 100644 providers/openfeature-provider-flagd/tests/flags/.gitignore diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8db34176..583f262f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -30,7 +30,13 @@ We use `pytest` for our unit testing, making use of `parametrized` to inject cas ### Integration tests -These are planned once the SDK has been stabilized and a Flagd provider implemented. At that point, we will utilize the [gherkin integration tests](https://github.com/open-feature/test-harness/blob/main/features/evaluation.feature) to validate against a live, seeded Flagd instance. +The Flagd provider utilizes the [gherkin integration tests](https://github.com/open-feature/test-harness/blob/main/features/evaluation.feature) to validate against a live, seeded Flagd instance. + +To run the integration tests you need to have a container runtime, like docker, rancher, etc. installed. 
+ +```bash +hatch run test +``` ### Type checking @@ -52,6 +58,13 @@ Navigate to the repository folder cd python-sdk-contrib ``` +Checkout submodules + +```bash +git submodule update --init --recursive +``` + + Add your fork as an origin ```bash @@ -62,7 +75,8 @@ Ensure your development environment is all set up by building and testing ```bash cd -hatch run test +hatch build +hatch test ``` To start working on a new feature or bugfix, create a new branch and start working on it. @@ -70,7 +84,7 @@ To start working on a new feature or bugfix, create a new branch and start worki ```bash git checkout -b feat/NAME_OF_FEATURE # Make your changes -git commit +git commit -s -m "feat: my feature" git push fork feat/NAME_OF_FEATURE ``` diff --git a/providers/openfeature-provider-flagd/openfeature/test-harness b/providers/openfeature-provider-flagd/openfeature/test-harness index 8931c864..3f51fad5 160000 --- a/providers/openfeature-provider-flagd/openfeature/test-harness +++ b/providers/openfeature-provider-flagd/openfeature/test-harness @@ -1 +1 @@ -Subproject commit 8931c8645b8600e251d5e3ebbad42dff8ce4c78e +Subproject commit 3f51fad5816ecc259092c49d4623c7c4f617d635 diff --git a/providers/openfeature-provider-flagd/pyproject.toml b/providers/openfeature-provider-flagd/pyproject.toml index 0b692391..97aa5957 100644 --- a/providers/openfeature-provider-flagd/pyproject.toml +++ b/providers/openfeature-provider-flagd/pyproject.toml @@ -18,8 +18,8 @@ classifiers = [ keywords = [] dependencies = [ "openfeature-sdk>=0.6.0", - "grpcio>=1.68.0", - "protobuf>=4.25.2", + "grpcio>=1.68.1", + "protobuf>=4.29.2", "mmh3>=4.1.0", "panzi-json-logic>=1.0.1", "semver>=3,<4", diff --git a/providers/openfeature-provider-flagd/pytest.ini b/providers/openfeature-provider-flagd/pytest.ini index 66da895f..0aef5b75 100644 --- a/providers/openfeature-provider-flagd/pytest.ini +++ b/providers/openfeature-provider-flagd/pytest.ini @@ -1,10 +1,22 @@ [pytest] markers = rpc: tests for rpc mode. 
- in-process: tests for rpc mode. + in-process: tests for in-process mode. + file: tests for file mode. + unavailable: tests for unavailable providers. customCert: Supports custom certs. unixsocket: Supports unixsockets. + targetURI: Supports targetURI. + grace: Supports grace attempts. + targeting: Supports targeting. + fractional: Supports fractional. + string: Supports string. + semver: Supports semver. + reconnect: Supports reconnect. events: Supports events. sync: Supports sync. caching: Supports caching. offline: Supports offline. + os.linux: linux mark. + stream: Supports streams. +bdd_features_base_dir = tests/features diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/config.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/config.py index 68efb579..d5829c2a 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/config.py +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/config.py @@ -7,6 +7,7 @@ class ResolverType(Enum): RPC = "rpc" IN_PROCESS = "in-process" + FILE = "file" class CacheType(Enum): @@ -158,6 +159,17 @@ def __init__( # noqa: PLR0913 else offline_flag_source_path ) + if ( + self.offline_flag_source_path is not None + and self.resolver is ResolverType.IN_PROCESS + ): + self.resolver = ResolverType.FILE + + if self.resolver is ResolverType.FILE and self.offline_flag_source_path is None: + raise AttributeError( + "Resolver Type 'FILE' requires a offlineFlagSourcePath" + ) + self.offline_poll_interval_ms: int = ( int( env_or_default( diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/provider.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/provider.py index 588e9c27..732bd413 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/provider.py +++ 
b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/provider.py @@ -26,8 +26,8 @@ from openfeature.evaluation_context import EvaluationContext from openfeature.flag_evaluation import FlagResolutionDetails +from openfeature.provider import AbstractProvider from openfeature.provider.metadata import Metadata -from openfeature.provider.provider import AbstractProvider from .config import CacheType, Config, ResolverType from .resolvers import AbstractResolver, GrpcResolver, InProcessResolver @@ -43,14 +43,14 @@ def __init__( # noqa: PLR0913 host: typing.Optional[str] = None, port: typing.Optional[int] = None, tls: typing.Optional[bool] = None, - deadline: typing.Optional[int] = None, + deadline_ms: typing.Optional[int] = None, timeout: typing.Optional[int] = None, retry_backoff_ms: typing.Optional[int] = None, resolver_type: typing.Optional[ResolverType] = None, offline_flag_source_path: typing.Optional[str] = None, stream_deadline_ms: typing.Optional[int] = None, keep_alive_time: typing.Optional[int] = None, - cache_type: typing.Optional[CacheType] = None, + cache: typing.Optional[CacheType] = None, max_cache_size: typing.Optional[int] = None, retry_backoff_max_ms: typing.Optional[int] = None, retry_grace_period: typing.Optional[int] = None, @@ -62,7 +62,7 @@ def __init__( # noqa: PLR0913 :param host: the host to make requests to :param port: the port the flagd service is available on :param tls: enable/disable secure TLS connectivity - :param deadline: the maximum to wait before a request times out + :param deadline_ms: the maximum to wait before a request times out :param timeout: the maximum time to wait before a request times out :param retry_backoff_ms: the number of milliseconds to backoff :param offline_flag_source_path: the path to the flag source file @@ -70,8 +70,8 @@ def __init__( # noqa: PLR0913 :param keep_alive_time: the number of milliseconds to keep alive :param resolver_type: the type of resolver to use """ - if deadline is 
None and timeout is not None: - deadline = timeout * 1000 + if deadline_ms is None and timeout is not None: + deadline_ms = timeout * 1000 warnings.warn( "'timeout' property is deprecated, please use 'deadline' instead, be aware that 'deadline' is in milliseconds", DeprecationWarning, @@ -82,7 +82,7 @@ def __init__( # noqa: PLR0913 host=host, port=port, tls=tls, - deadline_ms=deadline, + deadline_ms=deadline_ms, retry_backoff_ms=retry_backoff_ms, retry_backoff_max_ms=retry_backoff_max_ms, retry_grace_period=retry_grace_period, @@ -90,7 +90,7 @@ def __init__( # noqa: PLR0913 offline_flag_source_path=offline_flag_source_path, stream_deadline_ms=stream_deadline_ms, keep_alive_time=keep_alive_time, - cache=cache_type, + cache=cache, max_cache_size=max_cache_size, cert_path=cert_path, ) @@ -106,8 +106,17 @@ def setup_resolver(self) -> AbstractResolver: self.emit_provider_stale, self.emit_provider_configuration_changed, ) - elif self.config.resolver == ResolverType.IN_PROCESS: - return InProcessResolver(self.config, self) + elif ( + self.config.resolver == ResolverType.IN_PROCESS + or self.config.resolver == ResolverType.FILE + ): + return InProcessResolver( + self.config, + self.emit_provider_ready, + self.emit_provider_error, + self.emit_provider_stale, + self.emit_provider_configuration_changed, + ) else: raise ValueError( f"`resolver_type` parameter invalid: {self.config.resolver}" diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/grpc.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/grpc.py index cb345776..4eef2420 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/grpc.py +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/grpc.py @@ -105,7 +105,11 @@ def initialize(self, evaluation_context: EvaluationContext) -> None: def shutdown(self) -> None: self.active = False + 
self.channel.unsubscribe(self._state_change_callback) self.channel.close() + if self.timer and self.timer.is_alive(): + logger.debug("gRPC error timer cancelled due to shutdown") + self.timer.cancel() if self.cache: self.cache.clear() @@ -179,21 +183,22 @@ def listen(self) -> None: if self.streamline_deadline_seconds > 0 else {} ) - call_args["wait_for_ready"] = True request = evaluation_pb2.EventStreamRequest() # defining a never ending loop to recreate the stream while self.active: try: logger.debug("Setting up gRPC sync flags connection") - for message in self.stub.EventStream(request, **call_args): + for message in self.stub.EventStream( + request, wait_for_ready=True, **call_args + ): if message.type == "provider_ready": - self.connected = True self.emit_provider_ready( ProviderEventDetails( message="gRPC sync connection established" ) ) + self.connected = True elif message.type == "configuration_change": data = MessageToDict(message)["data"] self.handle_changed_flags(data) diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/in_process.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/in_process.py index a14dbb8c..baa3a9a3 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/in_process.py +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/in_process.py @@ -1,36 +1,47 @@ import typing +from openfeature.contrib.provider.flagd.resolvers.process.connector.file_watcher import ( + FileWatcher, +) from openfeature.evaluation_context import EvaluationContext +from openfeature.event import ProviderEventDetails from openfeature.exception import FlagNotFoundError, ParseError from openfeature.flag_evaluation import FlagResolutionDetails, Reason -from openfeature.provider import AbstractProvider from ..config import Config -from .process.file_watcher import FileWatcherFlagStore +from 
.process.connector import FlagStateConnector +from .process.flags import FlagStore from .process.targeting import targeting T = typing.TypeVar("T") class InProcessResolver: - def __init__(self, config: Config, provider: AbstractProvider): + def __init__( + self, + config: Config, + emit_provider_ready: typing.Callable[[ProviderEventDetails], None], + emit_provider_error: typing.Callable[[ProviderEventDetails], None], + emit_provider_stale: typing.Callable[[ProviderEventDetails], None], + emit_provider_configuration_changed: typing.Callable[ + [ProviderEventDetails], None + ], + ): self.config = config - self.provider = provider if not self.config.offline_flag_source_path: raise ValueError( "offline_flag_source_path must be provided when using in-process resolver" ) - self.flag_store = FileWatcherFlagStore( - self.config.offline_flag_source_path, - self.provider, - self.config.retry_backoff_ms * 0.001, + self.flag_store = FlagStore(emit_provider_configuration_changed) + self.connector: FlagStateConnector = FileWatcher( + self.config, self.flag_store, emit_provider_ready, emit_provider_error ) def initialize(self, evaluation_context: EvaluationContext) -> None: - pass + self.connector.initialize(evaluation_context) def shutdown(self) -> None: - self.flag_store.shutdown() + self.connector.shutdown() def resolve_boolean_details( self, @@ -54,7 +65,10 @@ def resolve_float_details( default_value: float, evaluation_context: typing.Optional[EvaluationContext] = None, ) -> FlagResolutionDetails[float]: - return self._resolve(key, default_value, evaluation_context) + result = self._resolve(key, default_value, evaluation_context) + if isinstance(result.value, int): + result.value = float(result.value) + return result def resolve_integer_details( self, diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/__init__.py 
b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/__init__.py new file mode 100644 index 00000000..07d49241 --- /dev/null +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/__init__.py @@ -0,0 +1,11 @@ +import typing + +from openfeature.evaluation_context import EvaluationContext + + +class FlagStateConnector(typing.Protocol): + def initialize( + self, evaluation_context: EvaluationContext + ) -> None: ... # pragma: no cover + + def shutdown(self) -> None: ... # pragma: no cover diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/file_watcher.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/file_watcher.py new file mode 100644 index 00000000..befc69ff --- /dev/null +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/connector/file_watcher.py @@ -0,0 +1,106 @@ +import json +import logging +import os +import threading +import time +import typing + +import yaml + +from openfeature.contrib.provider.flagd.config import Config +from openfeature.contrib.provider.flagd.resolvers.process.connector import ( + FlagStateConnector, +) +from openfeature.contrib.provider.flagd.resolvers.process.flags import FlagStore +from openfeature.evaluation_context import EvaluationContext +from openfeature.event import ProviderEventDetails +from openfeature.exception import ParseError, ProviderNotReadyError + +logger = logging.getLogger("openfeature.contrib") + + +class FileWatcher(FlagStateConnector): + def __init__( + self, + config: Config, + flag_store: FlagStore, + emit_provider_ready: typing.Callable[[ProviderEventDetails], None], + emit_provider_error: typing.Callable[[ProviderEventDetails], None], + ): + if config.offline_flag_source_path is None: + raise ValueError( + 
f"`config.offline_flag_source_path` parameter invalid: {config.offline_flag_source_path}" + ) + else: + self.file_path = config.offline_flag_source_path + + self.emit_provider_ready = emit_provider_ready + self.emit_provider_error = emit_provider_error + self.deadline_seconds = config.deadline_ms * 0.001 + + self.last_modified = 0.0 + self.flag_store = flag_store + self.should_emit_ready_on_success = False + + def initialize(self, evaluation_context: EvaluationContext) -> None: + self.active = True + self.thread = threading.Thread( + target=self.refresh_file, daemon=True, name="FlagdFileWatcherWorkerThread" + ) + self.thread.start() + + # Let this throw exceptions so that provider status is set correctly + try: + self.should_emit_ready_on_success = True + self._load_data() + except Exception as err: + raise ProviderNotReadyError from err + + def shutdown(self) -> None: + self.active = False + + def refresh_file(self) -> None: + while self.active: + time.sleep(self.deadline_seconds) + logger.debug("checking for new flag store contents from file") + self.safe_load_data() + + def safe_load_data(self) -> None: + try: + last_modified = os.path.getmtime(self.file_path) + if last_modified > self.last_modified: + self._load_data(last_modified) + except FileNotFoundError: + self.handle_error("Provided file path not valid") + except json.JSONDecodeError: + self.handle_error("Could not parse JSON flag data from file") + except yaml.error.YAMLError: + self.handle_error("Could not parse YAML flag data from file") + except ParseError: + self.handle_error("Could not parse flag data using flagd syntax") + except Exception: + self.handle_error("Could not read flags from file") + + def _load_data(self, modified_time: typing.Optional[float] = None) -> None: + with open(self.file_path) as file: + if self.file_path.endswith(".yaml"): + data = yaml.safe_load(file) + else: + data = json.load(file) + + self.flag_store.update(data) + + if self.should_emit_ready_on_success: + 
self.emit_provider_ready( + ProviderEventDetails( + message="Reloading file contents recovered from error state" + ) + ) + self.should_emit_ready_on_success = False + + self.last_modified = modified_time or os.path.getmtime(self.file_path) + + def handle_error(self, error_message: str) -> None: + logger.exception(error_message) + self.should_emit_ready_on_success = True + self.emit_provider_error(ProviderEventDetails(message=error_message)) diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/file_watcher.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/file_watcher.py deleted file mode 100644 index 74835f94..00000000 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/file_watcher.py +++ /dev/null @@ -1,91 +0,0 @@ -import json -import logging -import os -import re -import threading -import time -import typing - -import yaml - -from openfeature.event import ProviderEventDetails -from openfeature.exception import ParseError -from openfeature.provider.provider import AbstractProvider - -from .flags import Flag - -logger = logging.getLogger("openfeature.contrib") - - -class FileWatcherFlagStore: - def __init__( - self, - file_path: str, - provider: AbstractProvider, - poll_interval_seconds: float = 1.0, - ): - self.file_path = file_path - self.provider = provider - self.poll_interval_seconds = poll_interval_seconds - - self.last_modified = 0.0 - self.flag_data: typing.Mapping[str, Flag] = {} - self.load_data() - self.active = True - self.thread = threading.Thread(target=self.refresh_file, daemon=True) - self.thread.start() - - def shutdown(self) -> None: - self.active = False - pass - - def get_flag(self, key: str) -> typing.Optional[Flag]: - return self.flag_data.get(key) - - def refresh_file(self) -> None: - while self.active: - time.sleep(self.poll_interval_seconds) - logger.debug("checking for new flag store 
contents from file") - last_modified = os.path.getmtime(self.file_path) - if last_modified > self.last_modified: - self.load_data(last_modified) - - def load_data(self, modified_time: typing.Optional[float] = None) -> None: - try: - with open(self.file_path) as file: - if self.file_path.endswith(".yaml"): - data = yaml.safe_load(file) - else: - data = json.load(file) - - self.flag_data = self.parse_flags(data) - logger.debug(f"{self.flag_data=}") - self.provider.emit_provider_configuration_changed( - ProviderEventDetails(flags_changed=list(self.flag_data.keys())) - ) - self.last_modified = modified_time or os.path.getmtime(self.file_path) - except FileNotFoundError: - logger.exception("Provided file path not valid") - except json.JSONDecodeError: - logger.exception("Could not parse JSON flag data from file") - except yaml.error.YAMLError: - logger.exception("Could not parse YAML flag data from file") - except ParseError: - logger.exception("Could not parse flag data using flagd syntax") - except Exception: - logger.exception("Could not read flags from file") - - def parse_flags(self, flags_data: dict) -> dict: - flags = flags_data.get("flags", {}) - evaluators: typing.Optional[dict] = flags_data.get("$evaluators") - if evaluators: - transposed = json.dumps(flags) - for name, rule in evaluators.items(): - transposed = re.sub( - rf"{{\s*\"\$ref\":\s*\"{name}\"\s*}}", json.dumps(rule), transposed - ) - flags = json.loads(transposed) - - if not isinstance(flags, dict): - raise ParseError("`flags` key of configuration must be a dictionary") - return {key: Flag.from_dict(key, data) for key, data in flags.items()} diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/flags.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/flags.py index 0354ac42..d262f205 100644 --- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/flags.py 
+++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/flags.py @@ -1,9 +1,45 @@ +import json +import re import typing from dataclasses import dataclass +from openfeature.event import ProviderEventDetails from openfeature.exception import ParseError +class FlagStore: + def __init__( + self, + emit_provider_configuration_changed: typing.Callable[ + [ProviderEventDetails], None + ], + ): + self.emit_provider_configuration_changed = emit_provider_configuration_changed + self.flags: typing.Mapping[str, Flag] = {} + + def get_flag(self, key: str) -> typing.Optional["Flag"]: + return self.flags.get(key) + + def update(self, flags_data: dict) -> None: + flags = flags_data.get("flags", {}) + evaluators: typing.Optional[dict] = flags_data.get("$evaluators") + if evaluators: + transposed = json.dumps(flags) + for name, rule in evaluators.items(): + transposed = re.sub( + rf"{{\s*\"\$ref\":\s*\"{name}\"\s*}}", json.dumps(rule), transposed + ) + flags = json.loads(transposed) + + if not isinstance(flags, dict): + raise ParseError("`flags` key of configuration must be a dictionary") + self.flags = {key: Flag.from_dict(key, data) for key, data in flags.items()} + + self.emit_provider_configuration_changed( + ProviderEventDetails(flags_changed=list(self.flags.keys())) + ) + + @dataclass class Flag: key: str diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/protocol.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/protocol.py new file mode 100644 index 00000000..c41e4e25 --- /dev/null +++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/protocol.py @@ -0,0 +1,47 @@ +import typing + +from typing_extensions import Protocol + +from openfeature.evaluation_context import EvaluationContext +from openfeature.flag_evaluation import FlagResolutionDetails + + +class AbstractResolver(Protocol): + def initialize(self, 
evaluation_context: EvaluationContext) -> None: ... + + def shutdown(self) -> None: ... + + def resolve_boolean_details( + self, + key: str, + default_value: bool, + evaluation_context: typing.Optional[EvaluationContext] = None, + ) -> FlagResolutionDetails[bool]: ... + + def resolve_string_details( + self, + key: str, + default_value: str, + evaluation_context: typing.Optional[EvaluationContext] = None, + ) -> FlagResolutionDetails[str]: ... + + def resolve_float_details( + self, + key: str, + default_value: float, + evaluation_context: typing.Optional[EvaluationContext] = None, + ) -> FlagResolutionDetails[float]: ... + + def resolve_integer_details( + self, + key: str, + default_value: int, + evaluation_context: typing.Optional[EvaluationContext] = None, + ) -> FlagResolutionDetails[int]: ... + + def resolve_object_details( + self, + key: str, + default_value: typing.Union[dict, list], + evaluation_context: typing.Optional[EvaluationContext] = None, + ) -> FlagResolutionDetails[typing.Union[dict, list]]: ... 
diff --git a/providers/openfeature-provider-flagd/tests/e2e/__init__.py b/providers/openfeature-provider-flagd/tests/e2e/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/providers/openfeature-provider-flagd/tests/e2e/conftest.py b/providers/openfeature-provider-flagd/tests/e2e/conftest.py index e80eb15b..d7ef2c72 100644 --- a/providers/openfeature-provider-flagd/tests/e2e/conftest.py +++ b/providers/openfeature-provider-flagd/tests/e2e/conftest.py @@ -1,17 +1,9 @@ import typing -from tests.e2e.steps import * # noqa: F403 +from tests.e2e.step.config_steps import * # noqa: F403 +from tests.e2e.step.context_steps import * # noqa: F403 +from tests.e2e.step.event_steps import * # noqa: F403 +from tests.e2e.step.flag_step import * # noqa: F403 +from tests.e2e.step.provider_steps import * # noqa: F403 JsonPrimitive = typing.Union[str, bool, float, int] - -TEST_HARNESS_PATH = "../../openfeature/test-harness" -SPEC_PATH = "../../openfeature/spec" - - -# running all gherkin tests, except the ones, not implemented -def pytest_collection_modifyitems(config): - marker = "not customCert and not unixsocket and not sync and not targetURI" - - # this seems to not work with python 3.8 - if hasattr(config.option, "markexpr") and config.option.markexpr == "": - config.option.markexpr = marker diff --git a/providers/openfeature-provider-flagd/tests/e2e/file/__init__.py b/providers/openfeature-provider-flagd/tests/e2e/file/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/providers/openfeature-provider-flagd/tests/e2e/file/conftest.py b/providers/openfeature-provider-flagd/tests/e2e/file/conftest.py new file mode 100644 index 00000000..f687cb17 --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/file/conftest.py @@ -0,0 +1,27 @@ +import pytest + +from openfeature.contrib.provider.flagd.config import ResolverType +from tests.e2e.testfilter import TestFilter + +resolver = ResolverType.FILE +feature_list = { + "~targetURI", + 
"~customCert", + "~unixsocket", + "~reconnect", + "~sync", + "~caching", + "~grace", +} + + +def pytest_collection_modifyitems(config, items): + test_filter = TestFilter( + config, feature_list=feature_list, resolver=resolver.value, base_path=__file__ + ) + test_filter.filter_items(items) + + +@pytest.fixture() +def resolver_type() -> ResolverType: + return resolver diff --git a/providers/openfeature-provider-flagd/tests/e2e/file/test_flaqd.py b/providers/openfeature-provider-flagd/tests/e2e/file/test_flaqd.py new file mode 100644 index 00000000..6ca4433b --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/file/test_flaqd.py @@ -0,0 +1,10 @@ +# as soon as we support all the features, we can actually remove this limitation to not run on Python 3.8 +# Python 3.8 does not fully support tagging, hence that it will run all cases +import sys + +from pytest_bdd import scenarios + +from tests.e2e.paths import TEST_HARNESS_PATH + +if sys.version_info >= (3, 9): + scenarios(f"{TEST_HARNESS_PATH}/gherkin") diff --git a/providers/openfeature-provider-flagd/tests/e2e/flagd_container.py b/providers/openfeature-provider-flagd/tests/e2e/flagd_container.py index 31045759..2a01d503 100644 --- a/providers/openfeature-provider-flagd/tests/e2e/flagd_container.py +++ b/providers/openfeature-provider-flagd/tests/e2e/flagd_container.py @@ -1,4 +1,6 @@ +import os.path import time +import typing from pathlib import Path import grpc @@ -6,21 +8,39 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for_logs +from openfeature.contrib.provider.flagd.config import ResolverType + HEALTH_CHECK = 8014 +LAUNCHPAD = 8080 class FlagdContainer(DockerContainer): def __init__( self, - image: str = "ghcr.io/open-feature/flagd-testbed", - port: int = 8013, + feature: typing.Optional[str] = None, **kwargs, ) -> None: + image: str = "ghcr.io/open-feature/flagd-testbed" + if feature is not None: + image = 
f"{image}-{feature}" path = Path(__file__).parents[2] / "openfeature/test-harness/version.txt" data = path.read_text().rstrip() super().__init__(f"{image}:v{data}", **kwargs) - self.port = port - self.with_exposed_ports(self.port, HEALTH_CHECK) + self.rpc = 8013 + self.ipr = 8015 + self.flagDir = Path("./flags") + self.flagDir.mkdir(parents=True, exist_ok=True) + self.with_exposed_ports(self.rpc, self.ipr, HEALTH_CHECK, LAUNCHPAD) + self.with_volume_mapping(os.path.abspath(self.flagDir.name), "/flags", "rw") + + def get_port(self, resolver_type: ResolverType): + if resolver_type == ResolverType.RPC: + return self.get_exposed_port(self.rpc) + else: + return self.get_exposed_port(self.ipr) + + def get_launchpad_url(self): + return f"http://localhost:{self.get_exposed_port(LAUNCHPAD)}" def start(self) -> "FlagdContainer": super().start() diff --git a/providers/openfeature-provider-flagd/tests/e2e/paths.py b/providers/openfeature-provider-flagd/tests/e2e/paths.py new file mode 100644 index 00000000..6a5022f1 --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/paths.py @@ -0,0 +1,2 @@ +TEST_HARNESS_PATH = "../../openfeature/test-harness" +SPEC_PATH = "../../openfeature/spec" diff --git a/providers/openfeature-provider-flagd/tests/e2e/rpc/__init__.py b/providers/openfeature-provider-flagd/tests/e2e/rpc/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/providers/openfeature-provider-flagd/tests/e2e/rpc/conftest.py b/providers/openfeature-provider-flagd/tests/e2e/rpc/conftest.py new file mode 100644 index 00000000..77ca44b1 --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/rpc/conftest.py @@ -0,0 +1,19 @@ +import pytest + +from openfeature.contrib.provider.flagd.config import ResolverType +from tests.e2e.testfilter import TestFilter + +resolver = ResolverType.RPC +feature_list = ["~targetURI", "~unixsocket", "~sync"] + + +def pytest_collection_modifyitems(config, items): + test_filter = TestFilter( + config, 
feature_list=feature_list, resolver=resolver.value, base_path=__file__ + ) + test_filter.filter_items(items) + + +@pytest.fixture() +def resolver_type() -> ResolverType: + return resolver diff --git a/providers/openfeature-provider-flagd/tests/e2e/rpc/test_flaqd.py b/providers/openfeature-provider-flagd/tests/e2e/rpc/test_flaqd.py new file mode 100644 index 00000000..747d91ad --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/rpc/test_flaqd.py @@ -0,0 +1,10 @@ +import sys + +from pytest_bdd import scenarios + +from tests.e2e.paths import TEST_HARNESS_PATH + +# as soon as we support all the features, we can actually remove this limitation to not run on Python 3.8 +# Python 3.8 does not fully support tagging, hence that it will run all cases +if sys.version_info >= (3, 9): + scenarios(f"{TEST_HARNESS_PATH}/gherkin") diff --git a/providers/openfeature-provider-flagd/tests/e2e/step/_utils.py b/providers/openfeature-provider-flagd/tests/e2e/step/_utils.py new file mode 100644 index 00000000..befdc35d --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/step/_utils.py @@ -0,0 +1,30 @@ +import json +import time +import typing + +from asserts import assert_true + + +def str2bool(v): + return v.lower() in ("yes", "true", "t", "1") + + +type_cast = { + "Integer": int, + "Float": float, + "String": str, + "Boolean": str2bool, + "Object": json.loads, +} + + +JsonObject = typing.Union[dict, list] +JsonPrimitive = typing.Union[str, bool, float, int, JsonObject] + + +def wait_for(pred, poll_sec=2, timeout_sec=10): + start = time.time() + while not (ok := pred()) and (time.time() - start < timeout_sec): + time.sleep(poll_sec) + assert_true(pred()) + return ok diff --git a/providers/openfeature-provider-flagd/tests/e2e/test_config.py b/providers/openfeature-provider-flagd/tests/e2e/step/config_steps.py similarity index 72% rename from providers/openfeature-provider-flagd/tests/e2e/test_config.py rename to 
providers/openfeature-provider-flagd/tests/e2e/step/config_steps.py index 238112d2..9fdd6ed9 100644 --- a/providers/openfeature-provider-flagd/tests/e2e/test_config.py +++ b/providers/openfeature-provider-flagd/tests/e2e/step/config_steps.py @@ -1,11 +1,9 @@ import re -import sys import typing import pytest -from asserts import assert_equal -from pytest_bdd import given, parsers, scenarios, then, when -from tests.e2e.conftest import TEST_HARNESS_PATH +from asserts import assert_equal, assert_true +from pytest_bdd import given, parsers, then, when from openfeature.contrib.provider.flagd.config import CacheType, Config, ResolverType @@ -37,16 +35,6 @@ def convert_resolver_type(val: typing.Union[str, ResolverType]) -> ResolverType: } -@pytest.fixture(autouse=True) -def container(): - pass - - -@pytest.fixture(autouse=True) -def setup_provider(request): - pass - - @pytest.fixture() def option_values() -> dict: return {} @@ -75,20 +63,26 @@ def env_with_value(monkeypatch, env: str, value: str): parsers.cfparse( "a config was initialized", ), - target_fixture="config", + target_fixture="config_or_error", ) def initialize_config(option_values): - return Config(**option_values) + try: + return Config(**option_values), False + except AttributeError: + return None, True @when( parsers.cfparse( 'a config was initialized for "{resolver_type}"', ), - target_fixture="config", + target_fixture="config_or_error", ) def initialize_config_for(resolver_type: str, option_values: dict): - return Config(resolver=ResolverType(resolver_type), **option_values) + try: + return Config(resolver=ResolverType(resolver_type), **option_values), False + except AttributeError: + return None, True @then( @@ -96,13 +90,18 @@ def initialize_config_for(resolver_type: str, option_values: dict): 'the option "{option}" of type "{type_info}" should have the value "{value}"', ) ) -def check_option_value(option, value, type_info, config): +def check_option_value(option, value, type_info, config_or_error): 
value = type_cast[type_info](value) value = value if value != "null" else None + config, _ = config_or_error assert_equal(config.__getattribute__(camel_to_snake(option)), value) -if sys.version_info >= (3, 9): - scenarios( - f"{TEST_HARNESS_PATH}/gherkin/config.feature", +@then( + parsers.cfparse( + "we should have an error", ) +) +def check_option_error(config_or_error): + _, error = config_or_error + assert_true(error) diff --git a/providers/openfeature-provider-flagd/tests/e2e/step/context_steps.py b/providers/openfeature-provider-flagd/tests/e2e/step/context_steps.py new file mode 100644 index 00000000..65f34d60 --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/step/context_steps.py @@ -0,0 +1,73 @@ +import typing + +import pytest +from pytest_bdd import given, parsers, when +from tests.e2e.parsers import to_bool, to_list + +from openfeature.evaluation_context import EvaluationContext + +from ._utils import type_cast + + +@pytest.fixture +def evaluation_context() -> EvaluationContext: + return EvaluationContext() + + +@given( + parsers.cfparse( + 'a context containing a targeting key with value "{targeting_key}"' + ), +) +def assign_targeting_context(evaluation_context: EvaluationContext, targeting_key: str): + """a context containing a targeting key with value .""" + evaluation_context.targeting_key = targeting_key + + +@given( + parsers.cfparse( + 'a context containing a key "{key}", with type "{type_info}" and with value "{value}"' + ), +) +def update_context( + evaluation_context: EvaluationContext, key: str, type_info: str, value: str +): + """a context containing a key and value.""" + evaluation_context.attributes[key] = type_cast[type_info](value) + + +@when( + parsers.cfparse( + 'context contains keys {fields:s} with values "{svalue}", "{svalue2}", {ivalue:d}, "{bvalue:bool}"', + extra_types={"bool": to_bool, "s": to_list}, + ), +) +def assign_targeting_context_2( + evaluation_context: EvaluationContext, + fields: list, + svalue: str, + 
svalue2: str, + ivalue: int, + bvalue: bool, +): + evaluation_context.attributes[fields[0]] = svalue + evaluation_context.attributes[fields[1]] = svalue2 + evaluation_context.attributes[fields[2]] = ivalue + evaluation_context.attributes[fields[3]] = bvalue + + +@given( + parsers.cfparse( + 'a context containing a nested property with outer key "{outer}" and inner key "{inner}", with value "{value}"' + ), +) +def update_context_nested( + evaluation_context: EvaluationContext, + outer: str, + inner: str, + value: typing.Union[str, int], +): + """a context containing a nested property with outer key, and inner key, and value.""" + if outer not in evaluation_context.attributes: + evaluation_context.attributes[outer] = {} + evaluation_context.attributes[outer][inner] = value diff --git a/providers/openfeature-provider-flagd/tests/e2e/step/event_steps.py b/providers/openfeature-provider-flagd/tests/e2e/step/event_steps.py new file mode 100644 index 00000000..571fa948 --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/step/event_steps.py @@ -0,0 +1,90 @@ +import logging +import time + +import pytest +from asserts import assert_greater +from pytest_bdd import given, parsers, then, when + +from openfeature.client import OpenFeatureClient +from openfeature.event import ProviderEvent + +events = { + "ready": ProviderEvent.PROVIDER_READY, + "error": ProviderEvent.PROVIDER_ERROR, + "stale": ProviderEvent.PROVIDER_STALE, + "change": ProviderEvent.PROVIDER_CONFIGURATION_CHANGED, +} + + +@pytest.fixture() +def event_handles() -> list: + return [] + + +@given( + parsers.cfparse( + "a {event_type} event handler", + ), +) +def add_event_handler(client: OpenFeatureClient, event_type: str, event_handles: list): + def handler(event): + logging.warning((event_type, event)) + event_handles.append( + { + "type": event_type, + "event": event, + } + ) + + client.add_handler(events[event_type], handler) + + logging.warning(("handler added", event_type)) + + +def 
assert_handlers(handles, event_type: str, max_wait: int = 2): + poll_interval = 1 + while max_wait > 0: + found = any(h["type"] == event_type for h in handles) + if not found: + max_wait -= poll_interval + time.sleep(poll_interval) + continue + break + return handles + + +@when( + parsers.cfparse( + "a {event_type} event was fired", + ), + target_fixture="event_details", +) +def pass_for_event_fired(event_type: str, event_handles): + events = assert_handlers(event_handles, event_type, 30000) + events = [e for e in events if e["type"] == event_type] + assert_greater(len(events), 0) + for event in event_handles: + event_handles.remove(event) + return events[0]["event"] + + +@then( + parsers.cfparse( + "the {event_type} event handler should have been executed", + ) +) +def assert_handler_run(event_type, event_handles): + assert_handler_run_within(event_type, event_handles, 30000) + + +@then( + parsers.cfparse( + "the {event_type} event handler should have been executed within {time:d}ms", + ) +) +def assert_handler_run_within(event_type, event_handles, time: int): + events = assert_handlers(event_handles, event_type, max_wait=int(time / 1000)) + assert_greater(len(events), 0) + + for event in event_handles: + event_handles.remove(event) diff --git a/providers/openfeature-provider-flagd/tests/e2e/step/flag_step.py b/providers/openfeature-provider-flagd/tests/e2e/step/flag_step.py new file mode 100644 index 00000000..821b3be9 --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/step/flag_step.py @@ -0,0 +1,98 @@ +import typing + +import requests +from asserts import assert_equal +from pytest_bdd import given, parsers, then, when + +from openfeature.client import OpenFeatureClient +from openfeature.evaluation_context import EvaluationContext +from openfeature.event import EventDetails +from openfeature.flag_evaluation import FlagEvaluationDetails, Reason + +from ._utils import JsonPrimitive, type_cast + + +@given( + parsers.cfparse( + 'a {type_info}-flag 
with key "{key}" and a default value "{default}"' + ), + target_fixture="key_and_default_and_type", +) +def setup_key_and_default( + key: str, default: JsonPrimitive, type_info: str +) -> typing.Tuple[str, JsonPrimitive, str]: + return key, default, type_info + + +@when("the flag was evaluated with details", target_fixture="details") +def evaluate_with_details( + client: OpenFeatureClient, + key_and_default_and_type: tuple, + evaluation_context: EvaluationContext, +): + key, default, type_info = key_and_default_and_type + default = type_cast[type_info](default) + if type_info == "Boolean": + return client.get_boolean_details(key, default, evaluation_context) + elif type_info == "String": + return client.get_string_details(key, default, evaluation_context) + elif type_info == "Integer": + return client.get_integer_details(key, default, evaluation_context) + elif type_info == "Float": + return client.get_float_details(key, default, evaluation_context) + elif type_info == "Object": + return client.get_object_details(key, default, evaluation_context) + raise AssertionError("no valid object type") + + +@when("the flag was modified") +def assert_flag_change_event(container): + requests.post(f"{container.get_launchpad_url()}/change", timeout=1) + + +@then("the flag should be part of the event payload") +def assert_flag_change(key_and_default_and_type: tuple, event_details: EventDetails): + key, _, _ = key_and_default_and_type + assert key in event_details.flags_changed + + +@then( + parsers.cfparse('the resolved details value should be ""'), +) +def resolve_details_value_string( + details: FlagEvaluationDetails[JsonPrimitive], + key_and_default_and_type: tuple, +): + resolve_details_value(details, key_and_default_and_type, "") + + +@then( + parsers.cfparse('the resolved details value should be "{value}"'), +) +def resolve_details_value( + details: FlagEvaluationDetails[JsonPrimitive], + key_and_default_and_type: tuple, + value: str, +): + _, _, type_info = 
key_and_default_and_type + assert_equal(details.value, type_cast[type_info](value)) + + +@then( + parsers.cfparse('the variant should be "{variant}"'), +) +def resolve_details_variant( + details: FlagEvaluationDetails[JsonPrimitive], + variant: str, +): + assert_equal(details.variant, variant) + + +@then( + parsers.cfparse('the reason should be "{reason}"'), +) +def resolve_details_reason( + details: FlagEvaluationDetails[JsonPrimitive], + reason: str, +): + assert_equal(details.reason, Reason(reason)) diff --git a/providers/openfeature-provider-flagd/tests/e2e/step/provider_steps.py b/providers/openfeature-provider-flagd/tests/e2e/step/provider_steps.py new file mode 100644 index 00000000..b571aa0e --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/step/provider_steps.py @@ -0,0 +1,142 @@ +import logging +import os +import time +import typing +from enum import Enum +from pathlib import Path + +import pytest +import requests +from pytest_bdd import given, parsers, when +from tests.e2e.flagd_container import FlagdContainer +from tests.e2e.step._utils import wait_for + +from openfeature import api +from openfeature.client import OpenFeatureClient +from openfeature.contrib.provider.flagd import FlagdProvider +from openfeature.contrib.provider.flagd.config import ResolverType +from openfeature.provider import ProviderStatus + +KEY_EVALUATORS = "$evaluators" + +KEY_FLAGS = "flags" + +MERGED_FILE = "merged_file" + + +class TestProviderType(Enum): + UNAVAILABLE = "unavailable" + STABLE = "stable" + UNSTABLE = "unstable" + SSL = "ssl" + SOCKET = "socket" + + +@given("a provider is registered", target_fixture="client") +def setup_provider_old( + container: FlagdContainer, + resolver_type: ResolverType, + option_values: dict, +) -> OpenFeatureClient: + setup_provider(container, resolver_type, "stable", dict) + + +def get_default_options_for_provider( + provider_type: str, resolver_type: ResolverType, container +) -> typing.Tuple[dict, bool]: + launchpad = 
"default" + t = TestProviderType(provider_type) + options: dict = { + "resolver_type": resolver_type, + "deadline_ms": 1000, + "stream_deadline_ms": 0, + "retry_backoff_ms": 1000, + "retry_grace_period": 3, + "port": container.get_port(resolver_type), + } + + if t == TestProviderType.UNAVAILABLE: + return {}, False + elif t == TestProviderType.SSL: + path = ( + Path(__file__).parents[3] + / "openfeature/test-harness/ssl/custom-root-cert.crt" + ) + options["cert_path"] = str(path.absolute()) + options["tls"] = True + launchpad = "ssl" + elif t == TestProviderType.SOCKET: + return options, True + + if resolver_type == ResolverType.FILE: + options["offline_flag_source_path"] = os.path.join( + container.flagDir.name, "allFlags.json" + ) + + requests.post( + f"{container.get_launchpad_url()}/start?config={launchpad}", timeout=1 + ) + time.sleep(0.1) + return options, True + + +@given( + parsers.cfparse("a {provider_type} flagd provider"), target_fixture="provider_type" +) +def setup_provider( + container: FlagdContainer, + resolver_type: ResolverType, + provider_type: str, + option_values: dict, +) -> OpenFeatureClient: + default_options, wait = get_default_options_for_provider( + provider_type, resolver_type, container + ) + + combined_options = {**default_options, **option_values} + api.set_provider( + FlagdProvider(**combined_options), + provider_type, + ) + client = api.get_client(provider_type) + + wait_for( + lambda: client.get_provider_status() == ProviderStatus.READY + ) if wait else None + return provider_type + + +@pytest.fixture() +def client(provider_type: str) -> OpenFeatureClient: + return api.get_client(provider_type) + + +@when(parsers.cfparse("the connection is lost for {seconds}s")) +def flagd_restart( + seconds, + container: FlagdContainer, + provider_type: str, + resolver_type: ResolverType, +): + requests.post( + f"{container.get_launchpad_url()}/restart?seconds={seconds}", timeout=2 + ) + pass + + +@pytest.fixture(autouse=True, scope="package") 
+def container(request): + container = FlagdContainer() + + container.start() + + def fin(): + try: + container.stop() + except: # noqa: E722 - we want to ensure all containers are stopped, even if we do have an exception here + logging.debug("container was not running anymore") + + # Teardown code + request.addfinalizer(fin) + + return container diff --git a/providers/openfeature-provider-flagd/tests/e2e/steps.py b/providers/openfeature-provider-flagd/tests/e2e/steps.py deleted file mode 100644 index aa0b64d7..00000000 --- a/providers/openfeature-provider-flagd/tests/e2e/steps.py +++ /dev/null @@ -1,721 +0,0 @@ -import logging -import threading -import time -import typing - -import pytest -from asserts import assert_equal, assert_in, assert_not_equal, assert_true -from pytest_bdd import given, parsers, then, when -from testcontainers.core.container import DockerContainer -from tests.e2e.flagd_container import FlagdContainer -from tests.e2e.parsers import to_bool, to_list - -from openfeature import api -from openfeature.client import OpenFeatureClient -from openfeature.contrib.provider.flagd import FlagdProvider -from openfeature.evaluation_context import EvaluationContext -from openfeature.event import ProviderEvent -from openfeature.flag_evaluation import ErrorCode, FlagEvaluationDetails, Reason -from openfeature.provider import ProviderStatus - -JsonObject = typing.Union[dict, list] -JsonPrimitive = typing.Union[str, bool, float, int, JsonObject] - - -@pytest.fixture -def evaluation_context() -> EvaluationContext: - return EvaluationContext() - - -@given("a flagd provider is set", target_fixture="client") -@given("a provider is registered", target_fixture="client") -def setup_provider( - container: FlagdContainer, resolver_type, client_name, port -) -> OpenFeatureClient: - try: - container.get_exposed_port(port) - except: # noqa: E722 - container.start() - - api.set_provider( - FlagdProvider( - resolver_type=resolver_type, - 
port=int(container.get_exposed_port(port)), - timeout=1, - retry_grace_period=3, - ), - client_name, - ) - client = api.get_client(client_name) - wait_for(lambda: client.get_provider_status() == ProviderStatus.READY) - return client - - -@when( - parsers.cfparse( - 'a {ignored:s?}boolean flag with key "{key}" is evaluated with {details:s?}default value "{default:bool}"', - extra_types={"bool": to_bool, "s": str}, - ), - target_fixture="key_and_default", -) -@when( - parsers.cfparse( - 'a {ignored:s?}string flag with key "{key}" is evaluated with {details:s?}default value "{default}"', - extra_types={"s": str}, - ), - target_fixture="key_and_default", -) -@when( - parsers.cfparse( - 'a{ignored:s?} integer flag with key "{key}" is evaluated with {details:s?}default value {default:d}', - extra_types={"s": str}, - ), - target_fixture="key_and_default", -) -@when( - parsers.cfparse( - 'a {ignored:s?}float flag with key "{key}" is evaluated with {details:s?}default value {default:f}', - extra_types={"s": str}, - ), - target_fixture="key_and_default", -) -@when( - parsers.cfparse( - 'a string flag with key "{key}" is evaluated as an integer, with details and a default value {default:d}', - ), - target_fixture="key_and_default", -) -@when( - parsers.cfparse( - 'a flag with key "{key}" is evaluated with default value "{default}"', - ), - target_fixture="key_and_default", -) -def setup_key_and_default( - key: str, default: JsonPrimitive -) -> typing.Tuple[str, JsonPrimitive]: - return (key, default) - - -@when( - parsers.cfparse( - 'a string flag with key "{key}" is evaluated with details', - ), - target_fixture="key_and_default", -) -def setup_key_without_default(key: str) -> typing.Tuple[str, JsonPrimitive]: - return setup_key_and_default(key, "") - - -@when( - parsers.cfparse( - 'an object flag with key "{key}" is evaluated with a null default value', - ), - target_fixture="key_and_default", -) -@when( - parsers.cfparse( - 'an object flag with key "{key}" is evaluated 
with details and a null default value', - ), - target_fixture="key_and_default", -) -def setup_key_and_default_for_object(key: str) -> typing.Tuple[str, JsonObject]: - return (key, {}) - - -@when( - parsers.cfparse( - 'a context containing a targeting key with value "{targeting_key}"' - ), -) -def assign_targeting_context(evaluation_context: EvaluationContext, targeting_key: str): - """a context containing a targeting key with value .""" - evaluation_context.targeting_key = targeting_key - - -@when( - parsers.cfparse( - 'context contains keys {fields:s} with values "{svalue}", "{svalue2}", {ivalue:d}, "{bvalue:bool}"', - extra_types={"bool": to_bool, "s": to_list}, - ), -) -def assign_targeting_context_2( - evaluation_context: EvaluationContext, - fields: list, - svalue: str, - svalue2: str, - ivalue: int, - bvalue: bool, -): - evaluation_context.attributes[fields[0]] = svalue - evaluation_context.attributes[fields[1]] = svalue2 - evaluation_context.attributes[fields[2]] = ivalue - evaluation_context.attributes[fields[3]] = bvalue - - -@when( - parsers.cfparse('a context containing a key "{key}", with value "{value}"'), -) -@when( - parsers.cfparse('a context containing a key "{key}", with value {value:d}'), -) -def update_context( - evaluation_context: EvaluationContext, key: str, value: JsonPrimitive -): - """a context containing a key and value.""" - evaluation_context.attributes[key] = value - - -@when( - parsers.cfparse( - 'a context containing a nested property with outer key "{outer}" and inner key "{inner}", with value "{value}"' - ), -) -@when( - parsers.cfparse( - 'a context containing a nested property with outer key "{outer}" and inner key "{inner}", with value {value:d}' - ), -) -def update_context_nested( - evaluation_context: EvaluationContext, - outer: str, - inner: str, - value: typing.Union[str, int], -): - """a context containing a nested property with outer key, and inner key, and value.""" - if outer not in evaluation_context.attributes: - 
evaluation_context.attributes[outer] = {} - evaluation_context.attributes[outer][inner] = value - - -@then( - parsers.cfparse( - 'the resolved boolean value should be "{expected_value:bool}"', - extra_types={"bool": to_bool}, - ) -) -@then( - parsers.cfparse( - 'the resolved boolean zero-value should be "{expected_value:bool}"', - extra_types={"bool": to_bool}, - ) -) -def assert_boolean_value( - client: OpenFeatureClient, - key_and_default: tuple, - expected_value: bool, - evaluation_context: EvaluationContext, -): - key, default = key_and_default - evaluation_result = client.get_boolean_value(key, default, evaluation_context) - assert_equal(evaluation_result, expected_value) - - -@then( - parsers.cfparse( - 'the resolved boolean details value should be "{expected_value:bool}", the variant should be "{variant}", and the reason should be "{reason}"', - extra_types={"bool": to_bool}, - ) -) -def assert_boolean_value_with_details( - client: OpenFeatureClient, - key_and_default: tuple, - expected_value: bool, - variant: str, - reason: str, - evaluation_context: EvaluationContext, -): - key, default = key_and_default - evaluation_result = client.get_boolean_details(key, default, evaluation_context) - assert_equal(evaluation_result.value, expected_value) - assert_equal(evaluation_result.reason, reason) - assert_equal(evaluation_result.variant, variant) - - -@then( - parsers.cfparse( - "the resolved integer {ignored:s?}value should be {expected_value:d}", - extra_types={"s": str}, - ) -) -@then(parsers.cfparse("the returned value should be {expected_value:d}")) -def assert_integer_value( - client: OpenFeatureClient, - key_and_default: tuple, - expected_value: bool, - evaluation_context: EvaluationContext, -): - key, default = key_and_default - evaluation_result = client.get_integer_value(key, default, evaluation_context) - assert_equal(evaluation_result, expected_value) - - -@then( - parsers.cfparse( - 'the resolved integer details value should be {expected_value:d}, the 
variant should be "{variant}", and the reason should be "{reason}"', - ) -) -def assert_integer_value_with_details( - client: OpenFeatureClient, - key_and_default: tuple, - expected_value: int, - variant: str, - reason: str, - evaluation_context: EvaluationContext, -): - key, default = key_and_default - evaluation_result = client.get_integer_details(key, default, evaluation_context) - assert_equal(evaluation_result.value, expected_value) - assert_equal(evaluation_result.reason, reason) - assert_equal(evaluation_result.variant, variant) - - -@then( - parsers.cfparse( - "the resolved float {ignored:s?}value should be {expected_value:f}", - extra_types={"s": str}, - ) -) -def assert_float_value( - client: OpenFeatureClient, - key_and_default: tuple, - expected_value: bool, - evaluation_context: EvaluationContext, -): - key, default = key_and_default - evaluation_result = client.get_float_value(key, default, evaluation_context) - assert_equal(evaluation_result, expected_value) - - -@then( - parsers.cfparse( - 'the resolved float details value should be {expected_value:f}, the variant should be "{variant}", and the reason should be "{reason}"', - ) -) -def assert_float_value_with_details( - client: OpenFeatureClient, - key_and_default: tuple, - expected_value: float, - variant: str, - reason: str, - evaluation_context: EvaluationContext, -): - key, default = key_and_default - evaluation_result = client.get_float_details(key, default, evaluation_context) - assert_equal(evaluation_result.value, expected_value) - assert_equal(evaluation_result.reason, reason) - assert_equal(evaluation_result.variant, variant) - - -@then(parsers.cfparse('the returned value should be "{expected_value}"')) -def assert_string_value( - client: OpenFeatureClient, - key_and_default: tuple, - expected_value: bool, - evaluation_context: EvaluationContext, -): - key, default = key_and_default - evaluation_details = client.get_string_details(key, default, evaluation_context) - 
assert_equal(evaluation_details.value, expected_value) - - -@then( - parsers.cfparse( - 'the resolved string zero-value should be ""', - ) -) -def assert_empty_string( - client: OpenFeatureClient, - key_and_default: tuple, - evaluation_context: EvaluationContext, -): - assert_string(client, key_and_default, evaluation_context, "") - - -@then( - parsers.cfparse( - 'the resolved string value should be "{expected_value}"', - ) -) -def assert_string( - client: OpenFeatureClient, - key_and_default: tuple, - evaluation_context: EvaluationContext, - expected_value: str, -): - key, default = key_and_default - evaluation_result = client.get_string_value(key, default, evaluation_context) - assert_equal(evaluation_result, expected_value) - - -@then( - parsers.cfparse( - 'the resolved string response should be "{expected_value}"', - ) -) -def assert_string_response( - client: OpenFeatureClient, - key_and_default: tuple, - evaluation_context: EvaluationContext, - expected_value: str, -): - key, default = key_and_default - evaluation_result = client.get_string_value(key, default, evaluation_context) - assert_equal(evaluation_result, expected_value) - - -@then( - parsers.cfparse( - 'the resolved flag value is "{expected_value}" when the context is empty', - ) -) -def assert_string_without_context( - client: OpenFeatureClient, - key_and_default: tuple, - evaluation_context: EvaluationContext, - expected_value: str, -): - key, default = key_and_default - evaluation_result = client.get_string_value(key, default, None) - assert_equal(evaluation_result, expected_value) - - -@then( - parsers.cfparse( - 'the resolved object {details:s?}value should be contain fields "{bool_field}", "{string_field}", and "{int_field}", with values "{bvalue:bool}", "{svalue}" and {ivalue:d}, respectively', - extra_types={"bool": to_bool, "s": str}, - ), - target_fixture="evaluation_details", -) -def assert_object( # noqa: PLR0913 - client: OpenFeatureClient, - key_and_default: tuple, - bool_field: str, - 
string_field: str, - int_field: str, - bvalue: bool, - svalue: str, - ivalue: int, - details: str, -) -> FlagEvaluationDetails: - key, default = key_and_default - if details: - evaluation_result = client.get_object_details(key, default) - value = evaluation_result.value - assert_in(bool_field, value) - assert_in(string_field, value) - assert_in(string_field, value) - assert_equal(value[bool_field], bvalue) - assert_equal(value[string_field], svalue) - assert_equal(value[int_field], ivalue) - return evaluation_result - else: - evaluation_result = client.get_object_value(key, default) - assert_in(bool_field, evaluation_result) - assert_in(string_field, evaluation_result) - assert_in(string_field, evaluation_result) - assert_equal(evaluation_result[bool_field], bvalue) - assert_equal(evaluation_result[string_field], svalue) - assert_equal(evaluation_result[int_field], ivalue) - assert_not_equal(evaluation_result, None) - - -@then( - parsers.cfparse( - 'the variant should be "{variant}", and the reason should be "{reason}"', - ) -) -def assert_for_variant_and_reason( - client: OpenFeatureClient, - evaluation_details: FlagEvaluationDetails, - variant: str, - reason: str, -): - assert_equal(evaluation_details.reason, Reason[reason]) - assert_equal(evaluation_details.variant, variant) - - -@then( - parsers.cfparse( - "the default string value should be returned", - ), - target_fixture="evaluation_details", -) -def assert_default_string( - client: OpenFeatureClient, - key_and_default: tuple, - evaluation_context: EvaluationContext, -) -> FlagEvaluationDetails[str]: - key, default = key_and_default - evaluation_result = client.get_string_details(key, default, evaluation_context) - assert_equal(evaluation_result.value, default) - return evaluation_result - - -@then( - parsers.cfparse( - "the default integer value should be returned", - ), - target_fixture="evaluation_details", -) -def assert_default_integer( - client: OpenFeatureClient, - key_and_default: tuple, - 
evaluation_context: EvaluationContext, -) -> FlagEvaluationDetails[int]: - key, default = key_and_default - evaluation_result = client.get_integer_details(key, default, evaluation_context) - assert_equal(evaluation_result.value, default) - return evaluation_result - - -@then( - parsers.cfparse( - 'the reason should indicate an error and the error code should indicate a missing flag with "{error}"', - ) -) -@then( - parsers.cfparse( - 'the reason should indicate an error and the error code should indicate a type mismatch with "{error}"', - ) -) -def assert_for_error( - client: OpenFeatureClient, - evaluation_details: FlagEvaluationDetails, - error: str, -): - assert_equal(evaluation_details.error_code, ErrorCode[error]) - assert_equal(evaluation_details.reason, Reason.ERROR) - - -@then( - parsers.cfparse( - 'the resolved string details value should be "{expected_value}", the variant should be "{variant}", and the reason should be "{reason}"', - extra_types={"bool": to_bool}, - ) -) -def assert_string_value_with_details( - client: OpenFeatureClient, - key_and_default: tuple, - expected_value: str, - variant: str, - reason: str, - evaluation_context: EvaluationContext, -): - key, default = key_and_default - evaluation_result = client.get_string_details(key, default, evaluation_context) - assert_equal(evaluation_result.value, expected_value) - assert_equal(evaluation_result.reason, reason) - assert_equal(evaluation_result.variant, variant) - - -@then(parsers.cfparse('the returned reason should be "{reason}"')) -def assert_reason( - client: OpenFeatureClient, - key_and_default: tuple, - evaluation_context: EvaluationContext, - reason: str, -): - """the returned reason should be .""" - key, default = key_and_default - evaluation_result = client.get_string_details(key, default, evaluation_context) - assert_equal(evaluation_result.reason, reason) - - -@pytest.fixture() -def event_handles() -> list: - return [] - - -@pytest.fixture() -def error_handles() -> list: - return 
[] - - -@given( - parsers.cfparse( - "a {event_type:ProviderEvent} handler is added", - extra_types={"ProviderEvent": ProviderEvent}, - ), -) -@when( - parsers.cfparse( - "a {event_type:ProviderEvent} handler is added", - extra_types={"ProviderEvent": ProviderEvent}, - ), -) -def add_event_handler( - client: OpenFeatureClient, event_type: ProviderEvent, event_handles: list -): - def handler(event): - logging.debug((event_type, event)) - event_handles.append( - { - "type": event_type, - "event": event, - } - ) - - client.add_handler(event_type, handler) - - -@pytest.fixture(scope="function") -def context(): - return {} - - -@when( - parsers.cfparse( - "a {event_type:ProviderEvent} handler and a {event_type2:ProviderEvent} handler are added", - extra_types={"ProviderEvent": ProviderEvent}, - ) -) -def add_event_handlers( - client: OpenFeatureClient, - event_type: ProviderEvent, - event_type2: ProviderEvent, - event_handles, - error_handles, -): - add_event_handler(client, event_type, event_handles) - add_event_handler(client, event_type2, error_handles) - - -def assert_handlers( - handles, event_type: ProviderEvent, max_wait: int = 2, num_events: int = 1 -): - poll_interval = 1 - while max_wait > 0: - if sum([h["type"] == event_type for h in handles]) < num_events: - max_wait -= poll_interval - time.sleep(poll_interval) - continue - break - - logging.info(f"asserting num({event_type}) >= {num_events}: {handles}") - actual_num_events = sum([h["type"] == event_type for h in handles]) - assert num_events <= actual_num_events, ( - f"Expected {num_events} but got {actual_num_events}: {handles}" - ) - - -@then( - parsers.cfparse( - "the {event_type:ProviderEvent} handler must run", - extra_types={"ProviderEvent": ProviderEvent}, - ) -) -@then( - parsers.cfparse( - "the {event_type:ProviderEvent} handler must run when the provider connects", - extra_types={"ProviderEvent": ProviderEvent}, - ) -) -def assert_handler_run(event_type: ProviderEvent, event_handles): - 
assert_handlers(event_handles, event_type, max_wait=30) - - -@then( - parsers.cfparse( - "the {event_type:ProviderEvent} handler must run when the provider's connection is lost", - extra_types={"ProviderEvent": ProviderEvent}, - ) -) -def assert_disconnect_handler(error_handles, event_type: ProviderEvent): - # docker sync upstream restarts every 5s, waiting 2 cycles reduces test noise - assert_handlers(error_handles, event_type, max_wait=30) - - -@when( - parsers.cfparse('a flag with key "{flag_key}" is modified'), - target_fixture="changed_flag", -) -def changed_flag( - flag_key: str, -): - return flag_key - - -@then( - parsers.cfparse( - "when the connection is reestablished the {event_type:ProviderEvent} handler must run again", - extra_types={"ProviderEvent": ProviderEvent}, - ) -) -def assert_disconnect_error( - client: OpenFeatureClient, event_type: ProviderEvent, event_handles: list -): - assert_handlers(event_handles, event_type, max_wait=30, num_events=2) - - -@then(parsers.cfparse('the event details must indicate "{key}" was altered')) -def assert_flag_changed(event_handles, key): - handle = None - for h in event_handles: - if ( - h["type"] == ProviderEvent.PROVIDER_CONFIGURATION_CHANGED - and key in h["event"].flags_changed - ): - handle = h - break - - assert handle is not None - assert key in handle["event"].flags_changed - - -def wait_for(pred, poll_sec=2, timeout_sec=10): - start = time.time() - while not (ok := pred()) and (time.time() - start < timeout_sec): - time.sleep(poll_sec) - assert_true(pred()) - return ok - - -@given("flagd is unavailable", target_fixture="client") -def flagd_unavailable(resolver_type): - api.set_provider( - FlagdProvider(resolver_type=resolver_type, port=99999, retry_grace_period=2), - "unavailable", - ) - return api.get_client("unavailable") - - -@when("a flagd provider is set and initialization is awaited") -def flagd_init(client: OpenFeatureClient, event_handles, error_handles): - add_event_handler(client, 
ProviderEvent.PROVIDER_ERROR, error_handles) - add_event_handler(client, ProviderEvent.PROVIDER_READY, event_handles) - - -@then("an error should be indicated within the configured deadline") -def flagd_error(error_handles): - assert_handlers(error_handles, ProviderEvent.PROVIDER_ERROR) - - -@when(parsers.cfparse("the connection is lost for {seconds}s")) -def flagd_restart(seconds, container): - def starting(): - container.start() - - container.stop() - threading.Timer(int(seconds), starting).start() - - -@pytest.fixture(autouse=True, scope="module") -def container(request, port, image): - container: DockerContainer = FlagdContainer( - image=image, - port=port, - ) - # Setup code - container = container.start() - - def fin(): - try: - container.stop() - except: # noqa: E722 - logging.debug("container was not running anymore") - - # Teardown code - request.addfinalizer(fin) - - return container diff --git a/providers/openfeature-provider-flagd/tests/e2e/test_in_process_file.py b/providers/openfeature-provider-flagd/tests/e2e/test_in_process_file.py deleted file mode 100644 index 278bd1ea..00000000 --- a/providers/openfeature-provider-flagd/tests/e2e/test_in_process_file.py +++ /dev/null @@ -1,99 +0,0 @@ -import json -import os -import tempfile -from os import listdir - -import pytest -import yaml -from pytest_bdd import given, scenario, scenarios -from tests.e2e.conftest import SPEC_PATH, TEST_HARNESS_PATH -from tests.e2e.steps import wait_for - -from openfeature import api -from openfeature.client import OpenFeatureClient -from openfeature.contrib.provider.flagd import FlagdProvider -from openfeature.contrib.provider.flagd.config import ResolverType -from openfeature.provider import ProviderStatus - -KEY_EVALUATORS = "$evaluators" - -KEY_FLAGS = "flags" - -MERGED_FILE = "merged_file" - - -@pytest.fixture(params=["json", "yaml"], scope="module") -def file_name(request): - extension = request.param - result = {KEY_FLAGS: {}, KEY_EVALUATORS: {}} - - path = 
os.path.abspath( - os.path.join(os.path.dirname(__file__), f"{TEST_HARNESS_PATH}/flags/") - ) - - for f in listdir(path): - with open(path + "/" + f, "rb") as infile: - loaded_json = json.load(infile) - result[KEY_FLAGS] = {**result[KEY_FLAGS], **loaded_json[KEY_FLAGS]} - if loaded_json.get(KEY_EVALUATORS): - result[KEY_EVALUATORS] = { - **result[KEY_EVALUATORS], - **loaded_json[KEY_EVALUATORS], - } - - with tempfile.NamedTemporaryFile( - "w", delete=False, suffix="." + extension - ) as outfile: - if extension == "json": - json.dump(result, outfile) - else: - yaml.dump(result, outfile) - - return outfile - - -@pytest.fixture(autouse=True, scope="module") -def client_name() -> str: - return "in-process" - - -@pytest.fixture(autouse=True, scope="module") -def resolver_type() -> ResolverType: - return ResolverType.IN_PROCESS - - -@pytest.fixture(autouse=True) -def container(): - pass - - -@pytest.fixture(autouse=True, scope="module") -def setup(request, client_name, file_name, resolver_type): - """nothing to boot""" - api.set_provider( - FlagdProvider( - resolver_type=resolver_type, offline_flag_source_path=file_name.name - ), - client_name, - ) - - -@given("a flagd provider is set", target_fixture="client") -@given("a provider is registered", target_fixture="client") -def setup_provider(client_name) -> OpenFeatureClient: - client = api.get_client(client_name) - wait_for(lambda: client.get_provider_status() == ProviderStatus.READY) - return client - - -@pytest.mark.skip(reason="Eventing not implemented") -@scenario(f"{TEST_HARNESS_PATH}/gherkin/flagd.feature", "Flag change event") -def test_flag_change_event(): - """not implemented""" - - -scenarios( - f"{TEST_HARNESS_PATH}/gherkin/flagd.feature", - f"{TEST_HARNESS_PATH}/gherkin/flagd-json-evaluator.feature", - f"{SPEC_PATH}/specification/assets/gherkin/evaluation.feature", -) diff --git a/providers/openfeature-provider-flagd/tests/e2e/test_rpc.py b/providers/openfeature-provider-flagd/tests/e2e/test_rpc.py deleted 
file mode 100644 index e3508cf1..00000000 --- a/providers/openfeature-provider-flagd/tests/e2e/test_rpc.py +++ /dev/null @@ -1,33 +0,0 @@ -import pytest -from pytest_bdd import scenarios -from tests.e2e.conftest import SPEC_PATH, TEST_HARNESS_PATH - -from openfeature.contrib.provider.flagd.config import ResolverType - - -@pytest.fixture(autouse=True, scope="module") -def client_name() -> str: - return "rpc" - - -@pytest.fixture(autouse=True, scope="module") -def resolver_type() -> ResolverType: - return ResolverType.RPC - - -@pytest.fixture(autouse=True, scope="module") -def port(): - return 8013 - - -@pytest.fixture(autouse=True, scope="module") -def image(): - return "ghcr.io/open-feature/flagd-testbed" - - -scenarios( - f"{TEST_HARNESS_PATH}/gherkin/flagd.feature", - f"{TEST_HARNESS_PATH}/gherkin/flagd-json-evaluator.feature", - f"{SPEC_PATH}/specification/assets/gherkin/evaluation.feature", - f"{TEST_HARNESS_PATH}/gherkin/flagd-rpc-caching.feature", -) diff --git a/providers/openfeature-provider-flagd/tests/e2e/test_rpc_reconnect.py b/providers/openfeature-provider-flagd/tests/e2e/test_rpc_reconnect.py deleted file mode 100644 index 36f526e2..00000000 --- a/providers/openfeature-provider-flagd/tests/e2e/test_rpc_reconnect.py +++ /dev/null @@ -1,31 +0,0 @@ -import pytest -from pytest_bdd import scenarios -from tests.e2e.conftest import TEST_HARNESS_PATH - -from openfeature.contrib.provider.flagd.config import ResolverType - - -@pytest.fixture(autouse=True, scope="module") -def client_name() -> str: - return "rpc-reconnect" - - -@pytest.fixture(autouse=True, scope="module") -def resolver_type() -> ResolverType: - return ResolverType.RPC - - -@pytest.fixture(autouse=True, scope="module") -def port(): - return 8013 - - -@pytest.fixture(autouse=True, scope="module") -def image(): - return "ghcr.io/open-feature/flagd-testbed-unstable" - - -scenarios( - f"{TEST_HARNESS_PATH}/gherkin/flagd-reconnect.feature", - f"{TEST_HARNESS_PATH}/gherkin/events.feature", -) diff 
--git a/providers/openfeature-provider-flagd/tests/e2e/test_rpc_ssl.py b/providers/openfeature-provider-flagd/tests/e2e/test_rpc_ssl.py deleted file mode 100644 index 3a3214b3..00000000 --- a/providers/openfeature-provider-flagd/tests/e2e/test_rpc_ssl.py +++ /dev/null @@ -1,68 +0,0 @@ -from pathlib import Path - -import pytest -from pytest_bdd import given, scenarios -from tests.e2e.conftest import SPEC_PATH -from tests.e2e.flagd_container import FlagdContainer -from tests.e2e.steps import wait_for - -from openfeature import api -from openfeature.client import OpenFeatureClient -from openfeature.contrib.provider.flagd import FlagdProvider -from openfeature.contrib.provider.flagd.config import ResolverType -from openfeature.provider import ProviderStatus - - -@pytest.fixture(autouse=True, scope="module") -def client_name() -> str: - return "rpc" - - -@pytest.fixture(autouse=True, scope="module") -def resolver_type() -> ResolverType: - return ResolverType.RPC - - -@pytest.fixture(autouse=True, scope="module") -def port(): - return 8013 - - -@pytest.fixture(autouse=True, scope="module") -def image(): - return "ghcr.io/open-feature/flagd-testbed-ssl" - - -@given("a flagd provider is set", target_fixture="client") -@given("a provider is registered", target_fixture="client") -def setup_provider( - container: FlagdContainer, resolver_type, client_name, port -) -> OpenFeatureClient: - try: - container.get_exposed_port(port) - except: # noqa: E722 - container.start() - - path = ( - Path(__file__).parents[2] / "openfeature/test-harness/ssl/custom-root-cert.crt" - ) - - api.set_provider( - FlagdProvider( - resolver_type=resolver_type, - port=int(container.get_exposed_port(port)), - timeout=1, - retry_grace_period=3, - tls=True, - cert_path=str(path.absolute()), - ), - client_name, - ) - client = api.get_client(client_name) - wait_for(lambda: client.get_provider_status() == ProviderStatus.READY) - return client - - -scenarios( - 
f"{SPEC_PATH}/specification/assets/gherkin/evaluation.feature", -) diff --git a/providers/openfeature-provider-flagd/tests/e2e/testfilter.py b/providers/openfeature-provider-flagd/tests/e2e/testfilter.py new file mode 100644 index 00000000..8937e153 --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/e2e/testfilter.py @@ -0,0 +1,125 @@ +import logging +import os + + +class TestFilter: + def __init__(self, config, feature_list=None, resolver=None, base_path=None): + """ + Initialize the TestFilter. + + Args: + config: pytest config object. + feature_list: List of tags to include/exclude (e.g., ["rpc", "~slow"]). + resolver: Mode-specific tag to include (e.g., "rpc" or "in-process"). + base_path: Base path to scope filtering; defaults to the current working directory. + """ + self.config = config + self.include_tags, self.exclude_tags = ( + self._parse_tags(feature_list) if feature_list else (set(), set()) + ) + self.resolver = resolver + if resolver: + self.include_tags.add(resolver) + self.base_path = os.path.abspath(base_path) if base_path else os.getcwd() + self.base_path = os.path.abspath(os.path.join(self.base_path, os.pardir)) + + def filter_items(self, items): + """ + Filter collected items based on include/exclude tags and resolver. + + Args: + items: List of pytest test items. + + Returns: + None: Updates the `items` in place by deselecting unwanted tests. 
+ """ + deselected_items = [] + selected_items = [] + + for item in items: + all_tags = self._get_item_tags(item) + + # Debug: Print collected tags for each item + logging.debug(f"Item: {item.nodeid}, Tags: {all_tags}") + + # Include-only logic: Skip items that do not match include_tags + if ( + self.include_tags + and not all_tags.intersection(self.include_tags) + and self._is_in_base_path(item) + ): + deselected_items.append(item) + continue + + # Exclude logic: Skip items that match any exclude_tags + if ( + self.exclude_tags + and all_tags.intersection(self.exclude_tags) + and self._is_in_base_path(item) + ): + deselected_items.append(item) + continue + + selected_items.append(item) + + # Apply deselection + if deselected_items: + self.config.hook.pytest_deselected(items=deselected_items) + items[:] = ( + selected_items # Update the collection to only include selected items + ) + + def _is_in_base_path(self, item): + """ + Check if a test item is within the specified base path. + """ + return os.path.abspath(os.path.join(item.fspath, os.pardir)) == self.base_path + + @staticmethod + def _parse_tags(tags_option): + """ + Parse the tags option to separate include and exclude tags. + """ + include_tags = set() + exclude_tags = set() + + for tag in tags_option: + if tag.startswith("~"): + exclude_tags.add(tag[1:]) + else: + include_tags.add(tag) + + return include_tags, exclude_tags + + @staticmethod + def _get_item_tags(item): + """ + Get all tags (markers) associated with a test item. 
+ """ + tags = set() + if hasattr(item, "iter_markers"): + for marker in item.iter_markers(): # Newer pytest versions + tags.add(marker.name) + elif hasattr(item, "keywords"): + for marker in item.keywords: # Older pytest versions + tags.add(marker) + + scenario = getattr(item, "_obj", None) + if ( + scenario + and hasattr(scenario, "__scenario__") + and hasattr(scenario.__scenario__, "tags") + ): + tags.update(scenario.__scenario__.tags) + + return tags + + @staticmethod + def _get_feature_file(item): + """ + Get the path to the feature file for a given test item. + """ + scenario = getattr(item, "_obj", None) + if scenario and hasattr(scenario, "__scenario__"): + return scenario.__scenario__.feature.filename + return None diff --git a/providers/openfeature-provider-flagd/tests/flags/.gitignore b/providers/openfeature-provider-flagd/tests/flags/.gitignore new file mode 100644 index 00000000..f4b13f83 --- /dev/null +++ b/providers/openfeature-provider-flagd/tests/flags/.gitignore @@ -0,0 +1 @@ +allFlags.json diff --git a/providers/openfeature-provider-flagd/tests/test_config.py b/providers/openfeature-provider-flagd/tests/test_config.py index cc53a029..510009d0 100644 --- a/providers/openfeature-provider-flagd/tests/test_config.py +++ b/providers/openfeature-provider-flagd/tests/test_config.py @@ -1,5 +1,6 @@ import pytest +# not sure if we still need this test, as this is also covered with gherkin tests. 
from openfeature.contrib.provider.flagd.config import ( DEFAULT_CACHE, DEFAULT_CACHE_SIZE, @@ -20,7 +21,6 @@ ENV_VAR_KEEP_ALIVE_TIME_MS, ENV_VAR_OFFLINE_FLAG_SOURCE_PATH, ENV_VAR_PORT, - ENV_VAR_RESOLVER_TYPE, ENV_VAR_RETRY_BACKOFF_MS, ENV_VAR_STREAM_DEADLINE_MS, ENV_VAR_TLS, @@ -84,7 +84,6 @@ def test_overrides_defaults_with_environment(monkeypatch, resolver_type): # noq monkeypatch.setenv(ENV_VAR_KEEP_ALIVE_TIME_MS, str(keep_alive)) monkeypatch.setenv(ENV_VAR_OFFLINE_FLAG_SOURCE_PATH, offline_path) monkeypatch.setenv(ENV_VAR_PORT, str(port)) - monkeypatch.setenv(ENV_VAR_RESOLVER_TYPE, str(resolver_type.value)) monkeypatch.setenv(ENV_VAR_RETRY_BACKOFF_MS, str(retry_backoff)) monkeypatch.setenv(ENV_VAR_STREAM_DEADLINE_MS, str(stream_deadline)) monkeypatch.setenv(ENV_VAR_TLS, str(tls)) @@ -97,7 +96,6 @@ def test_overrides_defaults_with_environment(monkeypatch, resolver_type): # noq assert config.keep_alive_time == keep_alive assert config.offline_flag_source_path == offline_path assert config.port == port - assert config.resolver == resolver_type assert config.retry_backoff_ms == retry_backoff assert config.stream_deadline_ms == stream_deadline assert config.tls is tls @@ -122,7 +120,6 @@ def test_uses_arguments_over_environments_and_defaults(monkeypatch, resolver_typ monkeypatch.setenv(ENV_VAR_KEEP_ALIVE_TIME_MS, str(keep_alive) + "value") monkeypatch.setenv(ENV_VAR_OFFLINE_FLAG_SOURCE_PATH, offline_path + "value") monkeypatch.setenv(ENV_VAR_PORT, str(port) + "value") - monkeypatch.setenv(ENV_VAR_RESOLVER_TYPE, str(resolver_type) + "value") monkeypatch.setenv(ENV_VAR_RETRY_BACKOFF_MS, str(retry_backoff) + "value") monkeypatch.setenv(ENV_VAR_STREAM_DEADLINE_MS, str(stream_deadline) + "value") monkeypatch.setenv(ENV_VAR_TLS, str(tls) + "value") @@ -147,7 +144,6 @@ def test_uses_arguments_over_environments_and_defaults(monkeypatch, resolver_typ assert config.keep_alive_time == keep_alive assert config.offline_flag_source_path == offline_path assert config.port == port 
- assert config.resolver == resolver_type assert config.retry_backoff_ms == retry_backoff assert config.stream_deadline_ms == stream_deadline assert config.tls is tls diff --git a/providers/openfeature-provider-flagd/tests/test_file_store.py b/providers/openfeature-provider-flagd/tests/test_file_store.py index 5d07f62e..dea890ec 100644 --- a/providers/openfeature-provider-flagd/tests/test_file_store.py +++ b/providers/openfeature-provider-flagd/tests/test_file_store.py @@ -2,14 +2,14 @@ from unittest.mock import Mock import pytest -from src.openfeature.contrib.provider.flagd.resolvers.process.file_watcher import ( - FileWatcherFlagStore, -) -from src.openfeature.contrib.provider.flagd.resolvers.process.flags import Flag from openfeature import api from openfeature.contrib.provider.flagd import FlagdProvider -from openfeature.provider.provider import AbstractProvider +from openfeature.contrib.provider.flagd.config import Config +from openfeature.contrib.provider.flagd.resolvers.process.connector.file_watcher import ( + FileWatcher, +) +from openfeature.contrib.provider.flagd.resolvers.process.flags import Flag, FlagStore def create_client(provider: FlagdProvider): @@ -24,12 +24,23 @@ def create_client(provider: FlagdProvider): "basic-flag.yaml", ], ) -def test_file_load_errors(file_name: str): - provider = Mock(spec=AbstractProvider) +def test_file_load(file_name: str): + emit_provider_configuration_changed = Mock() + emit_provider_ready = Mock() + emit_provider_error = Mock() + flag_store = FlagStore(emit_provider_configuration_changed) path = os.path.abspath(os.path.join(os.path.dirname(__file__), "./flags/")) - file_store = FileWatcherFlagStore(f"{path}/{file_name}", provider) - - flag = file_store.flag_data.get("basic-flag") + file_watcher = FileWatcher( + Config( + offline_flag_source_path=f"{path}/{file_name}", + ), + flag_store, + emit_provider_ready, + emit_provider_error, + ) + file_watcher.initialize(None) + + flag = flag_store.get_flag("basic-flag") 
assert flag is not None assert isinstance(flag, Flag)