From 40ace39ad8803a84aae2ffc3b386b3d01c1f59d0 Mon Sep 17 00:00:00 2001
From: Jarkko Jaakola
Date: Wed, 8 Jan 2025 12:34:34 +0200
Subject: [PATCH] chore: remove pylint

---
 .pre-commit-config.yaml | 7 -
 .pylintrc | 43 ------
 container/compose.yml | 3 -
 .../anonymize_schemas/anonymize_avro.py | 2 +-
 src/karapace/auth.py | 5 +-
 src/karapace/avro_dataclasses/introspect.py | 2 +-
 src/karapace/avro_dataclasses/models.py | 2 -
 src/karapace/backup/api.py | 2 +-
 src/karapace/backup/backends/writer.py | 2 -
 src/karapace/backup/cli.py | 2 +-
 .../compatibility/jsonschema/checks.py | 2 +-
 .../compatibility/jsonschema/utils.py | 2 +-
 .../coordinator/schema_coordinator.py | 2 +-
 src/karapace/kafka/common.py | 17 +--
 src/karapace/karapace_all.py | 2 +-
 src/karapace/protobuf/encoding_variants.py | 2 +-
 src/karapace/protobuf/io.py | 7 +-
 src/karapace/protobuf/known_dependency.py | 2 +-
 src/karapace/protobuf/message_element.py | 2 +-
 src/karapace/protobuf/proto_parser.py | 5 +-
 src/karapace/protobuf/proto_type.py | 2 +-
 src/karapace/protobuf/protobuf_to_dict.py | 7 +-
 src/karapace/protobuf/syntax_reader.py | 1 -
 src/karapace/rapu.py | 20 +--
 src/karapace/sentry/sentry_client.py | 2 +-
 src/karapace/serialization.py | 6 +-
 src/karapace/statsd.py | 2 +-
 src/karapace/utils.py | 12 +-
 src/schema_registry/controller.py | 2 +-
 src/schema_registry/middlewares/__init__.py | 2 +-
 src/schema_registry/reader.py | 14 +-
 .../routers/master_availability.py | 4 +-
 tests/conftest.py | 2 +-
 tests/e2e/conftest.py | 4 +-
 tests/integration/backup/test_v3_backup.py | 2 +-
 tests/integration/conftest.py | 34 ++--
 .../integration/test_schema_compatibility.py | 2 +-
 tests/integration/test_schema_coordinator.py | 124 +++++++++---------
 tests/integration/test_schema_reader.py | 6 +-
 tests/integration/utils/kafka_server.py | 2 +-
 tests/integration/utils/zookeeper.py | 2 +-
 .../anonymize_schemas/test_anonymize_avro.py | 2 -
 tests/unit/backup/backends/test_v2.py | 4 +-
 tests/unit/backup/backends/v3/test_backend.py | 2 +-
 tests/unit/instrumentation/test_prometheus.py | 2 +-
 .../test_rest_proxy_cluster_metadata_cache.py | 1 -
 tests/unit/schema_registry/test_controller.py | 6 +-
 tests/unit/schema_registry/test_reader.py | 8 +-
 tests/unit/test_avro_compatibility.py | 3 +-
 tests/unit/test_forwarding_client.py | 8 +-
 tests/unit/test_rapu.py | 2 +-
 tests/unit/test_rest_auth.py | 1 -
 website/source/conf.py | 2 +-
 53 files changed, 160 insertions(+), 246 deletions(-)
 delete mode 100644 .pylintrc

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index cfcb550bf..18ba14208 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -40,13 +40,6 @@ repos:
           # https://github.com/hadolint/hadolint/issues/497
           - --ignore=DL3042

-  - repo: https://github.com/PyCQA/pylint
-    # Note: pre-commit autoupdate changes to an alpha version. Instead, manually find the
-    # latest stable version here: https://github.com/pylint-dev/pylint/releases
-    rev: v3.2.6
-    hooks:
-      - id: pylint
-
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
    rev: v0.8.6
diff --git a/.pylintrc b/.pylintrc
deleted file mode 100644
index e2c2f6242..000000000
--- a/.pylintrc
+++ /dev/null
@@ -1,43 +0,0 @@
-[MASTER]
-jobs=4
-init-hook='import sys; sys.path.append(".")'
-
-[MESSAGES CONTROL]
-enable=
-    useless-suppression,
-
-disable=
-    duplicate-code,
-    fixme,
-    import-outside-toplevel,
-    invalid-field-call,
-    invalid-name,
-    missing-docstring,
-    too-few-public-methods,
-    too-many-arguments,
-    too-many-branches,
-    too-many-instance-attributes,
-    too-many-lines,
-    too-many-locals,
-    too-many-nested-blocks,
-    too-many-public-methods,
-    too-many-statements,
-    wrong-import-order,
-    import-error,
-    consider-using-f-string,
-    use-implicit-booleaness-not-comparison,
-    unspecified-encoding,
-    no-name-in-module,
-    use-list-literal,
-    use-dict-literal,
-    no-value-for-parameter,
-    relative-beyond-top-level,
-
-
-[FORMAT]
-max-line-length=125
-
-[REPORTS]
-output-format=text
-reports=no
-score=no
diff --git a/container/compose.yml b/container/compose.yml
index 263685e2f..f50268096 100644
--- a/container/compose.yml
+++ b/container/compose.yml
@@ -143,10 +143,7 @@ services:
       - ../tests:/opt/karapace/tests
       - ../pytest.ini:/opt/karapace/pytest.ini
       - ../mypy.ini:/opt/karapace/mypy.ini
-      - ../.flake8:/opt/karapace/.flake8
-      - ../.isort.cfg:/opt/karapace/.isort.cfg
       - ../.pre-commit-config.yaml:/opt/karapace/.pre-commit-config.yaml
-      - ../.pylintrc:/opt/karapace/.pylintrc
       - ../.coveragerc:/opt/karapace/.coveragerc
       - ../.coverage.3.10:/opt/karapace/coverage/.coverage.3.10
      - ../.coverage.3.11:/opt/karapace/coverage/.coverage.3.11
diff --git a/src/karapace/anonymize_schemas/anonymize_avro.py b/src/karapace/anonymize_schemas/anonymize_avro.py
index 79d3f5d1a..701fca858 100644
--- a/src/karapace/anonymize_schemas/anonymize_avro.py
+++ b/src/karapace/anonymize_schemas/anonymize_avro.py
@@ -99,7 +99,7 @@ def anonymize_element(m: re.Match) -> str:


 def anonymize(input_schema: Schema) -> Schema:
-    if not input_schema:  # pylint: disable=no-else-return
+    if not input_schema:
         return input_schema
     elif isinstance(input_schema, str):
         if input_schema in ALL_TYPES:
diff --git a/src/karapace/auth.py b/src/karapace/auth.py
index 9c2a34218..1342aa5b8 100644
--- a/src/karapace/auth.py
+++ b/src/karapace/auth.py
@@ -115,8 +115,7 @@ class AuthenticatorAndAuthorizer(AuthenticateProtocol, AuthorizeProtocol):

     async def close(self) -> None: ...

-    async def start(self, stats: StatsClient) -> None:  # pylint: disable=unused-argument
-        ...
+    async def start(self, stats: StatsClient) -> None: ...


 class NoAuthAndAuthz(AuthenticatorAndAuthorizer):
@@ -237,7 +236,7 @@ async def _refresh_authfile() -> None:
            except asyncio.CancelledError:
                log.info("Closing schema registry ACL refresh task")
                return
-            except Exception as ex:  # pylint: disable=broad-except
+            except Exception as ex:
                log.exception("Schema registry auth file could not be loaded")
                stats.unexpected_exception(ex=ex, where="schema_registry_authfile_reloader")
                return
diff --git a/src/karapace/avro_dataclasses/introspect.py b/src/karapace/avro_dataclasses/introspect.py
index 9eb4ec85f..34b7dbd15 100644
--- a/src/karapace/avro_dataclasses/introspect.py
+++ b/src/karapace/avro_dataclasses/introspect.py
@@ -57,7 +57,7 @@ def _field_type_array(field: Field, origin: type, type_: object) -> AvroType:
 sequence_types: Final = frozenset({tuple, list, Sequence})


-def _field_type(field: Field, type_: object) -> AvroType:  # pylint: disable=too-many-return-statements
+def _field_type(field: Field, type_: object) -> AvroType:
     # Handle primitives.
if type_ is bool: return "boolean" diff --git a/src/karapace/avro_dataclasses/models.py b/src/karapace/avro_dataclasses/models.py index 719871920..720657d7b 100644 --- a/src/karapace/avro_dataclasses/models.py +++ b/src/karapace/avro_dataclasses/models.py @@ -76,8 +76,6 @@ def parse(value: object) -> object: def from_avro_value(type_: object) -> Parser | None: - # pylint: disable=too-many-return-statements - if isinstance(type_, type): if is_dataclass(type_): return partial(from_avro_dict, type_) diff --git a/src/karapace/backup/api.py b/src/karapace/backup/api.py index 7f74d7cb8..e52ccfc54 100644 --- a/src/karapace/backup/api.py +++ b/src/karapace/backup/api.py @@ -171,7 +171,7 @@ def before_sleep(it: RetryCallState) -> None: result = f"failed ({outcome.exception()})" else: result = f"returned {outcome.result()!r}" - LOG.info(f"{description} {result}, retrying... (Ctrl+C to abort)") # pylint: disable=logging-fstring-interpolation + LOG.info(f"{description} {result}, retrying... (Ctrl+C to abort)") return before_sleep diff --git a/src/karapace/backup/backends/writer.py b/src/karapace/backup/backends/writer.py index 78dd330e3..281b20c89 100644 --- a/src/karapace/backup/backends/writer.py +++ b/src/karapace/backup/backends/writer.py @@ -32,8 +32,6 @@ def _noop_context(path: T) -> Iterator[T]: class BackupWriter(Generic[B, F], abc.ABC): """Common interface and base class for all backup writer backends.""" - # pylint: disable=unused-argument - P = TypeVar("P", bound="StdOut | Path") def prepare_location( diff --git a/src/karapace/backup/cli.py b/src/karapace/backup/cli.py index 0f172da8e..47c9290ec 100644 --- a/src/karapace/backup/cli.py +++ b/src/karapace/backup/cli.py @@ -165,7 +165,7 @@ def main() -> None: # TODO: This specific treatment of StaleConsumerError looks quite misplaced # here, and should probably be pushed down into the (internal) API layer. 
except StaleConsumerError as e: - logger.error( # pylint: disable=logging-fstring-interpolation + logger.error( f"The Kafka consumer did not receive any records for partition {e.topic_partition.partition} of topic " f"{e.topic_partition.topic!r} " f"within the poll timeout ({e.poll_timeout} seconds) while trying to reach offset {e.end_offset:,} " diff --git a/src/karapace/compatibility/jsonschema/checks.py b/src/karapace/compatibility/jsonschema/checks.py index 582c7e8e1..81afd2ec7 100644 --- a/src/karapace/compatibility/jsonschema/checks.py +++ b/src/karapace/compatibility/jsonschema/checks.py @@ -834,7 +834,7 @@ def compatibility_subschemas( location: list[str], ) -> SchemaCompatibilityResult: # https://json-schema.org/draft/2020-12/json-schema-core.html#rfc.section.10 - # pylint: disable=too-many-return-statements + reader_subschemas_and_type = maybe_get_subschemas_and_type(reader_schema) writer_subschemas_and_type = maybe_get_subschemas_and_type(writer_schema) diff --git a/src/karapace/compatibility/jsonschema/utils.py b/src/karapace/compatibility/jsonschema/utils.py index fe1f1f467..2feae5925 100644 --- a/src/karapace/compatibility/jsonschema/utils.py +++ b/src/karapace/compatibility/jsonschema/utils.py @@ -258,7 +258,7 @@ def gt(left: int | None, right: int | None) -> bool: def lt(left: int | None, right: int | None) -> bool: - return gt(right, left) # pylint: disable=arguments-out-of-order + return gt(right, left) def ne(writer: T | None, reader: T | None) -> bool: diff --git a/src/karapace/coordinator/schema_coordinator.py b/src/karapace/coordinator/schema_coordinator.py index a7624380a..e699c0c82 100644 --- a/src/karapace/coordinator/schema_coordinator.py +++ b/src/karapace/coordinator/schema_coordinator.py @@ -583,7 +583,7 @@ async def ensure_coordinator_known(self) -> None: async def _coordination_routine(self) -> None: try: await self.__coordination_routine() - except asyncio.CancelledError: # pylint: disable=try-except-raise + except asyncio.CancelledError: raise except Exception as exc: LOG.error("Unexpected error in coordinator routine", exc_info=True) diff --git a/src/karapace/kafka/common.py b/src/karapace/kafka/common.py index fd5441d16..6662d2678 100644 --- a/src/karapace/kafka/common.py +++ b/src/karapace/kafka/common.py @@ -50,16 +50,16 @@ def translate_from_kafkaerror(error: KafkaError) -> Exception: """ code = error.code() if code in ( - KafkaError._NOENT, # pylint: disable=protected-access - KafkaError._UNKNOWN_PARTITION, # pylint: disable=protected-access - KafkaError._UNKNOWN_TOPIC, # pylint: disable=protected-access + KafkaError._NOENT, + KafkaError._UNKNOWN_PARTITION, + KafkaError._UNKNOWN_TOPIC, ): return UnknownTopicOrPartitionError() - if code == KafkaError._TIMED_OUT: # pylint: disable=protected-access + if code == KafkaError._TIMED_OUT: return KafkaTimeoutError() - if code == KafkaError._STATE: # pylint: disable=protected-access + if code == KafkaError._STATE: return IllegalStateError() - if code == KafkaError._RESOLVE: # pylint: disable=protected-access + if code == KafkaError._RESOLVE: return KafkaUnavailableError() return for_code(code) @@ -207,10 +207,7 @@ def _verify_connection(self) -> None: # to the callback function defined in the `error_cb` config self._activate_callbacks() self.log.info("Could not establish connection due to errors: %s", self._errors) - if any( - error.code() == KafkaError._AUTHENTICATION - for error in self._errors # pylint: disable=protected-access - ): + if any(error.code() == KafkaError._AUTHENTICATION for error in 
self._errors): raise AuthenticationFailedError() from exc continue else: diff --git a/src/karapace/karapace_all.py b/src/karapace/karapace_all.py index 3f243688a..0c9acf253 100644 --- a/src/karapace/karapace_all.py +++ b/src/karapace/karapace_all.py @@ -35,7 +35,7 @@ def main( try: prometheus.setup_metrics(app=app) app.run() # `close` will be called by the callback `close_by_app` set by `KarapaceBase` - except Exception as ex: # pylint: disable-broad-except + except Exception as ex: app.stats.unexpected_exception(ex=ex, where="karapace") raise return 0 diff --git a/src/karapace/protobuf/encoding_variants.py b/src/karapace/protobuf/encoding_variants.py index ba1e24232..736e02c99 100644 --- a/src/karapace/protobuf/encoding_variants.py +++ b/src/karapace/protobuf/encoding_variants.py @@ -37,7 +37,7 @@ def read_indexes(bio: BytesIO) -> list[int]: size: int = read_varint(bio) except EOFError: # TODO: change exception - # pylint: disable=raise-missing-from + raise IllegalArgumentException("problem with reading binary data") if size == 0: return [0] diff --git a/src/karapace/protobuf/io.py b/src/karapace/protobuf/io.py index ee22dacc1..1c0385001 100644 --- a/src/karapace/protobuf/io.py +++ b/src/karapace/protobuf/io.py @@ -43,7 +43,6 @@ def find_message_name(schema: ProtobufSchema, indexes: Iterable[int]) -> str: try: message = types[index] except IndexError: - # pylint: disable=raise-missing-from raise IllegalArgumentException(f"Invalid message indexes: {indexes}") if message and isinstance(message, MessageElement): @@ -185,7 +184,7 @@ def reader_process( reader_queue.put(protobuf_to_dict(read_data(config, writer_schema, reader_schema, bio), True)) # Reading happens in the forked process, catch is broad so exception will get communicated # back to calling process. - except BaseException as base_exception: # pylint: disable=broad-except + except BaseException as base_exception: reader_queue.put(base_exception) @@ -260,13 +259,13 @@ def writer_process( writer_queue.put(result) # Writing happens in the forked process, catch is broad so exception will get communicated # back to calling process. 
- except Exception as bare_exception: # pylint: disable=broad-exception-caught + except Exception as bare_exception: try: raise ProtobufTypeException(writer_schema, datum) from bare_exception except ProtobufTypeException as protobuf_exception: writer_queue.put(protobuf_exception) raise protobuf_exception - except BaseException as base_exception: # pylint: disable=broad-exception-caught + except BaseException as base_exception: writer_queue.put(base_exception) diff --git a/src/karapace/protobuf/known_dependency.py b/src/karapace/protobuf/known_dependency.py index bb250707b..a42576d52 100644 --- a/src/karapace/protobuf/known_dependency.py +++ b/src/karapace/protobuf/known_dependency.py @@ -16,7 +16,7 @@ def static_init(cls: Any) -> object: return cls -@static_init # pylint: disable=used-before-assignment +@static_init class KnownDependency: index: dict = dict() index_simple: dict = dict() diff --git a/src/karapace/protobuf/message_element.py b/src/karapace/protobuf/message_element.py index 79538231d..1f516ba6e 100644 --- a/src/karapace/protobuf/message_element.py +++ b/src/karapace/protobuf/message_element.py @@ -92,7 +92,7 @@ def to_schema(self) -> str: return "".join(result) def compare(self, other: TypeElement, result: CompareResult, types: CompareTypes) -> None: - from karapace.protobuf.compare_type_lists import compare_type_lists # pylint: disable=cyclic-import + from karapace.protobuf.compare_type_lists import compare_type_lists if not isinstance(other, MessageElement): result.add_modification(Modification.TYPE_ALTER) diff --git a/src/karapace/protobuf/proto_parser.py b/src/karapace/protobuf/proto_parser.py index 11e5fdf63..63f2c4f2c 100644 --- a/src/karapace/protobuf/proto_parser.py +++ b/src/karapace/protobuf/proto_parser.py @@ -169,7 +169,6 @@ def read_declaration( | GroupElement | FieldElement ) = None - # pylint no-else-return if label == "package" and context.permits_package(): self.package_name = self.reader.read_name() self.prefix = f"{self.package_name}." 
@@ -504,7 +503,7 @@ def read_reserved(self, location: Location, documentation: str) -> ReservedEleme values.append(KotlinRange(tag_start, tag_end)) ch = self.reader.read_char() - # pylint: disable=no-else-break + if ch == ";": break elif ch == ",": @@ -543,7 +542,7 @@ def read_extensions(self, location: Location, documentation: str) -> ExtensionsE values.append(KotlinRange(start, end)) ch = self.reader.read_char() - # pylint: disable=no-else-break + if ch == ";": break elif ch == ",": diff --git a/src/karapace/protobuf/proto_type.py b/src/karapace/protobuf/proto_type.py index 154cc18e7..a41d955f0 100644 --- a/src/karapace/protobuf/proto_type.py +++ b/src/karapace/protobuf/proto_type.py @@ -20,7 +20,7 @@ def static_init(cls) -> object: return cls -@static_init # pylint: disable=used-before-assignment +@static_init class ProtoType: @property def simple_name(self) -> str: diff --git a/src/karapace/protobuf/protobuf_to_dict.py b/src/karapace/protobuf/protobuf_to_dict.py index 8d2845f3a..20d11a891 100644 --- a/src/karapace/protobuf/protobuf_to_dict.py +++ b/src/karapace/protobuf/protobuf_to_dict.py @@ -33,8 +33,6 @@ def timestamp_to_datetime(ts): return dt -# pylint: enable=no-member - EXTENSION_CONTAINER = "___X" TYPE_CALLABLE_MAP = MappingProxyType( @@ -209,14 +207,12 @@ def _get_field_mapping(pb, dict_value, strict): try: ext_num = int(ext_num) except ValueError: - # pylint: disable=raise-missing-from raise ValueError("Extension keys must be integers.") - # pylint: disable=protected-access + if ext_num not in pb._extensions_by_number: if strict: raise KeyError(f"{pb} does not have a extension with number {key}. Perhaps you forgot to import it?") continue - # pylint: disable=protected-access ext_field = pb._extensions_by_number[ext_num] # noinspection PyUnusedLocal @@ -310,7 +306,6 @@ def _string_to_enum(field, input_value, strict=False): input_value = field.enum_type.values_by_name[input_value].number except KeyError: if strict: - # pylint: disable=raise-missing-from raise KeyError(f"`{input_value}` is not a valid value for field `{field.name}`") return _string_to_enum(field, input_value.upper(), strict=True) return input_value diff --git a/src/karapace/protobuf/syntax_reader.py b/src/karapace/protobuf/syntax_reader.py index 99ecaba79..8d5897c2d 100644 --- a/src/karapace/protobuf/syntax_reader.py +++ b/src/karapace/protobuf/syntax_reader.py @@ -224,7 +224,6 @@ def read_comment(self) -> str: buffer = [] start_of_line = True while self.pos + 1 < len(self.data): - # pylint: disable=no-else-break c: str = self.data[self.pos] if c == "*" and self.data[self.pos + 1] == "/": self.pos += 2 diff --git a/src/karapace/rapu.py b/src/karapace/rapu.py index f31e8aab9..34d6e697c 100644 --- a/src/karapace/rapu.py +++ b/src/karapace/rapu.py @@ -20,7 +20,7 @@ import aiohttp.web import aiohttp.web_exceptions import asyncio -import cgi # pylint: disable=deprecated-module +import cgi import hashlib import logging import re @@ -176,7 +176,7 @@ def _create_aiohttp_application(self, *, config: Config) -> aiohttp.web.Applicat return aiohttp.web.Application(client_max_size=config.http_request_max_size) return aiohttp.web.Application() - async def close_by_app(self, app: aiohttp.web.Application) -> None: # pylint: disable=unused-argument + async def close_by_app(self, app: aiohttp.web.Application) -> None: await self.close() async def close(self) -> None: @@ -191,7 +191,7 @@ async def close(self) -> None: self.stats.close() @staticmethod - def cors_and_server_headers_for_request(*, request, origin="*"): # pylint: 
disable=unused-argument + def cors_and_server_headers_for_request(*, request, origin="*"): return { "Access-Control-Allow-Origin": origin, "Access-Control-Allow-Methods": "DELETE, GET, OPTIONS, POST, PUT", @@ -292,17 +292,11 @@ async def _handle_request( body_string = body.decode(charset) rapu_request.json = json_decode(body_string) except UnicodeDecodeError: - raise HTTPResponse( # pylint: disable=raise-missing-from - body=f"Request body is not valid {charset}", status=HTTPStatus.BAD_REQUEST - ) + raise HTTPResponse(body=f"Request body is not valid {charset}", status=HTTPStatus.BAD_REQUEST) except LookupError: - raise HTTPResponse( # pylint: disable=raise-missing-from - body=f"Unknown charset {charset}", status=HTTPStatus.BAD_REQUEST - ) + raise HTTPResponse(body=f"Unknown charset {charset}", status=HTTPStatus.BAD_REQUEST) except ValueError: - raise HTTPResponse( # pylint: disable=raise-missing-from - body="Invalid request JSON body", status=HTTPStatus.BAD_REQUEST - ) + raise HTTPResponse(body="Invalid request JSON body", status=HTTPStatus.BAD_REQUEST) # Prevent string, int etc. going further from here if not isinstance(rapu_request.json, dict): @@ -404,7 +398,7 @@ async def _handle_request( self.log.debug(error_msg, exc_info=exc) # No response can be returned and written to client, aiohttp expects some response here. resp = aiohttp.web.Response(text=error_msg, status=HTTPStatus.SERVICE_UNAVAILABLE.value) - except Exception as ex: # pylint: disable=broad-except + except Exception as ex: self.stats.unexpected_exception(ex=ex, where="rapu_wrapped_callback") self.log.exception("Unexpected error handling user request: %s %s", request.method, request.url) resp = aiohttp.web.Response(text="Internal Server Error", status=HTTPStatus.INTERNAL_SERVER_ERROR.value) diff --git a/src/karapace/sentry/sentry_client.py b/src/karapace/sentry/sentry_client.py index 3ef328132..63e8e529d 100644 --- a/src/karapace/sentry/sentry_client.py +++ b/src/karapace/sentry/sentry_client.py @@ -8,7 +8,7 @@ from collections.abc import Mapping from karapace.sentry.sentry_client_api import KarapaceSentryConfig, SentryClientAPI -# The Sentry SDK is optional, omit pylint import error +# The Sentry SDK is optional import sentry_sdk diff --git a/src/karapace/serialization.py b/src/karapace/serialization.py index 987fe6646..80aa7a96f 100644 --- a/src/karapace/serialization.py +++ b/src/karapace/serialization.py @@ -71,16 +71,16 @@ class InvalidRecord(Exception): def topic_name_strategy( topic_name: str, - record_name: str | None, # pylint: disable=unused-argument + record_name: str | None, subject_type: SubjectType, ) -> Subject: return Subject(f"{topic_name}-{subject_type}") def record_name_strategy( - topic_name: str, # pylint: disable=unused-argument, + topic_name: str, record_name: str | None, - subject_type: SubjectType, # pylint: disable=unused-argument + subject_type: SubjectType, ) -> Subject: if record_name is None: raise InvalidRecord( diff --git a/src/karapace/statsd.py b/src/karapace/statsd.py index 4342ca29d..3215374f3 100644 --- a/src/karapace/statsd.py +++ b/src/karapace/statsd.py @@ -83,7 +83,7 @@ def _send(self, metric: str, metric_type: bytes, value: Any, tags: dict | None) parts.insert(1, f",{tag}={tag_value}".encode()) self._socket.sendto(b"".join(parts), self._dest_addr) - except Exception: # pylint: disable=broad-except + except Exception: LOG.exception("Unexpected exception in statsd send") def close(self) -> None: diff --git a/src/karapace/utils.py b/src/karapace/utils.py index 22eb98182..ec9232ceb 100644 
--- a/src/karapace/utils.py +++ b/src/karapace/utils.py @@ -25,11 +25,11 @@ import time if importlib.util.find_spec("ujson"): - from ujson import JSONDecodeError # noqa: F401 pylint: disable=unused-import, useless-suppression + from ujson import JSONDecodeError # noqa: F401 import ujson as json else: - from json import JSONDecodeError # noqa: F401 pylint: disable=unused-import, useless-suppression + from json import JSONDecodeError # noqa: F401 import json @@ -134,7 +134,7 @@ def json_decode( content: AnyStr | IO[AnyStr], # This argument is only used to pass onto cast() via a type var, it has no runtime # usage. - assume_type: type[T] | None = None, # pylint: disable=unused-argument + assume_type: type[T] | None = None, ) -> JsonData | T: if isinstance(content, (str, bytes)): return cast("T | None", json.loads(content)) @@ -197,7 +197,7 @@ def convert_to_int(object_: dict, key: str, content_type: str) -> None: try: object_[key] = int(object_[key]) except ValueError: - from karapace.rapu import http_error # pylint: disable=cyclic-import + from karapace.rapu import http_error http_error( message=f"{key} is not a valid int: {object_[key]}", @@ -216,7 +216,7 @@ def log( self, request: BaseRequest, response: StreamResponse, - time: float, # pylint: disable=redefined-outer-name + time: float, ) -> None: try: fmt_info = self._format_line(request, response, time) @@ -235,7 +235,7 @@ def log( extra[k1] = dct self.logger.debug(self._log_format % tuple(values), extra=extra) - except Exception: # pylint: disable=broad-except + except Exception: self.logger.exception("Error in logging") diff --git a/src/schema_registry/controller.py b/src/schema_registry/controller.py index fe658cd10..d8924d9a7 100644 --- a/src/schema_registry/controller.py +++ b/src/schema_registry/controller.py @@ -813,7 +813,7 @@ async def subject_post( except (InvalidReferences, InvalidSchema, InvalidSchemaType) as exc: LOG.warning("Invalid schema: %r", schema_request.schema_str, exc_info=True) if isinstance(exc.__cause__, (SchemaParseException, JSONDecodeError, ProtobufUnresolvedDependencyException)): - human_error = f"{exc.__cause__.args[0]}" # pylint: disable=no-member + human_error = f"{exc.__cause__.args[0]}" else: from_body_schema_str = schema_request.schema_str human_error = ( diff --git a/src/schema_registry/middlewares/__init__.py b/src/schema_registry/middlewares/__init__.py index d86fbf40e..3f137b970 100644 --- a/src/schema_registry/middlewares/__init__.py +++ b/src/schema_registry/middlewares/__init__.py @@ -27,7 +27,7 @@ async def set_content_types(request: Request, call_next: Callable[[Request], Awa if request.headers.get("Content-Type") == "application/octet-stream": new_headers = request.headers.mutablecopy() new_headers["Content-Type"] = "application/json" - request._headers = new_headers # pylint: disable=protected-access + request._headers = new_headers request.scope.update(headers=request.headers.raw) response = await call_next(request) diff --git a/src/schema_registry/reader.py b/src/schema_registry/reader.py index 6535729cb..dfe5a547c 100644 --- a/src/schema_registry/reader.py +++ b/src/schema_registry/reader.py @@ -207,7 +207,7 @@ def run(self) -> None: LOG.warning("[Admin Client] Invalid configuration. Bailing") self._stop_schema_reader.set() raise - except Exception as e: # pylint: disable=broad-except + except Exception as e: LOG.exception("[Admin Client] Unexpected exception. 
Retrying") self.stats.unexpected_exception(ex=e, where="admin_client_instantiation") self._stop_schema_reader.wait(timeout=KAFKA_CLIENT_CREATION_TIMEOUT_SECONDS) @@ -225,7 +225,7 @@ def run(self) -> None: LOG.warning("[Consumer] Invalid configuration. Bailing") self._stop_schema_reader.set() raise - except Exception as e: # pylint: disable=broad-except + except Exception as e: LOG.exception("[Consumer] Unexpected exception. Retrying") self.stats.unexpected_exception(ex=e, where="consumer_instantiation") self._stop_schema_reader.wait(timeout=KAFKA_CLIENT_CREATION_TIMEOUT_SECONDS) @@ -271,7 +271,7 @@ def run(self) -> None: except KafkaUnavailableError: self.consecutive_unexpected_errors += 1 LOG.warning("Kafka cluster is unavailable or broker can't be resolved.") - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.stats.unexpected_exception(ex=e, where="schema_reader_loop") self.consecutive_unexpected_errors += 1 if self.consecutive_unexpected_errors == 1: @@ -301,7 +301,7 @@ async def is_healthy(self) -> bool: topic = self.config.topic_name res = self.admin_client.describe_topics(TopicCollection([topic])) await asyncio.wrap_future(res[topic]) - except Exception as e: # pylint: disable=broad-except + except Exception as e: LOG.warning("Health check failed with %r", e) return False @@ -324,7 +324,7 @@ def _get_beginning_offset(self) -> int: LOG.warning("Topic does not yet exist.") except (LeaderNotAvailableError, NotLeaderForPartitionError): LOG.warning("Retrying to find leader for schema topic partition.") - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.stats.unexpected_exception(ex=e, where="_get_beginning_offset") LOG.exception("Unexpected exception when reading begin offsets.") return OFFSET_UNINITIALIZED @@ -347,7 +347,7 @@ def _is_ready(self) -> bool: except (LeaderNotAvailableError, NotLeaderForPartitionError): LOG.warning("Retrying to find leader for schema topic partition.") return False - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.stats.unexpected_exception(ex=e, where="_is_ready") LOG.exception("Unexpected exception when reading end offsets.") return False @@ -570,7 +570,7 @@ def _handle_msg_config(self, key: dict, value: dict | None) -> None: LOG.info("Setting global config to: %r, value: %r", value["compatibilityLevel"], value) self.config.compatibility = value["compatibilityLevel"] - def _handle_msg_delete_subject(self, key: dict, value: dict | None) -> None: # pylint: disable=unused-argument + def _handle_msg_delete_subject(self, key: dict, value: dict | None) -> None: if value is None: LOG.warning("DELETE_SUBJECT record does not have a value, should have") raise ValueError("DELETE_SUBJECT record does not have a value, should have") diff --git a/src/schema_registry/routers/master_availability.py b/src/schema_registry/routers/master_availability.py index 02d072afd..60ac6e30d 100644 --- a/src/schema_registry/routers/master_availability.py +++ b/src/schema_registry/routers/master_availability.py @@ -46,8 +46,8 @@ async def master_availability( if ( schema_registry.schema_reader.master_coordinator is not None - and schema_registry.schema_reader.master_coordinator._sc is not None # pylint: disable=protected-access - and schema_registry.schema_reader.master_coordinator._sc.is_master_assigned_to_myself() # pylint: disable=protected-access + and schema_registry.schema_reader.master_coordinator._sc is not None + and 
schema_registry.schema_reader.master_coordinator._sc.is_master_assigned_to_myself() ): return MasterAvailabilityResponse(master_available=are_we_master) diff --git a/tests/conftest.py b/tests/conftest.py index 6d54c4a6a..28f565b30 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -67,7 +67,7 @@ def split_by_comma(arg: str) -> list[str]: return arg.split(",") -def pytest_addoption(parser, pluginmanager) -> None: # pylint: disable=unused-argument +def pytest_addoption(parser, pluginmanager) -> None: # Configuration options for the services started by the test suite parser.addoption( KAFKA_VERION_OPT, diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index f99d8c71a..eee282b42 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -96,7 +96,7 @@ async def fixture_asyncconsumer( @pytest.fixture(scope="function", name="registry_cluster") async def fixture_registry_cluster( karapace_container: KarapaceContainer, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, ) -> RegistryDescription: protocol = "http" endpoint = RegistryEndpoint( @@ -110,7 +110,7 @@ async def fixture_registry_async_client( request: SubRequest, basic_auth: BasicAuth, registry_cluster: RegistryDescription, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, ) -> AsyncGenerator[Client, None]: client = Client( server_uri=registry_cluster.endpoint.to_url(), diff --git a/tests/integration/backup/test_v3_backup.py b/tests/integration/backup/test_v3_backup.py index f7c2b8dee..0bf4b53cf 100644 --- a/tests/integration/backup/test_v3_backup.py +++ b/tests/integration/backup/test_v3_backup.py @@ -653,7 +653,7 @@ class FailToSendProducer(KafkaProducer): def send(self, *args, **kwargs): raise BufferError() - def poll(self, timeout: float) -> None: # pylint: disable=unused-argument + def poll(self, timeout: float) -> None: return class FailToSendProducerContext: diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 057546b8a..10386e832 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -266,7 +266,7 @@ async def fixture_asyncconsumer( @pytest.fixture(scope="function", name="rest_async") async def fixture_rest_async( request: SubRequest, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, kafka_servers: KafkaServers, registry_async_client: Client, ) -> AsyncIterator[KafkaRest | None]: @@ -298,7 +298,7 @@ async def fixture_rest_async( @pytest.fixture(scope="function", name="rest_async_client") async def fixture_rest_async_client( request: SubRequest, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, rest_async: KafkaRest, aiohttp_client: AiohttpClient, ) -> AsyncIterator[Client]: @@ -309,7 +309,7 @@ async def fixture_rest_async_client( client = Client(server_uri=rest_url) else: - async def get_client(**kwargs) -> TestClient: # pylint: disable=unused-argument + async def get_client(**kwargs) -> TestClient: return await aiohttp_client(rest_async.app) client = Client(client_factory=get_client) @@ -333,7 +333,7 @@ async def get_client(**kwargs) -> TestClient: # pylint: disable=unused-argument @pytest.fixture(scope="function", name="rest_async_novalidation") async def fixture_rest_async_novalidation( request: SubRequest, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, kafka_servers: KafkaServers, 
registry_async_client: Client, ) -> AsyncIterator[KafkaRest | None]: @@ -366,7 +366,7 @@ async def fixture_rest_async_novalidation( @pytest.fixture(scope="function", name="rest_async_novalidation_client") async def fixture_rest_async_novalidationclient( request: SubRequest, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, rest_async_novalidation: KafkaRest, aiohttp_client: AiohttpClient, ) -> AsyncIterator[Client]: @@ -377,7 +377,7 @@ async def fixture_rest_async_novalidationclient( client = Client(server_uri=rest_url) else: - async def get_client(**kwargs) -> TestClient: # pylint: disable=unused-argument + async def get_client(**kwargs) -> TestClient: return await aiohttp_client(rest_async_novalidation.app) client = Client(client_factory=get_client) @@ -401,7 +401,7 @@ async def get_client(**kwargs) -> TestClient: # pylint: disable=unused-argument @pytest.fixture(scope="function", name="rest_async_registry_auth") async def fixture_rest_async_registry_auth( request: SubRequest, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, kafka_servers: KafkaServers, registry_async_client_auth: Client, ) -> AsyncIterator[KafkaRest | None]: @@ -434,7 +434,7 @@ async def fixture_rest_async_registry_auth( @pytest.fixture(scope="function", name="rest_async_client_registry_auth") async def fixture_rest_async_client_registry_auth( request: SubRequest, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, rest_async_registry_auth: KafkaRest, aiohttp_client: AiohttpClient, ) -> AsyncIterator[Client]: @@ -445,7 +445,7 @@ async def fixture_rest_async_client_registry_auth( client = Client(server_uri=rest_url) else: - async def get_client(**kwargs) -> TestClient: # pylint: disable=unused-argument + async def get_client(**kwargs) -> TestClient: return await aiohttp_client(rest_async_registry_auth.app) client = Client(client_factory=get_client) @@ -469,7 +469,7 @@ async def get_client(**kwargs) -> TestClient: # pylint: disable=unused-argument @pytest.fixture(scope="function", name="registry_async_pair") async def fixture_registry_async_pair( request: SubRequest, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, session_logdir: Path, kafka_servers: KafkaServers, ) -> AsyncIterator[list[str]]: @@ -491,7 +491,7 @@ async def fixture_registry_async_pair( @pytest.fixture(scope="function", name="registry_cluster") async def fixture_registry_cluster( request: SubRequest, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, session_logdir: Path, kafka_servers: KafkaServers, ) -> AsyncIterator[RegistryDescription]: @@ -522,7 +522,7 @@ async def fixture_registry_cluster( async def fixture_registry_async_client( request: SubRequest, registry_cluster: RegistryDescription, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, ) -> AsyncGenerator[Client, None]: client = Client( server_uri=registry_cluster.endpoint.to_url(), @@ -576,7 +576,7 @@ def fixture_server_key(credentials_folder: str) -> str: @pytest.fixture(scope="function", name="registry_https_endpoint") async def fixture_registry_https_endpoint( request: SubRequest, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, session_logdir: Path, kafka_servers: KafkaServers, server_cert: str, @@ -605,7 +605,7 @@ async 
def fixture_registry_https_endpoint( @pytest.fixture(scope="function", name="registry_async_client_tls") async def fixture_registry_async_client_tls( - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, registry_https_endpoint: str, server_ca: str, ) -> AsyncIterator[Client]: @@ -635,7 +635,7 @@ async def fixture_registry_async_client_tls( @pytest.fixture(scope="function", name="registry_http_auth_endpoint") async def fixture_registry_http_auth_endpoint( request: SubRequest, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, session_logdir: Path, kafka_servers: KafkaServers, ) -> AsyncIterator[str]: @@ -662,7 +662,7 @@ async def fixture_registry_http_auth_endpoint( @pytest.fixture(scope="function", name="registry_async_client_auth") async def fixture_registry_async_client_auth( request: SubRequest, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, registry_http_auth_endpoint: str, ) -> AsyncIterator[Client]: client = Client( @@ -695,7 +695,7 @@ async def fixture_registry_async_retry_client_auth(registry_async_client_auth: C @pytest.fixture(scope="function", name="registry_async_auth_pair") async def fixture_registry_async_auth_pair( request: SubRequest, - loop: asyncio.AbstractEventLoop, # pylint: disable=unused-argument + loop: asyncio.AbstractEventLoop, session_logdir: Path, kafka_servers: KafkaServers, ) -> AsyncIterator[list[str]]: diff --git a/tests/integration/test_schema_compatibility.py b/tests/integration/test_schema_compatibility.py index cf8c8fa01..3320c3a4c 100644 --- a/tests/integration/test_schema_compatibility.py +++ b/tests/integration/test_schema_compatibility.py @@ -102,7 +102,7 @@ async def _register_baseline_schemas_with_incompatibilities_and_a_deleted_schema async def _register_no_baseline_schemas( registry_async_client: Client, - subject: Subject, # pylint: disable=unused-argument + subject: Subject, ) -> None: pass diff --git a/tests/integration/test_schema_coordinator.py b/tests/integration/test_schema_coordinator.py index 2d8e2bae7..08839d667 100644 --- a/tests/integration/test_schema_coordinator.py +++ b/tests/integration/test_schema_coordinator.py @@ -55,7 +55,7 @@ def fixture_mocked_aiokafka_client() -> Iterator[AIOKafkaClient]: @pytest.fixture(scope="function", name="coordinator") async def fixture_admin( loop: asyncio.AbstractEventLoop, - mocked_client: AIOKafkaClient, # pylint: disable=unused-argument + mocked_client: AIOKafkaClient, ) -> AsyncGenerator: coordinator = SchemaCoordinator( mocked_client, @@ -87,7 +87,7 @@ async def _get_client(kafka_servers: KafkaServers) -> AIOKafkaClient: @pytest.fixture(scope="function", name="client") async def get_client( loop: asyncio.AbstractEventLoop, - kafka_servers: KafkaServers, # pylint: disable=unused-argument + kafka_servers: KafkaServers, ) -> AsyncGenerator: client = await _get_client(kafka_servers) yield client @@ -218,14 +218,14 @@ async def test_coordinator_workflow( async def test_failed_group_join(mocked_client: AIOKafkaClient, coordinator: SchemaCoordinator) -> None: coordinator.start() - assert coordinator._coordination_task is not None # pylint: disable=protected-access - assert coordinator._client is not None # pylint: disable=protected-access + assert coordinator._coordination_task is not None + assert coordinator._client is not None # disable for test - coordinator._coordination_task.cancel() # pylint: disable=protected-access + 
coordinator._coordination_task.cancel() with contextlib.suppress(asyncio.CancelledError): - await coordinator._coordination_task # pylint: disable=protected-access - coordinator._coordination_task = create_task(asyncio.sleep(0.1)) # pylint: disable=protected-access + await coordinator._coordination_task + coordinator._coordination_task = create_task(asyncio.sleep(0.1)) coordinator.coordinator_id = 15 async def _on_join_leader_test(_: Response) -> bytes | None: @@ -239,13 +239,13 @@ async def do_rebalance() -> Assignment | None: coordinator, coordinator.group_id, coordinator.coordinator_id, - coordinator._session_timeout_ms, # pylint: disable=protected-access - coordinator._retry_backoff_ms, # pylint: disable=protected-access + coordinator._session_timeout_ms, + coordinator._retry_backoff_ms, ) - rebalance._on_join_leader = _on_join_leader_mock # pylint: disable=protected-access + rebalance._on_join_leader = _on_join_leader_mock return await rebalance.perform_group_join() - coordinator._client.api_version = (0, 10, 1) # pylint: disable=protected-access + coordinator._client.api_version = (0, 10, 1) error_type = Errors.NoError async def send(*_, **__) -> JoinGroupRequest: @@ -296,7 +296,7 @@ async def send(*_, **__) -> JoinGroupRequest: assert coordinator.need_rejoin() assert coordinator.coordinator_id is None coordinator.coordinator_id = 15 - coordinator._coordinator_dead_fut = create_future() # pylint: disable=protected-access + coordinator._coordinator_dead_fut = create_future() async def _on_join_leader(_) -> bytes | None: return None @@ -319,14 +319,14 @@ async def _on_join_leader(_) -> bytes | None: async def test_failed_sync_group(mocked_client: AIOKafkaClient, coordinator: SchemaCoordinator) -> None: coordinator.start() - assert coordinator._coordination_task is not None # pylint: disable=protected-access - assert coordinator._client is not None # pylint: disable=protected-access + assert coordinator._coordination_task is not None + assert coordinator._client is not None # disable for test - coordinator._coordination_task.cancel() # pylint: disable=protected-access + coordinator._coordination_task.cancel() with contextlib.suppress(asyncio.CancelledError): - await coordinator._coordination_task # pylint: disable=protected-access - coordinator._coordination_task = create_task(asyncio.sleep(0.1)) # pylint: disable=protected-access + await coordinator._coordination_task + coordinator._coordination_task = create_task(asyncio.sleep(0.1)) coordinator.coordinator_id = 15 async def do_sync_group() -> bytes | None: @@ -334,12 +334,12 @@ async def do_sync_group() -> bytes | None: coordinator, coordinator.group_id, coordinator.coordinator_id, - coordinator._session_timeout_ms, # pylint: disable=protected-access - coordinator._retry_backoff_ms, # pylint: disable=protected-access + coordinator._session_timeout_ms, + coordinator._retry_backoff_ms, ) - await rebalance._on_join_follower() # pylint: disable=protected-access + await rebalance._on_join_follower() - coordinator._client.api_version = (0, 10, 1) # pylint: disable=protected-access + coordinator._client.api_version = (0, 10, 1) error_type = None async def send(*_, **__) -> SyncGroupResponse: @@ -366,7 +366,7 @@ async def send(*_, **__) -> SyncGroupResponse: assert coordinator.need_rejoin() coordinator.coordinator_id = 15 - coordinator._coordinator_dead_fut = create_future() # pylint: disable=protected-access + coordinator._coordinator_dead_fut = create_future() error_type = Errors.UnknownError() with pytest.raises(Errors.KafkaError): # 
Masked as some KafkaError @@ -391,9 +391,9 @@ async def test_generation_change_during_rejoin_sync() -> None: coordinator = mock.MagicMock(spec=SchemaCoordinator) member_assignment = mock.Mock(spec=Assignment) - coordinator._client = client # pylint: disable=protected-access - coordinator._rebalance_timeout_ms = 1000 # pylint: disable=protected-access - coordinator._send_req = mock.MagicMock() # pylint: disable=protected-access + coordinator._client = client + coordinator._rebalance_timeout_ms = 1000 + coordinator._send_req = mock.MagicMock() rebalance = SchemaCoordinatorGroupRebalance( coordinator, @@ -415,7 +415,7 @@ async def send_req(_) -> Response: request = mock.MagicMock() coordinator.generation = 1 coordinator.member_id = "member_id" - sync_req = asyncio.ensure_future(rebalance._send_sync_group_request(request)) # pylint: disable=protected-access + sync_req = asyncio.ensure_future(rebalance._send_sync_group_request(request)) await asyncio.sleep(0.05) coordinator.generation = -1 @@ -445,7 +445,7 @@ async def test_coordinator_metadata_update(client: AIOKafkaClient) -> None: ) coordinator.start() - _metadata_update = client._metadata_update # pylint: disable=protected-access + _metadata_update = client._metadata_update with mock.patch.object(client, "_metadata_update") as mocked: async def _new(*args, **kw) -> bool: @@ -526,13 +526,13 @@ async def test_coordinator_ensure_coordinator_known(client: AIOKafkaClient) -> N heartbeat_interval_ms=20000, ) coordinator.start() - assert coordinator._coordination_task is not None # pylint: disable=protected-access + assert coordinator._coordination_task is not None # disable for test - coordinator._coordination_task.cancel() # pylint: disable=protected-access + coordinator._coordination_task.cancel() with contextlib.suppress(asyncio.CancelledError): - await coordinator._coordination_task # pylint: disable=protected-access - coordinator._coordination_task = create_task(asyncio.sleep(0.1)) # pylint: disable=protected-access + await coordinator._coordination_task + coordinator._coordination_task = create_task(asyncio.sleep(0.1)) def force_metadata_update() -> asyncio.Future: fut = create_future() @@ -543,7 +543,7 @@ def force_metadata_update() -> asyncio.Future: client.force_metadata_update = mock.Mock() client.force_metadata_update.side_effect = force_metadata_update - async def ready(node_id: int, group: ConnectionGroup) -> bool: # pylint: disable=unused-argument + async def ready(node_id: int, group: ConnectionGroup) -> bool: if node_id == 0: return True return False @@ -608,12 +608,12 @@ async def test_coordinator__do_heartbeat(client: AIOKafkaClient) -> None: heartbeat_interval_ms=20000, ) coordinator.start() - assert coordinator._coordination_task is not None # pylint: disable=protected-access + assert coordinator._coordination_task is not None # disable for test - coordinator._coordination_task.cancel() # pylint: disable=protected-access + coordinator._coordination_task.cancel() with contextlib.suppress(asyncio.CancelledError): - await coordinator._coordination_task # pylint: disable=protected-access - coordinator._coordination_task = create_task(asyncio.sleep(0.1)) # pylint: disable=protected-access + await coordinator._coordination_task + coordinator._coordination_task = create_task(asyncio.sleep(0.1)) _orig_send_req = coordinator.send_req coordinator.send_req = mocked = mock.Mock() @@ -635,20 +635,20 @@ async def mock_send_req(request): coordinator.coordinator_id = 15 heartbeat_error = Errors.GroupCoordinatorNotAvailableError() - success = 
await coordinator._do_heartbeat() # pylint: disable=protected-access + success = await coordinator._do_heartbeat() assert not success assert coordinator.coordinator_id is None coordinator.rejoin_needed_fut = create_future() heartbeat_error = Errors.RebalanceInProgressError() - success = await coordinator._do_heartbeat() # pylint: disable=protected-access + success = await coordinator._do_heartbeat() assert success assert coordinator.rejoin_needed_fut.done() coordinator.member_id = "some_member" coordinator.rejoin_needed_fut = create_future() heartbeat_error = Errors.IllegalGenerationError() - success = await coordinator._do_heartbeat() # pylint: disable=protected-access + success = await coordinator._do_heartbeat() assert not success assert coordinator.rejoin_needed_fut.done() assert coordinator.member_id == UNKNOWN_MEMBER_ID @@ -656,28 +656,28 @@ async def mock_send_req(request): coordinator.member_id = "some_member" coordinator.rejoin_needed_fut = create_future() heartbeat_error = Errors.UnknownMemberIdError() - success = await coordinator._do_heartbeat() # pylint: disable=protected-access + success = await coordinator._do_heartbeat() assert not success assert coordinator.rejoin_needed_fut.done() assert coordinator.member_id == UNKNOWN_MEMBER_ID heartbeat_error = Errors.GroupAuthorizationFailedError() with pytest.raises(Errors.GroupAuthorizationFailedError) as exception_info: - await coordinator._do_heartbeat() # pylint: disable=protected-access + await coordinator._do_heartbeat() assert exception_info.value.args[0] == coordinator.group_id heartbeat_error = Errors.UnknownError() with pytest.raises(Errors.KafkaError): - await coordinator._do_heartbeat() # pylint: disable=protected-access + await coordinator._do_heartbeat() heartbeat_error = None send_req_error = Errors.RequestTimedOutError() - success = await coordinator._do_heartbeat() # pylint: disable=protected-access + success = await coordinator._do_heartbeat() assert not success heartbeat_error = Errors.NoError() send_req_error = None - success = await coordinator._do_heartbeat() # pylint: disable=protected-access + success = await coordinator._do_heartbeat() assert success finally: await coordinator.close() @@ -699,15 +699,15 @@ async def test_coordinator__heartbeat_routine(client: AIOKafkaClient) -> None: retry_backoff_ms=50, ) coordinator.start() - assert coordinator._coordination_task is not None # pylint: disable=protected-access + assert coordinator._coordination_task is not None # disable for test - coordinator._coordination_task.cancel() # pylint: disable=protected-access + coordinator._coordination_task.cancel() with contextlib.suppress(asyncio.CancelledError): - await coordinator._coordination_task # pylint: disable=protected-access - coordinator._coordination_task = create_task(asyncio.sleep(0.1)) # pylint: disable=protected-access + await coordinator._coordination_task + coordinator._coordination_task = create_task(asyncio.sleep(0.1)) mocked = mock.Mock() - coordinator._do_heartbeat = mocked # pylint: disable=protected-access + coordinator._do_heartbeat = mocked coordinator.coordinator_id = 15 coordinator.member_id = 17 coordinator.generation = 0 @@ -726,7 +726,7 @@ async def ensure_coordinator_known() -> None: coordinator.ensure_coordinator_known = mock.Mock() coordinator.ensure_coordinator_known.side_effect = ensure_coordinator_known - routine = create_task(coordinator._heartbeat_routine()) # pylint: disable=protected-access + routine = create_task(coordinator._heartbeat_routine()) # CASE: simple heartbeat success = 
True @@ -771,20 +771,18 @@ async def test_coordinator__coordination_routine(client: AIOKafkaClient) -> None ) def start_coordination(): - if coordinator._coordination_task: # pylint: disable=protected-access - coordinator._coordination_task.cancel() # pylint: disable=protected-access - coordinator._coordination_task = task = create_task( # pylint: disable=protected-access - coordinator._coordination_routine() # pylint: disable=protected-access - ) + if coordinator._coordination_task: + coordinator._coordination_task.cancel() + coordinator._coordination_task = task = create_task(coordinator._coordination_routine()) return task async def stop_coordination(): - if coordinator._coordination_task is not None: # pylint: disable=protected-access + if coordinator._coordination_task is not None: # disable for test - coordinator._coordination_task.cancel() # pylint: disable=protected-access + coordinator._coordination_task.cancel() with contextlib.suppress(asyncio.CancelledError): - await coordinator._coordination_task # pylint: disable=protected-access - coordinator._coordination_task = create_task(asyncio.sleep(0.1)) # pylint: disable=protected-access + await coordinator._coordination_task + coordinator._coordination_task = create_task(asyncio.sleep(0.1)) await stop_coordination() @@ -794,7 +792,7 @@ async def ensure_coordinator_known(): coordinator.ensure_coordinator_known = coord_mock = mock.Mock() coord_mock.side_effect = ensure_coordinator_known - coordinator._do_rejoin_group = rejoin_mock = mock.Mock() # pylint: disable=protected-access + coordinator._do_rejoin_group = rejoin_mock = mock.Mock() rejoin_ok = True async def do_rejoin(): @@ -806,15 +804,15 @@ async def do_rejoin(): rejoin_mock.side_effect = do_rejoin - coordinator._start_heartbeat_task = mock.Mock() # pylint: disable=protected-access + coordinator._start_heartbeat_task = mock.Mock() client.force_metadata_update = metadata_mock = mock.Mock() done_fut = create_future() done_fut.set_result(None) metadata_mock.side_effect = lambda: done_fut coordinator.rejoin_needed_fut = create_future() - coordinator._closing = create_future() # pylint: disable=protected-access - coordinator._coordinator_dead_fut = create_future() # pylint: disable=protected-access + coordinator._closing = create_future() + coordinator._coordinator_dead_fut = create_future() # CASE: coordination should coordinate and task get done # present diff --git a/tests/integration/test_schema_reader.py b/tests/integration/test_schema_reader.py index 578390d7e..7b14d3ebb 100644 --- a/tests/integration/test_schema_reader.py +++ b/tests/integration/test_schema_reader.py @@ -109,7 +109,7 @@ async def test_regression_soft_delete_schemas_should_be_registered( producer.flush() msg = future.result() - schema_reader._offset_watcher.wait_for_offset(msg.offset(), timeout=5) # pylint: disable=protected-access + schema_reader._offset_watcher.wait_for_offset(msg.offset(), timeout=5) schemas = database.find_subject_schemas(subject=subject, include_deleted=True) assert len(schemas) == 1, "Deleted schemas must have been registered" @@ -137,7 +137,7 @@ async def test_regression_soft_delete_schemas_should_be_registered( producer.flush() msg = future.result() - seen = schema_reader._offset_watcher.wait_for_offset(msg.offset(), timeout=5) # pylint: disable=protected-access + seen = schema_reader._offset_watcher.wait_for_offset(msg.offset(), timeout=5) assert seen is True assert database.global_schema_id == test_global_schema_id @@ -194,7 +194,7 @@ async def 
test_regression_config_for_inexisting_object_should_not_throw( producer.flush() msg = future.result() - seen = schema_reader._offset_watcher.wait_for_offset(msg.offset(), timeout=5) # pylint: disable=protected-access + seen = schema_reader._offset_watcher.wait_for_offset(msg.offset(), timeout=5) assert seen is True assert database.find_subject(subject=subject) is not None, "The above message should be handled gracefully" finally: diff --git a/tests/integration/utils/kafka_server.py b/tests/integration/utils/kafka_server.py index 2a6a67082..f018bc097 100644 --- a/tests/integration/utils/kafka_server.py +++ b/tests/integration/utils/kafka_server.py @@ -184,5 +184,5 @@ def configure_and_start_kafka( ), ) env: dict[bytes, bytes] = {} - proc = Popen(kafka_cmd, env=env) # pylint: disable=consider-using-with + proc = Popen(kafka_cmd, env=env) return proc diff --git a/tests/integration/utils/zookeeper.py b/tests/integration/utils/zookeeper.py index e08d0fed5..39c0c2302 100644 --- a/tests/integration/utils/zookeeper.py +++ b/tests/integration/utils/zookeeper.py @@ -59,5 +59,5 @@ def configure_and_start_zk(config: ZKConfig, kafka_description: KafkaDescription kafka_description, ) ) - proc = Popen(java_args, env=env) # pylint: disable=consider-using-with + proc = Popen(java_args, env=env) return proc diff --git a/tests/unit/anonymize_schemas/test_anonymize_avro.py b/tests/unit/anonymize_schemas/test_anonymize_avro.py index 1e69bbddc..9c9311a89 100644 --- a/tests/unit/anonymize_schemas/test_anonymize_avro.py +++ b/tests/unit/anonymize_schemas/test_anonymize_avro.py @@ -13,8 +13,6 @@ PRIMITIVE_TYPE_SCHEMA = json.loads('"int"') EXPECTED_PRIMITIVE_TYPE_SCHEMA = "int" -# Pylint issue: https://github.com/PyCQA/pylint/issues/3368 -# pylint: disable=line-too-long SCHEMA_WITH_NAME = json.loads('"io.aiven.myrecord"') EXPECTED_SCHEMA_WITH_NAME = "aa258230180d9c643f761089d7e33b8b52288ed3.ae02f26b082c5f3bc7027f72335dd1186a2cd382.afe8733e983101f1f4ff50d24152890d0da71418" diff --git a/tests/unit/backup/backends/test_v2.py b/tests/unit/backup/backends/test_v2.py index b525f30e7..72b175da2 100644 --- a/tests/unit/backup/backends/test_v2.py +++ b/tests/unit/backup/backends/test_v2.py @@ -91,7 +91,7 @@ def test_schema_backup_v2_roundtrip(tmp_path: Path) -> None: with backup_writer.safe_writer(file_path, False) as buffer: for record in records: backup_writer.store_record(buffer, record) - data_file = backup_writer.finalize_partition( # pylint: disable=assignment-from-no-return + data_file = backup_writer.finalize_partition( index=partition_index, filename=file_path.name, ) @@ -232,7 +232,7 @@ def test_anonymize_avro_roundtrip(tmp_path: Path) -> None: with backup_writer.safe_writer(file_path, False) as buffer: for record in records: backup_writer.store_record(buffer, record) - data_file = backup_writer.finalize_partition( # pylint: disable=assignment-from-no-return + data_file = backup_writer.finalize_partition( index=partition_index, filename=file_path.name, ) diff --git a/tests/unit/backup/backends/v3/test_backend.py b/tests/unit/backup/backends/v3/test_backend.py index 0ffad9fb5..83007f097 100644 --- a/tests/unit/backup/backends/v3/test_backend.py +++ b/tests/unit/backup/backends/v3/test_backend.py @@ -191,7 +191,7 @@ def test_reader_raises_invalid_checksum(tmp_path: Path) -> None: backup_writer.store_record(buffer, make_record(topic_name, partition_index, 0)) backup_writer.store_record(buffer, make_record(topic_name, partition_index, 1)) with mock.patch.object( - backup_writer._partition_stats[partition_index], # 
pylint: disable=protected-access + backup_writer._partition_stats[partition_index], "get_checkpoint", return_value=b"not what you expected!", autospec=True, diff --git a/tests/unit/instrumentation/test_prometheus.py b/tests/unit/instrumentation/test_prometheus.py index c4b3da8d9..57fb9292a 100644 --- a/tests/unit/instrumentation/test_prometheus.py +++ b/tests/unit/instrumentation/test_prometheus.py @@ -92,7 +92,7 @@ async def test_http_request_metrics_middleware( await prometheus.http_request_metrics_middleware(request=request, handler=handler) - assert handler.assert_awaited_once # extra assert is to ignore pylint [pointless-statement] + assert handler.assert_awaited_once request.__setitem__.assert_called_once_with(prometheus.START_TIME_REQUEST_KEY, 10) request.app[prometheus.karapace_http_requests_in_progress].labels.assert_has_calls( [ diff --git a/tests/unit/kafka_rest_apis/test_rest_proxy_cluster_metadata_cache.py b/tests/unit/kafka_rest_apis/test_rest_proxy_cluster_metadata_cache.py index d1227fbc2..eb4d51423 100644 --- a/tests/unit/kafka_rest_apis/test_rest_proxy_cluster_metadata_cache.py +++ b/tests/unit/kafka_rest_apis/test_rest_proxy_cluster_metadata_cache.py @@ -1,4 +1,3 @@ -# pylint: disable=protected-access """ Copyright (c) 2024 Aiven Ltd See LICENSE for details diff --git a/tests/unit/schema_registry/test_controller.py b/tests/unit/schema_registry/test_controller.py index e07f45fff..211958f28 100644 --- a/tests/unit/schema_registry/test_controller.py +++ b/tests/unit/schema_registry/test_controller.py @@ -33,12 +33,12 @@ async def test_validate_schema_request_body(schema_registry_container: SchemaRegistryContainer) -> None: - schema_registry_container.schema_registry_controller()._validate_schema_type( # pylint: disable=W0212 + schema_registry_container.schema_registry_controller()._validate_schema_type( {"schema": "{}", "schemaType": "JSON", "references": [], "metadata": {}, "ruleSet": {}} ) with pytest.raises(HTTPException) as exc_info: - schema_registry_container.schema_registry_controller()._validate_schema_type( # pylint: disable=W0212 + schema_registry_container.schema_registry_controller()._validate_schema_type( {"schema": "{}", "schemaType": "DOES_NOT_EXIST", "references": [], "unexpected_field_name": {}, "ruleSet": {}}, ) assert exc_info.type is HTTPException @@ -68,7 +68,7 @@ async def test_forward_when_not_ready(schema_registry_container: SchemaRegistryC mock_forward_func_future.set_exception(HTTPResponse({"mock": "response"})) mock_forward_func = Mock() mock_forward_func.return_value = mock_forward_func_future - controller._forward_request_remote = mock_forward_func # pylint: disable=protected-access + controller._forward_request_remote = mock_forward_func assert await controller.schemas_get( schema_id=1, diff --git a/tests/unit/schema_registry/test_reader.py b/tests/unit/schema_registry/test_reader.py index e1e9e0db5..842a74513 100644 --- a/tests/unit/schema_registry/test_reader.py +++ b/tests/unit/schema_registry/test_reader.py @@ -81,9 +81,7 @@ def produce() -> None: assert consumer.result() is None, "Thread should finish without errors" assert producer.result() is None, "Thread should finish without errors" - assert ( - watcher._greatest_offset == 99 # pylint: disable=protected-access - ), "Expected greatest offset is not one less than total count" + assert watcher._greatest_offset == 99, "Expected greatest offset is not one less than total count" assert produced_cnt == 100, "Did not produce expected amount of records" assert consumed_cnt == 100, "Did not 
consume expected amount of records" @@ -319,9 +317,7 @@ def test_handle_msg_delete_subject_logs(caplog: LogCaptureFixture, karapace_cont ) with caplog.at_level(logging.WARNING, logger="schema_registry.reader"): - schema_reader._handle_msg_schema_hard_delete( # pylint: disable=protected-access - key={"subject": "test-subject", "version": 2} - ) + schema_reader._handle_msg_schema_hard_delete(key={"subject": "test-subject", "version": 2}) for log in caplog.records: assert log.name == "schema_registry.reader" assert log.levelname == "WARNING" diff --git a/tests/unit/test_avro_compatibility.py b/tests/unit/test_avro_compatibility.py index 288fbeda5..771d8fc1c 100644 --- a/tests/unit/test_avro_compatibility.py +++ b/tests/unit/test_avro_compatibility.py @@ -642,12 +642,11 @@ def test_union_to_simple_comparison(field): INT_LIST_RECORD._fields = (INT_LIST_RECORD.fields[0], int_reader_field) LONG_LIST_RECORD._fields = (LONG_LIST_RECORD.fields[0], long_reader_field) -# pylint: disable=protected-access + INT_LIST_RECORD._field_map = INT_LIST_RECORD.fields_dict LONG_LIST_RECORD._field_map = LONG_LIST_RECORD.fields_dict INT_LIST_RECORD._props["fields"] = INT_LIST_RECORD._fields LONG_LIST_RECORD._props["fields"] = LONG_LIST_RECORD._fields -# pylint: enable=protected-access RECORD1_WITH_INT = parse_avro_schema_definition( json.dumps({"type": "record", "name": "Record1", "fields": [{"name": "field1", "type": "int"}]}) ) diff --git a/tests/unit/test_forwarding_client.py b/tests/unit/test_forwarding_client.py index 34df7869f..bbc449b27 100644 --- a/tests/unit/test_forwarding_client.py +++ b/tests/unit/test_forwarding_client.py @@ -41,7 +41,7 @@ def fixture_forward_client() -> ForwardClient: async def test_forward_client_close(forward_client: ForwardClient) -> None: await forward_client.close() - forward_client._forward_client.close.assert_awaited_once() # pylint: disable=protected-access + forward_client._forward_client.close.assert_awaited_once() @pytest.mark.parametrize( @@ -80,7 +80,7 @@ async def mock_aexit(_, __, ___, ____) -> None: mock_get_func.__aenter__ = mock_aenter mock_get_func.__aexit__ = mock_aexit - forward_client._forward_client.get.return_value = mock_get_func # pylint: disable=protected-access + forward_client._forward_client.get.return_value = mock_get_func response = await forward_client.forward_request_remote( request=mock_request, @@ -113,7 +113,7 @@ async def mock_aexit(_, __, ___, ____) -> None: mock_get_func.__aenter__ = mock_aenter mock_get_func.__aexit__ = mock_aexit - forward_client._forward_client.get.return_value = mock_get_func # pylint: disable=protected-access + forward_client._forward_client.get.return_value = mock_get_func response = await forward_client.forward_request_remote( request=mock_request, @@ -146,7 +146,7 @@ async def mock_aexit(_, __, ___, ____) -> None: mock_get_func.__aenter__ = mock_aenter mock_get_func.__aexit__ = mock_aexit - forward_client._forward_client.get.return_value = mock_get_func # pylint: disable=protected-access + forward_client._forward_client.get.return_value = mock_get_func response = await forward_client.forward_request_remote( request=mock_request, diff --git a/tests/unit/test_rapu.py b/tests/unit/test_rapu.py index b965dc09b..eb6185866 100644 --- a/tests/unit/test_rapu.py +++ b/tests/unit/test_rapu.py @@ -177,7 +177,7 @@ async def test_raise_connection_error_handling( app = KarapaceBase(config=karapace_container.config()) - response = await app._handle_request( # pylint: disable=protected-access + response = await app._handle_request( 
request=request_mock, path_for_stats="/", callback=callback_mock, diff --git a/tests/unit/test_rest_auth.py b/tests/unit/test_rest_auth.py index aa7bd41b6..ffe3ce4c2 100644 --- a/tests/unit/test_rest_auth.py +++ b/tests/unit/test_rest_auth.py @@ -1,4 +1,3 @@ -# pylint: disable=protected-access """ Copyright (c) 2023 Aiven Ltd See LICENSE for details diff --git a/website/source/conf.py b/website/source/conf.py index b1778d4ea..caa77b36b 100644 --- a/website/source/conf.py +++ b/website/source/conf.py @@ -25,7 +25,7 @@ # -- Project information ----------------------------------------------------- project = "Karapace" -copyright = "2022, Aiven" # pylint: disable=redefined-builtin +copyright = "2022, Aiven" author = "Aiven"