From 1b64d17c73e44e835841844213d35e252623fb6d Mon Sep 17 00:00:00 2001 From: Sri Harsha Singudasu <105226401+ssingudasu@users.noreply.github.com> Date: Wed, 24 Jan 2024 11:42:04 -0800 Subject: [PATCH] [DP-1901] - Convert Wurstmeister Kafka image to Bitnami for Kafka-go (#1255) * [DP-1901] - Convert Wurstmeister Kafka image to Bitnami for Kafka-go * [DP-1901] - removing duplicate env in config * [DP-1901] - adding KAFKA_VERSION * [DP-1901] - FIXING KAFKA_VERSION * [DP-1901] - minor fixtures to KAFKA_VERSION * [DP-1901] - minor fixtures in lint * [DP-1901] - fixing KAFKA_VERSION to 0.10.2.1 * [DP-1901] - minor fixtures to KAFKA_VERSION * [DP-1901] - fixing zookeeper connect * [DP-1901] - fixing KAFKA_VERSION to 0.10.2.1 * [DP-1901] - fixing kafka-011 * [DP-1901] - fixing kafka-011 environment * [DP-1901] - fixing zookeeper kafka-011 * [DP-1901] - fixing KAFKA_VERSION kafka-011 * [DP-1901] - fixing KAFKA_VERSION kafka-011 * [DP-1901] - fixing KAFKA_VERSION kafka-011 * [DP-1901] - Adding AUTHORIZER kafka-011 * [DP-1901] - reset kafka-011 * [DP-1901] - bitnami for kafka-011 * [DP-1901] - bitnami for kafka-011 zookeeper fixtures * [DP-1901] - fixtures to circleci and creating docker_compose_versions folder * [DP-1901] - zookeeper fix * [DP-1901] - fixtures to circleci. removed unsupported kafka * [DP-1901] - fixtures to circleci 2.3.1. 
fixing examples folder * [DP-1901] - examples docker-compose fix to bitnami * [DP-1901] - minor README.md fixtures * [DP-1901] - minor README.md fixtures * [DP-1901] - minor README.md fixtures * [DP-1901] - minor README.md fixtures * [DP-1901] - Grammatical fixtures in README.md * [DP-1901] - Adding support for v281 and v361 in circleci * [DP-1901] - touch README.md for circleci trigger * [DP-1901] - Creating v361docker and modify circleci * [DP-1901] - Creating v361 docker and modify circleci * [DP-1901] - touch README.md for circleci trigger * [DP-1901] - removing v361 from circleci --- .circleci/config.yml | 267 ++++++++---------- README.md | 7 +- docker-compose-241.yml | 32 --- docker-compose.010.yml | 29 -- docker-compose.yml | 64 +++-- docker_compose_versions/README.md | 152 ++++++++++ .../docker-compose-010.yml | 39 +++ .../docker-compose-011.yml | 36 +++ .../docker-compose-231.yml | 42 +++ .../docker-compose-241.yml | 39 +++ .../docker-compose-260.yml | 39 +++ .../docker-compose-270.yml | 39 +++ .../docker-compose-361.yml | 39 +++ examples/docker-compose.yaml | 20 +- examples/kafka/kafka-variables.env | 31 +- 15 files changed, 625 insertions(+), 250 deletions(-) delete mode 100644 docker-compose-241.yml delete mode 100644 docker-compose.010.yml create mode 100644 docker_compose_versions/README.md create mode 100644 docker_compose_versions/docker-compose-010.yml create mode 100644 docker_compose_versions/docker-compose-011.yml create mode 100644 docker_compose_versions/docker-compose-231.yml create mode 100644 docker_compose_versions/docker-compose-241.yml create mode 100644 docker_compose_versions/docker-compose-260.yml create mode 100644 docker_compose_versions/docker-compose-270.yml create mode 100644 docker_compose_versions/docker-compose-361.yml diff --git a/.circleci/config.yml b/.circleci/config.yml index 25fad54e3..329dec4fb 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -9,34 +9,40 @@ jobs: # The kafka 0.10 tests are maintained as a 
separate configuration because # kafka only supported plain text SASL in this version. + # NOTE: Bitnami does not have suport for kafka version 0.10.1.1. Hence we use 0.10.2.1 kafka-010: working_directory: &working_directory /go/src/github.com/segmentio/kafka-go - environment: - KAFKA_VERSION: "0.10.1" docker: - image: circleci/golang - - image: wurstmeister/zookeeper + - image: bitnami/zookeeper:latest ports: - 2181:2181 - - image: wurstmeister/kafka:0.10.1.1 + environment: + ALLOW_ANONYMOUS_LOGIN: yes + - image: bitnami/kafka:0.10.2.1 ports: - 9092:9092 - 9093:9093 environment: - KAFKA_BROKER_ID: '1' - KAFKA_CREATE_TOPICS: 'test-writer-0:3:1,test-writer-1:3:1' + KAFKA_BROKER_ID: 1 KAFKA_DELETE_TOPIC_ENABLE: 'true' KAFKA_ADVERTISED_HOST_NAME: 'localhost' KAFKA_ADVERTISED_PORT: '9092' - KAFKA_ZOOKEEPER_CONNECT: 'localhost:2181' + KAFKA_ZOOKEEPER_CONNECT: localhost:2181 KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' KAFKA_MESSAGE_MAX_BYTES: '200000000' KAFKA_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' - KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN' - KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/kafka/config/kafka_server_jaas.conf" - CUSTOM_INIT_SCRIPT: |- - echo -e 'KafkaServer {\norg.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n user_adminplain="admin-secret";\n };' > /opt/kafka/config/kafka_server_jaas.conf; + KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' + KAFKA_AUTHORIZER_CLASS_NAME: 'kafka.security.auth.SimpleAclAuthorizer' + KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/bitnami/kafka/config/kafka_server_jaas.conf" + ALLOW_PLAINTEXT_LISTENER: yes + entrypoint: + - "/bin/bash" + - "-c" + - echo -e 'KafkaServer {\norg.apache.kafka.common.security.scram.ScramLoginModule required\n username="adminscram"\n password="admin-secret";\n 
org.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n user_adminplain="admin-secret";\n };' > /opt/bitnami/kafka/config/kafka_server_jaas.conf; /opt/bitnami/kafka/bin/kafka-configs.sh --zookeeper localhost:2181 --alter --add-config 'SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]' --entity-type users --entity-name adminscram; exec /app-entrypoint.sh /start-kafka.sh + steps: &steps - checkout - restore_cache: @@ -59,156 +65,129 @@ jobs: working_directory: ./sasl/aws_msk_iam command: go test -race -cover ./... - # Starting at version 0.11, the kafka features and configuration remained - # mostly stable, so we can use this CI job configuration as template for other - # versions as well. kafka-011: working_directory: *working_directory environment: KAFKA_VERSION: "0.11.0" docker: - image: circleci/golang - - image: wurstmeister/zookeeper + - image: bitnami/zookeeper:latest ports: - 2181:2181 - - image: wurstmeister/kafka:2.11-0.11.0.3 + environment: + ALLOW_ANONYMOUS_LOGIN: yes + - image: bitnami/kafka:0.11.0-1-r1 ports: - 9092:9092 - 9093:9093 - environment: &environment - KAFKA_BROKER_ID: '1' - KAFKA_CREATE_TOPICS: 'test-writer-0:3:1,test-writer-1:3:1' + environment: + KAFKA_BROKER_ID: 1 KAFKA_DELETE_TOPIC_ENABLE: 'true' KAFKA_ADVERTISED_HOST_NAME: 'localhost' KAFKA_ADVERTISED_PORT: '9092' - KAFKA_ZOOKEEPER_CONNECT: 'localhost:2181' - KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' - KAFKA_MESSAGE_MAX_BYTES: '200000000' + KAFKA_ZOOKEEPER_CONNECT: localhost:2181 KAFKA_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' - KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' - KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/kafka/config/kafka_server_jaas.conf" - CUSTOM_INIT_SCRIPT: |- - apk add libgcc; - echo -e 'KafkaServer 
{\norg.apache.kafka.common.security.scram.ScramLoginModule required\n username="adminscram"\n password="admin-secret";\n org.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n user_adminplain="admin-secret";\n };' > /opt/kafka/config/kafka_server_jaas.conf; - /opt/kafka/bin/kafka-configs.sh --zookeeper localhost:2181 --alter --add-config 'SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]' --entity-type users --entity-name adminscram - steps: *steps - - kafka-101: - working_directory: *working_directory - environment: - KAFKA_VERSION: "1.0.1" - docker: - - image: circleci/golang - - image: wurstmeister/zookeeper - ports: - - 2181:2181 - - image: wurstmeister/kafka:2.11-1.0.1 - ports: - - 9092:9092 - - 9093:9093 - environment: *environment - steps: *steps - - kafka-111: - working_directory: *working_directory - environment: - KAFKA_VERSION: "1.1.1" - docker: - - image: circleci/golang - - image: wurstmeister/zookeeper - ports: - - 2181:2181 - - image: wurstmeister/kafka:2.11-1.1.1 - ports: - - 9092:9092 - - 9093:9093 - environment: *environment + KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/bitnami/kafka/config/kafka_server_jaas.conf" + ALLOW_PLAINTEXT_LISTENER: "yes" + entrypoint: + - "/bin/bash" + - "-c" + # 0.11.0 image is not honoring some configs required in server.properties + - echo -e '\nsasl.enabled.mechanisms=PLAIN,SCRAM-SHA-256,SCRAM-SHA-512\nmessage.max.bytes=200000000\nauto.create.topics.enable=true\nport=9092' >> /opt/bitnami/kafka/config/server.properties; echo -e 'KafkaServer {\norg.apache.kafka.common.security.scram.ScramLoginModule required\n username="adminscram"\n password="admin-secret";\n org.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n user_adminplain="admin-secret";\n };' > /opt/bitnami/kafka/config/kafka_server_jaas.conf; 
/opt/bitnami/kafka/bin/kafka-configs.sh --zookeeper localhost:2181 --alter --add-config 'SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]' --entity-type users --entity-name adminscram; exec /app-entrypoint.sh /run.sh steps: *steps - kafka-201: + kafka-231: working_directory: *working_directory environment: - KAFKA_VERSION: "2.0.1" + KAFKA_VERSION: "2.3.1" docker: - image: circleci/golang - - image: wurstmeister/zookeeper + - image: bitnami/zookeeper:latest ports: - 2181:2181 - - image: wurstmeister/kafka:2.12-2.0.1 + environment: + ALLOW_ANONYMOUS_LOGIN: yes + - image: bitnami/kafka:2.3.1-ol-7-r61 ports: - 9092:9092 - 9093:9093 environment: - <<: *environment - KAFKA_AUTHORIZER_CLASS_NAME: 'kafka.security.auth.SimpleAclAuthorizer' - KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + KAFKA_CFG_BROKER_ID: 1 + KAFKA_CFG_DELETE_TOPIC_ENABLE: 'true' + KAFKA_CFG_ADVERTISED_HOST_NAME: 'localhost' + KAFKA_CFG_ADVERTISED_PORT: '9092' + KAFKA_CFG_ZOOKEEPER_CONNECT: localhost:2181 + KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_CFG_MESSAGE_MAX_BYTES: '200000000' + KAFKA_CFG_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' + KAFKA_CFG_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' + KAFKA_CFG_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' + KAFKA_CFG_AUTHORIZER_CLASS_NAME: 'kafka.security.auth.SimpleAclAuthorizer' + KAFKA_CFG_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + KAFKA_INTER_BROKER_USER: adminplain + KAFKA_INTER_BROKER_PASSWORD: admin-secret + KAFKA_BROKER_USER: adminplain + KAFKA_BROKER_PASSWORD: admin-secret + ALLOW_PLAINTEXT_LISTENER: yes + entrypoint: + - "/bin/bash" + - "-c" + - /opt/bitnami/kafka/bin/kafka-configs.sh --zookeeper localhost:2181 --alter --add-config "SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]" --entity-type users --entity-name adminscram; exec /entrypoint.sh /run.sh steps: *steps - kafka-211: + kafka-241: 
working_directory: *working_directory environment: - KAFKA_VERSION: "2.1.1" - docker: - - image: circleci/golang - - image: wurstmeister/zookeeper - ports: - - 2181:2181 - - image: wurstmeister/kafka:2.12-2.1.1 - ports: - - 9092:9092 - - 9093:9093 - # recently, the base image for some newer versions of kafka switched from - # alpine to debian, which causes the "apk add ..." line to fail. The env - # map should be used for any versions that fail due to being part of this - # migration. - environment: &environmentDebian - <<: *environment - KAFKA_AUTHORIZER_CLASS_NAME: 'kafka.security.auth.SimpleAclAuthorizer' - KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' - CUSTOM_INIT_SCRIPT: |- - echo -e 'KafkaServer {\norg.apache.kafka.common.security.scram.ScramLoginModule required\n username="adminscram"\n password="admin-secret";\n org.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n user_adminplain="admin-secret";\n };' > /opt/kafka/config/kafka_server_jaas.conf; - /opt/kafka/bin/kafka-configs.sh --zookeeper localhost:2181 --alter --add-config 'SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]' --entity-type users --entity-name adminscram - steps: *steps + KAFKA_VERSION: "2.4.1" - kafka-222: - working_directory: *working_directory - environment: - KAFKA_VERSION: "2.2.2" + # Need to skip nettest to avoid these kinds of errors: + # --- FAIL: TestConn/nettest (17.56s) + # --- FAIL: TestConn/nettest/PingPong (7.40s) + # conntest.go:112: unexpected Read error: [7] Request Timed Out: the request exceeded the user-specified time limit in the request + # conntest.go:118: mismatching value: got 77, want 78 + # conntest.go:118: mismatching value: got 78, want 79 + # ... + # + # TODO: Figure out why these are happening and fix them (they don't appear to be new). 
+ KAFKA_SKIP_NETTEST: "1" docker: - image: circleci/golang - - image: wurstmeister/zookeeper + - image: bitnami/zookeeper:latest ports: - 2181:2181 - - image: wurstmeister/kafka:2.12-2.2.2 - ports: - - 9092:9092 - - 9093:9093 environment: - <<: *environmentDebian - steps: *steps - - kafka-231: - working_directory: *working_directory - environment: - KAFKA_VERSION: "2.3.1" - docker: - - image: circleci/golang - - image: wurstmeister/zookeeper - ports: - - 2181:2181 - - image: wurstmeister/kafka:2.12-2.3.1 + ALLOW_ANONYMOUS_LOGIN: yes + - image: bitnami/kafka:2.4.1 ports: - 9092:9092 - 9093:9093 - environment: - <<: *environmentDebian + environment: &environment + KAFKA_CFG_BROKER_ID: 1 + KAFKA_CFG_DELETE_TOPIC_ENABLE: 'true' + KAFKA_CFG_ADVERTISED_HOST_NAME: 'localhost' + KAFKA_CFG_ADVERTISED_PORT: '9092' + KAFKA_CFG_ZOOKEEPER_CONNECT: localhost:2181 + KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_CFG_MESSAGE_MAX_BYTES: '200000000' + KAFKA_CFG_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' + KAFKA_CFG_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' + KAFKA_CFG_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' + KAFKA_CFG_AUTHORIZER_CLASS_NAME: 'kafka.security.auth.SimpleAclAuthorizer' + KAFKA_CFG_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/bitnami/kafka/config/kafka_jaas.conf" + ALLOW_PLAINTEXT_LISTENER: yes + entrypoint: &entrypoint + - "/bin/bash" + - "-c" + - echo -e 'KafkaServer {\norg.apache.kafka.common.security.scram.ScramLoginModule required\n username="adminscram"\n password="admin-secret";\n org.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n user_adminplain="admin-secret";\n };' > /opt/bitnami/kafka/config/kafka_jaas.conf; /opt/bitnami/kafka/bin/kafka-configs.sh --zookeeper localhost:2181 --alter --add-config 
"SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]" --entity-type users --entity-name adminscram; exec /entrypoint.sh /run.sh steps: *steps - kafka-241: + kafka-260: working_directory: *working_directory environment: - KAFKA_VERSION: "2.4.1" + KAFKA_VERSION: "2.6.0" # Need to skip nettest to avoid these kinds of errors: # --- FAIL: TestConn/nettest (17.56s) @@ -222,21 +201,24 @@ jobs: KAFKA_SKIP_NETTEST: "1" docker: - image: circleci/golang - - image: wurstmeister/zookeeper + - image: bitnami/zookeeper:latest ports: - 2181:2181 - - image: wurstmeister/kafka:2.12-2.4.1 + environment: + ALLOW_ANONYMOUS_LOGIN: yes + - image: bitnami/kafka:2.6.0 ports: - 9092:9092 - 9093:9093 - environment: - <<: *environmentDebian + environment: *environment + entrypoint: *entrypoint steps: *steps - kafka-260: + # NOTE: Bitnami does not have suport for kafka version 2.7.1. Hence we use 2.7.0 + kafka-270: working_directory: *working_directory environment: - KAFKA_VERSION: "2.6.0" + KAFKA_VERSION: "2.7.0" # Need to skip nettest to avoid these kinds of errors: # --- FAIL: TestConn/nettest (17.56s) @@ -250,23 +232,23 @@ jobs: KAFKA_SKIP_NETTEST: "1" docker: - image: circleci/golang - - image: wurstmeister/zookeeper + - image: bitnami/zookeeper:latest ports: - 2181:2181 - - image: wurstmeister/kafka:2.13-2.6.0 + environment: + ALLOW_ANONYMOUS_LOGIN: yes + - image: bitnami/kafka:2.7.0 ports: - 9092:9092 - 9093:9093 - environment: - <<: *environment - KAFKA_AUTHORIZER_CLASS_NAME: 'kafka.security.authorizer.AclAuthorizer' - KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + environment: *environment + entrypoint: *entrypoint steps: *steps - kafka-271: + kafka-281: working_directory: *working_directory environment: - KAFKA_VERSION: "2.7.1" + KAFKA_VERSION: "2.8.1" # Need to skip nettest to avoid these kinds of errors: # --- FAIL: TestConn/nettest (17.56s) @@ -280,17 +262,17 @@ jobs: KAFKA_SKIP_NETTEST: "1" docker: - image: circleci/golang - - image: 
wurstmeister/zookeeper + - image: bitnami/zookeeper:latest ports: - 2181:2181 - - image: wurstmeister/kafka:2.13-2.7.1 + environment: + ALLOW_ANONYMOUS_LOGIN: yes + - image: bitnami/kafka:2.8.1 ports: - 9092:9092 - 9093:9093 - environment: - <<: *environment - KAFKA_AUTHORIZER_CLASS_NAME: 'kafka.security.authorizer.AclAuthorizer' - KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + environment: *environment + entrypoint: *entrypoint steps: *steps workflows: @@ -300,12 +282,13 @@ workflows: - lint - kafka-010 - kafka-011 - - kafka-101 - - kafka-111 - - kafka-201 - - kafka-211 - - kafka-222 + # - kafka-101 # Bitnami image fails for SCRAM. refer kafka-do/docker_compose_versions/README.md + # - kafka-111 # Bitnami image fails for SCRAM. refer kafka-do/docker_compose_versions/README.md + # - kafka-201 # Bitnami image fails for SCRAM. refer kafka-do/docker_compose_versions/README.md + # - kafka-211 # Bitnami image fails for SCRAM. refer kafka-do/docker_compose_versions/README.md + # - kafka-222 # Bitnami v222 not found. v221 Bitnami image fails for SCRAM. refer kafka-do/docker_compose_versions/README.md - kafka-231 - kafka-241 - kafka-260 - - kafka-271 + - kafka-270 + - kafka-281 diff --git a/README.md b/README.md index e17878825..4e6cd1229 100644 --- a/README.md +++ b/README.md @@ -108,7 +108,7 @@ if err := conn.Close(); err != nil { ``` ### To Create Topics -By default kafka has the `auto.create.topics.enable='true'` (`KAFKA_AUTO_CREATE_TOPICS_ENABLE='true'` in the wurstmeister/kafka kafka docker image). If this value is set to `'true'` then topics will be created as a side effect of `kafka.DialLeader` like so: +By default kafka has the `auto.create.topics.enable='true'` (`KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE='true'` in the bitnami/kafka kafka docker image). 
If this value is set to `'true'` then topics will be created as a side effect of `kafka.DialLeader` like so: ```go // to create topics when auto.create.topics.enable='true' conn, err := kafka.DialLeader(context.Background(), "tcp", "localhost:9092", "my-topic", 0) @@ -797,3 +797,8 @@ KAFKA_VERSION=2.3.1 \ KAFKA_SKIP_NETTEST=1 \ go test -race ./... ``` + +(or) to clean up the cached test results and run tests: +``` +go clean -cache && make test +``` diff --git a/docker-compose-241.yml b/docker-compose-241.yml deleted file mode 100644 index 6feb1844b..000000000 --- a/docker-compose-241.yml +++ /dev/null @@ -1,32 +0,0 @@ -version: "3" -services: - kafka: - image: wurstmeister/kafka:2.12-2.4.1 - restart: on-failure:3 - links: - - zookeeper - ports: - - 9092:9092 - - 9093:9093 - environment: - KAFKA_VERSION: '2.4.1' - KAFKA_BROKER_ID: '1' - KAFKA_CREATE_TOPICS: 'test-writer-0:3:1,test-writer-1:3:1' - KAFKA_DELETE_TOPIC_ENABLE: 'true' - KAFKA_ADVERTISED_HOST_NAME: 'localhost' - KAFKA_ADVERTISED_PORT: '9092' - KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' - KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' - KAFKA_MESSAGE_MAX_BYTES: '200000000' - KAFKA_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' - KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' - KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' - KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/kafka/config/kafka_server_jaas.conf" - CUSTOM_INIT_SCRIPT: |- - echo -e 'KafkaServer {\norg.apache.kafka.common.security.scram.ScramLoginModule required\n username="adminscram"\n password="admin-secret";\n org.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n user_adminplain="admin-secret";\n };' > /opt/kafka/config/kafka_server_jaas.conf; - /opt/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config 'SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]' 
--entity-type users --entity-name adminscram - - zookeeper: - image: wurstmeister/zookeeper - ports: - - 2181:2181 diff --git a/docker-compose.010.yml b/docker-compose.010.yml deleted file mode 100644 index 56123f85c..000000000 --- a/docker-compose.010.yml +++ /dev/null @@ -1,29 +0,0 @@ -version: "3" -services: - kafka: - image: wurstmeister/kafka:0.10.1.1 - links: - - zookeeper - ports: - - 9092:9092 - - 9093:9093 - environment: - KAFKA_BROKER_ID: '1' - KAFKA_CREATE_TOPICS: 'test-writer-0:3:1,test-writer-1:3:1' - KAFKA_DELETE_TOPIC_ENABLE: 'true' - KAFKA_ADVERTISED_HOST_NAME: 'localhost' - KAFKA_ADVERTISED_PORT: '9092' - KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' - KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' - KAFKA_MESSAGE_MAX_BYTES: '200000000' - KAFKA_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' - KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' - KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN' - KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/kafka/config/kafka_server_jaas.conf" - CUSTOM_INIT_SCRIPT: |- - echo -e 'KafkaServer {\norg.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n user_adminplain="admin-secret";\n };' > /opt/kafka/config/kafka_server_jaas.conf; - - zookeeper: - image: wurstmeister/zookeeper - ports: - - 2181:2181 diff --git a/docker-compose.yml b/docker-compose.yml index dc0c2e85e..f4235c9f0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,34 +1,42 @@ -version: "3" +# See https://hub.docker.com/r/bitnami/kafka/tags for the complete list. 
+version: '3' services: + zookeeper: + container_name: zookeeper + hostname: zookeeper + image: bitnami/zookeeper:latest + ports: + - 2181:2181 + environment: + ALLOW_ANONYMOUS_LOGIN: yes kafka: - image: wurstmeister/kafka:2.12-2.3.1 + container_name: kafka + image: bitnami/kafka:2.3.1-ol-7-r61 restart: on-failure:3 links: - - zookeeper + - zookeeper ports: - - 9092:9092 - - 9093:9093 + - 9092:9092 + - 9093:9093 environment: - KAFKA_VERSION: '2.3.1' - KAFKA_BROKER_ID: '1' - KAFKA_CREATE_TOPICS: 'test-writer-0:3:1,test-writer-1:3:1' - KAFKA_DELETE_TOPIC_ENABLE: 'true' - KAFKA_ADVERTISED_HOST_NAME: 'localhost' - KAFKA_ADVERTISED_PORT: '9092' - KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' - KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' - KAFKA_MESSAGE_MAX_BYTES: '200000000' - KAFKA_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' - KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' - KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' - KAFKA_AUTHORIZER_CLASS_NAME: 'kafka.security.auth.SimpleAclAuthorizer' - KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' - KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/kafka/config/kafka_server_jaas.conf" - CUSTOM_INIT_SCRIPT: |- - echo -e 'KafkaServer {\norg.apache.kafka.common.security.scram.ScramLoginModule required\n username="adminscram"\n password="admin-secret";\n org.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n user_adminplain="admin-secret";\n };' > /opt/kafka/config/kafka_server_jaas.conf; - /opt/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config 'SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]' --entity-type users --entity-name adminscram - - zookeeper: - image: wurstmeister/zookeeper - ports: - - 2181:2181 + KAFKA_CFG_BROKER_ID: 1 + KAFKA_CFG_DELETE_TOPIC_ENABLE: 'true' + KAFKA_CFG_ADVERTISED_HOST_NAME: 'localhost' + KAFKA_CFG_ADVERTISED_PORT: 
'9092' + KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_CFG_MESSAGE_MAX_BYTES: '200000000' + KAFKA_CFG_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' + KAFKA_CFG_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' + KAFKA_CFG_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' + KAFKA_CFG_AUTHORIZER_CLASS_NAME: 'kafka.security.auth.SimpleAclAuthorizer' + KAFKA_CFG_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + KAFKA_INTER_BROKER_USER: adminplain + KAFKA_INTER_BROKER_PASSWORD: admin-secret + KAFKA_BROKER_USER: adminplain + KAFKA_BROKER_PASSWORD: admin-secret + ALLOW_PLAINTEXT_LISTENER: yes + entrypoint: + - "/bin/bash" + - "-c" + - /opt/bitnami/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config "SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]" --entity-type users --entity-name adminscram; exec /entrypoint.sh /run.sh diff --git a/docker_compose_versions/README.md b/docker_compose_versions/README.md new file mode 100644 index 000000000..9d3b1639a --- /dev/null +++ b/docker_compose_versions/README.md @@ -0,0 +1,152 @@ +# Bitnami Kafka + +This document outlines how to create a docker-compose file for a specific Bitnami Kafka version. + + +## Steps to create docker-compose + +- Refer to [docker-hub Bitnami Kafka tags](https://hub.docker.com/r/bitnami/kafka/tags) and sort by NEWEST to locate the preferred image, for example: `2.7.0` +- There is documentation in the [main branch](https://github.com/bitnami/containers/blob/main/bitnami/kafka/README.md) for environment config setup information. Refer to the `Notable Changes` section. +- Sometimes there is a need to understand how the setup is done. To locate the appropriate Kafka release in the repo [bitnami/containers](https://github.com/bitnami/containers), go through the [kafka commit history](https://github.com/bitnami/containers/commits/main/bitnami/kafka).
+- Once a commit is located, refer to the README.md, Dockerfile, entrypoint and various init scripts to understand how environment variables map to server.properties configs. Alternatively, you can spin up the required Kafka image and inspect the mapping inside the container. +- Ensure you follow the environment variable conventions in your docker-compose. Without proper environment variables, the Kafka cluster cannot start or can start with undesired configs. For example, since Kafka version 2.3, all server.properties docker-compose environment configs start with `KAFKA_CFG_` +- Older versions of Bitnami Kafka have different conventions and limited docker-compose environment variables exposed for configs needed in server.properties + + +In kafka-go, for all the test cases to succeed, the Kafka cluster should have the following server.properties along with a relevant kafka_jaas.conf mentioned in KAFKA_OPTS. The goal is to ensure that the docker-compose file generates the server.properties below.
+ + +server.properties +``` +advertised.host.name=localhost +advertised.listeners=PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093 +advertised.port=9092 +auto.create.topics.enable=true +broker.id=1 +delete.topic.enable=true +group.initial.rebalance.delay.ms=0 +listeners=PLAINTEXT://:9092,SASL_PLAINTEXT://:9093 +log.dirs=/kafka/kafka-logs-1d5951569d78 +log.retention.check.interval.ms=300000 +log.retention.hours=168 +log.segment.bytes=1073741824 +message.max.bytes=200000000 +num.io.threads=8 +num.network.threads=3 +num.partitions=1 +num.recovery.threads.per.data.dir=1 +offsets.topic.replication.factor=1 +port=9092 +sasl.enabled.mechanisms=PLAIN,SCRAM-SHA-256,SCRAM-SHA-512 +socket.receive.buffer.bytes=102400 +socket.request.max.bytes=104857600 +socket.send.buffer.bytes=102400 +transaction.state.log.min.isr=1 +transaction.state.log.replication.factor=1 +zookeeper.connect=zookeeper:2181 +zookeeper.connection.timeout.ms=6000 +``` + + +## run docker-compose and test cases + +run docker-compose +``` +# docker-compose -f ./docker_compose_versions/docker-compose-.yml up -d +``` + + +run test cases +``` +# go clean -cache; KAFKA_SKIP_NETTEST=1 KAFKA_VERSION= go test -race -cover ./...; +``` + + +## Various Bitnami Kafka version issues observed in circleci + + +### Kafka v101, v111, v201, v211 and v221 + + +In kafka-go repo, all the tests require sasl.enabled.mechanisms as PLAIN,SCRAM-SHA-256,SCRAM-SHA-512 for the Kafka cluster. + + +It has been observed for Kafka v101, v111, v201, v211 and v221 which are used in the circleci for build have issues with SCRAM. + + +There is no way to override the config sasl.enabled.mechanisms causing Kafka cluster to start up as PLAIN. + + +There has been some attempts made to override sasl.enabled.mechanisms +- Modified entrypoint in docker-compose to append the server.properties with relevant configs sasl.enabled.mechanisms before running entrypoint.sh. This resulted in failures for Kafka v101, v111, v201, v211 and v221. 
Once the Kafka server starts, server.properties gets appended with the default value of sasl.enabled.mechanisms, thereby causing the cluster to start without PLAIN,SCRAM-SHA-256,SCRAM-SHA-512 +- Mounted a docker-compose volume for server.properties. However, this also resulted in failures for Kafka v101, v111, v201, v211 and v221. Once the Kafka server starts, server.properties gets appended with the default value of sasl.enabled.mechanisms, thereby causing the cluster to start without PLAIN,SCRAM-SHA-256,SCRAM-SHA-512 + + +NOTE: +- Kafka v101, v111, v201, v211 and v221 have no docker-compose files since we need SCRAM for kafka-go test cases to succeed. +- There is no Bitnami Kafka image for v222, hence testing has been performed on v221 + + +### Kafka v231 + +In Bitnami Kafka v2.3, all server.properties docker-compose environment configs start with `KAFKA_CFG_`. However, it is not picking up the custom-populated kafka_jaas.conf. + + +After a lot of debugging, it has been noticed that there aren't enough privileges to create the kafka_jaas.conf. Hence the environment variables below need to be added in docker-compose to generate the kafka_jaas.conf. This issue is not observed after Kafka v2.3. + + +``` +KAFKA_INTER_BROKER_USER: adminplain +KAFKA_INTER_BROKER_PASSWORD: admin-secret +KAFKA_BROKER_USER: adminplain +KAFKA_BROKER_PASSWORD: admin-secret +``` + +There is a docker-compose file `docker-compose-231.yml` in the folder `kafka-go/docker_compose_versions` for reference. + + +## References + + +For user reference, please find some of the older Kafka version commits from the [kafka commit history](https://github.com/bitnami/containers/commits/main/bitnami/kafka). For Kafka versions with no commit history, data is populated with the latest version available for the tag.
+ + +### Kafka v010: docker-compose reference: `kafka-go/docker_compose_versions/docker-compose-010.yml` +- [tag](https://hub.docker.com/r/bitnami/kafka/tags?page=1&ordering=last_updated&name=0.10.2.1) +- [kafka commit](https://github.com/bitnami/containers/tree/c4240f0525916a418245c7ef46d9534a7a212c92/bitnami/kafka) + + +### Kafka v011: docker-compose reference: `kafka-go/docker_compose_versions/docker-compose-011.yml` +- [tag](https://hub.docker.com/r/bitnami/kafka/tags?page=1&ordering=last_updated&name=0.11.0) +- [kafka commit](https://github.com/bitnami/containers/tree/7724adf655e4ca9aac69d606d41ad329ef31eeca/bitnami/kafka) + + +### Kafka v101: docker-compose reference: N/A +- [tag](https://hub.docker.com/r/bitnami/kafka/tags?page=1&ordering=last_updated&name=1.0.1) +- [kafka commit](https://github.com/bitnami/containers/tree/44cc8f4c43ead6edebd3758c8df878f4f9da82c2/bitnami/kafka) + + +### Kafka v111: docker-compose reference: N/A +- [tag](https://hub.docker.com/r/bitnami/kafka/tags?page=1&ordering=last_updated&name=1.1.1) +- [kafka commit](https://github.com/bitnami/containers/tree/cb593dc98c2eb7a39f2792641e741d395dbe50e7/bitnami/kafka) + + +### Kafka v201: docker-compose reference: N/A +- [tag](https://hub.docker.com/r/bitnami/kafka/tags?page=1&ordering=last_updated&name=2.0.1) +- [kafka commit](https://github.com/bitnami/containers/tree/9ff8763df265c87c8b59f8d7ff0cf69299d636c9/bitnami/kafka) + + +### Kafka v211: docker-compose reference: N/A +- [tag](https://hub.docker.com/r/bitnami/kafka/tags?page=1&ordering=last_updated&name=2.1.1) +- [kafka commit](https://github.com/bitnami/containers/tree/d3a9d40afc2b7e7de53486538a63084c1a565d43/bitnami/kafka) + + +### Kafka v221: docker-compose reference: N/A +- [tag](https://hub.docker.com/r/bitnami/kafka/tags?page=1&ordering=last_updated&name=2.2.1) +- [kafka commit](https://github.com/bitnami/containers/tree/f132ef830d1ba9b78392ec4619174b4640c276c9/bitnami/kafka) + + +### Kafka v231: docker-compose reference: 
`kafka-go/docker_compose_versions/docker-compose-231.yml` +- [tag](https://hub.docker.com/r/bitnami/kafka/tags?page=1&ordering=last_updated&name=2.3.1) +- [kafka commit](https://github.com/bitnami/containers/tree/ae572036b5281456b0086345fec0bdb74f7cf3a3/bitnami/kafka) + diff --git a/docker_compose_versions/docker-compose-010.yml b/docker_compose_versions/docker-compose-010.yml new file mode 100644 index 000000000..35df0b5c6 --- /dev/null +++ b/docker_compose_versions/docker-compose-010.yml @@ -0,0 +1,39 @@ +# See https://hub.docker.com/r/bitnami/kafka/tags for the complete list. +version: '3' +services: + zookeeper: + container_name: zookeeper + hostname: zookeeper + image: bitnami/zookeeper:latest + ports: + - 2181:2181 + environment: + ALLOW_ANONYMOUS_LOGIN: yes + kafka: + container_name: kafka + image: bitnami/kafka:0.10.2.1 + restart: on-failure:3 + links: + - zookeeper + ports: + - 9092:9092 + - 9093:9093 + environment: + KAFKA_BROKER_ID: 1 + KAFKA_DELETE_TOPIC_ENABLE: 'true' + KAFKA_ADVERTISED_HOST_NAME: 'localhost' + KAFKA_ADVERTISED_PORT: '9092' + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_MESSAGE_MAX_BYTES: '200000000' + KAFKA_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' + KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' + KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' + KAFKA_AUTHORIZER_CLASS_NAME: 'kafka.security.auth.SimpleAclAuthorizer' + KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/bitnami/kafka/config/kafka_server_jaas.conf" + ALLOW_PLAINTEXT_LISTENER: yes + entrypoint: + - "/bin/bash" + - "-c" + - echo -e 'KafkaServer {\norg.apache.kafka.common.security.scram.ScramLoginModule required\n username="adminscram"\n password="admin-secret";\n org.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n 
user_adminplain="admin-secret";\n };' > /opt/bitnami/kafka/config/kafka_server_jaas.conf; /opt/bitnami/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config 'SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]' --entity-type users --entity-name adminscram; exec /app-entrypoint.sh /start-kafka.sh diff --git a/docker_compose_versions/docker-compose-011.yml b/docker_compose_versions/docker-compose-011.yml new file mode 100644 index 000000000..93b258e08 --- /dev/null +++ b/docker_compose_versions/docker-compose-011.yml @@ -0,0 +1,36 @@ +# See https://hub.docker.com/r/bitnami/kafka/tags for the complete list. +version: '3' +services: + zookeeper: + container_name: zookeeper + hostname: zookeeper + image: bitnami/zookeeper:latest + ports: + - 2181:2181 + environment: + ALLOW_ANONYMOUS_LOGIN: yes + kafka: + container_name: kafka + image: bitnami/kafka:0.11.0-1-r1 + restart: on-failure:3 + links: + - zookeeper + ports: + - 9092:9092 + - 9093:9093 + environment: + KAFKA_BROKER_ID: 1 + KAFKA_DELETE_TOPIC_ENABLE: 'true' + KAFKA_ADVERTISED_HOST_NAME: 'localhost' + KAFKA_ADVERTISED_PORT: '9092' + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' + KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' + KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/bitnami/kafka/config/kafka_server_jaas.conf" + ALLOW_PLAINTEXT_LISTENER: "yes" + entrypoint: + - "/bin/bash" + - "-c" + # 0.11.0 image is not honoring some configs required in server.properties + - echo -e '\nsasl.enabled.mechanisms=PLAIN,SCRAM-SHA-256,SCRAM-SHA-512\nmessage.max.bytes=200000000\nauto.create.topics.enable=true\nport=9092' >> /opt/bitnami/kafka/config/server.properties; echo -e 'KafkaServer {\norg.apache.kafka.common.security.scram.ScramLoginModule required\n username="adminscram"\n password="admin-secret";\n 
org.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n user_adminplain="admin-secret";\n };' > /opt/bitnami/kafka/config/kafka_server_jaas.conf; /opt/bitnami/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config 'SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]' --entity-type users --entity-name adminscram; exec /app-entrypoint.sh /run.sh diff --git a/docker_compose_versions/docker-compose-231.yml b/docker_compose_versions/docker-compose-231.yml new file mode 100644 index 000000000..f4235c9f0 --- /dev/null +++ b/docker_compose_versions/docker-compose-231.yml @@ -0,0 +1,42 @@ +# See https://hub.docker.com/r/bitnami/kafka/tags for the complete list. +version: '3' +services: + zookeeper: + container_name: zookeeper + hostname: zookeeper + image: bitnami/zookeeper:latest + ports: + - 2181:2181 + environment: + ALLOW_ANONYMOUS_LOGIN: yes + kafka: + container_name: kafka + image: bitnami/kafka:2.3.1-ol-7-r61 + restart: on-failure:3 + links: + - zookeeper + ports: + - 9092:9092 + - 9093:9093 + environment: + KAFKA_CFG_BROKER_ID: 1 + KAFKA_CFG_DELETE_TOPIC_ENABLE: 'true' + KAFKA_CFG_ADVERTISED_HOST_NAME: 'localhost' + KAFKA_CFG_ADVERTISED_PORT: '9092' + KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_CFG_MESSAGE_MAX_BYTES: '200000000' + KAFKA_CFG_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' + KAFKA_CFG_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' + KAFKA_CFG_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' + KAFKA_CFG_AUTHORIZER_CLASS_NAME: 'kafka.security.auth.SimpleAclAuthorizer' + KAFKA_CFG_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + KAFKA_INTER_BROKER_USER: adminplain + KAFKA_INTER_BROKER_PASSWORD: admin-secret + KAFKA_BROKER_USER: adminplain + KAFKA_BROKER_PASSWORD: admin-secret + ALLOW_PLAINTEXT_LISTENER: yes + entrypoint: + - "/bin/bash" + - "-c" 
+ - /opt/bitnami/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config "SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]" --entity-type users --entity-name adminscram; exec /entrypoint.sh /run.sh diff --git a/docker_compose_versions/docker-compose-241.yml b/docker_compose_versions/docker-compose-241.yml new file mode 100644 index 000000000..79d48da71 --- /dev/null +++ b/docker_compose_versions/docker-compose-241.yml @@ -0,0 +1,39 @@ +# See https://hub.docker.com/r/bitnami/kafka/tags for the complete list. +version: '3' +services: + zookeeper: + container_name: zookeeper + hostname: zookeeper + image: bitnami/zookeeper:latest + ports: + - 2181:2181 + environment: + ALLOW_ANONYMOUS_LOGIN: yes + kafka: + container_name: kafka + image: bitnami/kafka:2.4.1 + restart: on-failure:3 + links: + - zookeeper + ports: + - 9092:9092 + - 9093:9093 + environment: + KAFKA_CFG_BROKER_ID: 1 + KAFKA_CFG_DELETE_TOPIC_ENABLE: 'true' + KAFKA_CFG_ADVERTISED_HOST_NAME: 'localhost' + KAFKA_CFG_ADVERTISED_PORT: '9092' + KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_CFG_MESSAGE_MAX_BYTES: '200000000' + KAFKA_CFG_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' + KAFKA_CFG_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' + KAFKA_CFG_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' + KAFKA_CFG_AUTHORIZER_CLASS_NAME: 'kafka.security.auth.SimpleAclAuthorizer' + KAFKA_CFG_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/bitnami/kafka/config/kafka_jaas.conf" + ALLOW_PLAINTEXT_LISTENER: yes + entrypoint: + - "/bin/bash" + - "-c" + - echo -e 'KafkaServer {\norg.apache.kafka.common.security.scram.ScramLoginModule required\n username="adminscram"\n password="admin-secret";\n org.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n 
user_adminplain="admin-secret";\n };' > /opt/bitnami/kafka/config/kafka_jaas.conf; /opt/bitnami/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config "SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]" --entity-type users --entity-name adminscram; exec /entrypoint.sh /run.sh diff --git a/docker_compose_versions/docker-compose-260.yml b/docker_compose_versions/docker-compose-260.yml new file mode 100644 index 000000000..10dea49f4 --- /dev/null +++ b/docker_compose_versions/docker-compose-260.yml @@ -0,0 +1,39 @@ +# See https://hub.docker.com/r/bitnami/kafka/tags for the complete list. +version: '3' +services: + zookeeper: + container_name: zookeeper + hostname: zookeeper + image: bitnami/zookeeper:latest + ports: + - 2181:2181 + environment: + ALLOW_ANONYMOUS_LOGIN: yes + kafka: + container_name: kafka + image: bitnami/kafka:2.6.0 + restart: on-failure:3 + links: + - zookeeper + ports: + - 9092:9092 + - 9093:9093 + environment: + KAFKA_CFG_BROKER_ID: 1 + KAFKA_CFG_DELETE_TOPIC_ENABLE: 'true' + KAFKA_CFG_ADVERTISED_HOST_NAME: 'localhost' + KAFKA_CFG_ADVERTISED_PORT: '9092' + KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_CFG_MESSAGE_MAX_BYTES: '200000000' + KAFKA_CFG_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' + KAFKA_CFG_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' + KAFKA_CFG_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' + KAFKA_CFG_AUTHORIZER_CLASS_NAME: 'kafka.security.auth.SimpleAclAuthorizer' + KAFKA_CFG_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/bitnami/kafka/config/kafka_jaas.conf" + ALLOW_PLAINTEXT_LISTENER: yes + entrypoint: + - "/bin/bash" + - "-c" + - echo -e 'KafkaServer {\norg.apache.kafka.common.security.scram.ScramLoginModule required\n username="adminscram"\n password="admin-secret";\n 
org.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n user_adminplain="admin-secret";\n };' > /opt/bitnami/kafka/config/kafka_jaas.conf; /opt/bitnami/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config "SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]" --entity-type users --entity-name adminscram; exec /entrypoint.sh /run.sh diff --git a/docker_compose_versions/docker-compose-270.yml b/docker_compose_versions/docker-compose-270.yml new file mode 100644 index 000000000..de48cb290 --- /dev/null +++ b/docker_compose_versions/docker-compose-270.yml @@ -0,0 +1,39 @@ +# See https://hub.docker.com/r/bitnami/kafka/tags for the complete list. +version: '3' +services: + zookeeper: + container_name: zookeeper + hostname: zookeeper + image: bitnami/zookeeper:latest + ports: + - 2181:2181 + environment: + ALLOW_ANONYMOUS_LOGIN: yes + kafka: + container_name: kafka + image: bitnami/kafka:2.7.0 + restart: on-failure:3 + links: + - zookeeper + ports: + - 9092:9092 + - 9093:9093 + environment: + KAFKA_CFG_BROKER_ID: 1 + KAFKA_CFG_DELETE_TOPIC_ENABLE: 'true' + KAFKA_CFG_ADVERTISED_HOST_NAME: 'localhost' + KAFKA_CFG_ADVERTISED_PORT: '9092' + KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_CFG_MESSAGE_MAX_BYTES: '200000000' + KAFKA_CFG_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' + KAFKA_CFG_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' + KAFKA_CFG_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' + KAFKA_CFG_AUTHORIZER_CLASS_NAME: 'kafka.security.auth.SimpleAclAuthorizer' + KAFKA_CFG_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/bitnami/kafka/config/kafka_jaas.conf" + ALLOW_PLAINTEXT_LISTENER: yes + entrypoint: + - "/bin/bash" + - "-c" + - echo -e 'KafkaServer 
{\norg.apache.kafka.common.security.scram.ScramLoginModule required\n username="adminscram"\n password="admin-secret";\n org.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n user_adminplain="admin-secret";\n };' > /opt/bitnami/kafka/config/kafka_jaas.conf; /opt/bitnami/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config "SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]" --entity-type users --entity-name adminscram; exec /entrypoint.sh /run.sh diff --git a/docker_compose_versions/docker-compose-361.yml b/docker_compose_versions/docker-compose-361.yml new file mode 100644 index 000000000..f91c1967d --- /dev/null +++ b/docker_compose_versions/docker-compose-361.yml @@ -0,0 +1,39 @@ +# See https://hub.docker.com/r/bitnami/kafka/tags for the complete list. +version: '3' +services: + zookeeper: + container_name: zookeeper + hostname: zookeeper + image: bitnami/zookeeper:latest + ports: + - 2181:2181 + environment: + ALLOW_ANONYMOUS_LOGIN: yes + kafka: + container_name: kafka + image: bitnami/kafka:3.6.1 + restart: on-failure:3 + links: + - zookeeper + ports: + - 9092:9092 + - 9093:9093 + environment: + KAFKA_CFG_BROKER_ID: 1 + KAFKA_CFG_DELETE_TOPIC_ENABLE: 'true' + KAFKA_CFG_ADVERTISED_HOST_NAME: 'localhost' + KAFKA_CFG_ADVERTISED_PORT: '9092' + KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_CFG_MESSAGE_MAX_BYTES: '200000000' + KAFKA_CFG_LISTENERS: 'PLAINTEXT://:9092,SASL_PLAINTEXT://:9093' + KAFKA_CFG_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092,SASL_PLAINTEXT://localhost:9093' + KAFKA_CFG_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' + KAFKA_CFG_AUTHORIZER_CLASS_NAME: 'kafka.security.authorizer.AclAuthorizer' + KAFKA_CFG_ALLOW_EVERYONE_IF_NO_ACL_FOUND: 'true' + KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/bitnami/kafka/config/kafka_jaas.conf" + ALLOW_PLAINTEXT_LISTENER: 
yes + entrypoint: + - "/bin/bash" + - "-c" + - echo -e 'KafkaServer {\norg.apache.kafka.common.security.scram.ScramLoginModule required\n username="adminscram"\n password="admin-secret";\n org.apache.kafka.common.security.plain.PlainLoginModule required\n username="adminplain"\n password="admin-secret"\n user_adminplain="admin-secret";\n };' > /opt/bitnami/kafka/config/kafka_jaas.conf; /opt/bitnami/kafka/bin/kafka-configs.sh --zookeeper zookeeper:2181 --alter --add-config "SCRAM-SHA-256=[password=admin-secret-256],SCRAM-SHA-512=[password=admin-secret-512]" --entity-type users --entity-name adminscram; exec /entrypoint.sh /run.sh diff --git a/examples/docker-compose.yaml b/examples/docker-compose.yaml index 01b60fdbf..f4d3d07c1 100644 --- a/examples/docker-compose.yaml +++ b/examples/docker-compose.yaml @@ -3,18 +3,27 @@ services: zookeeper: hostname: zookeeper - image: wurstmeister/zookeeper:3.4.6 + image: bitnami/zookeeper:latest + restart: always expose: - "2181" ports: - "2181:2181" + environment: + ALLOW_ANONYMOUS_LOGIN: yes kafka: - image: wurstmeister/kafka + hostname: kafka + image: bitnami/kafka:2.7.0 + restart: always env_file: - kafka/kafka-variables.env depends_on: - zookeeper + expose: + - "9092" + - "8082" + - "8083" ports: - '9092:9092' - '8082:8082' @@ -22,6 +31,7 @@ services: mongo-db: image: mongo:4.0 + restart: always expose: - "27017" ports: @@ -43,6 +53,7 @@ services: depends_on: - kafka - mongo-db + restart: always consumer-logger: build: @@ -53,6 +64,7 @@ services: GroupID: logger-group depends_on: - kafka + restart: always producer-random: build: @@ -62,6 +74,7 @@ services: topic: topic1 depends_on: - kafka + restart: always producer-api: build: @@ -74,4 +87,5 @@ services: ports: - "8080:8080" depends_on: - - kafka \ No newline at end of file + - kafka + restart: always \ No newline at end of file diff --git a/examples/kafka/kafka-variables.env b/examples/kafka/kafka-variables.env index dc19833ac..9d6ce8668 100644 --- 
a/examples/kafka/kafka-variables.env +++ b/examples/kafka/kafka-variables.env @@ -1,22 +1,23 @@ -KAFKA_ADVERTISED_HOST_NAME=kafka -KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 -KAFKA_CONNECT_BOOTSTRAP_SERVERS=localhost:9092 +KAFKA_CFG_ADVERTISED_HOST_NAME=kafka +KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181 +KAFKA_CFG_CONNECT_BOOTSTRAP_SERVERS=localhost:9092 -KAFKA_CONNECT_REST_PORT=8082 -KAFKA_CONNECT_REST_ADVERTISED_HOST_NAME="localhost" +KAFKA_CFG_CONNECT_REST_PORT=8082 +KAFKA_CFG_CONNECT_REST_ADVERTISED_HOST_NAME="localhost" -KAFKA_CONNECT_KEY_CONVERTER="org.apache.kafka.connect.json.JsonConverter" -KAFKA_CONNECT_VALUE_CONVERTER="org.apache.kafka.connect.json.JsonConverter" -KAFKA_CONNECT_KEY_CONVERTER_SCHEMAS_ENABLE=0 -KAFKA_CONNECT_VALUE_CONVERTER_SCHEMAS_ENABLE=0 +KAFKA_CFG_CONNECT_KEY_CONVERTER="org.apache.kafka.connect.json.JsonConverter" +KAFKA_CFG_CONNECT_VALUE_CONVERTER="org.apache.kafka.connect.json.JsonConverter" +KAFKA_CFG_CONNECT_KEY_CONVERTER_SCHEMAS_ENABLE=0 +KAFKA_CFG_CONNECT_VALUE_CONVERTER_SCHEMAS_ENABLE=0 -KAFKA_CONNECT_INTERNAL_KEY_CONVERTER="org.apache.kafka.connect.json.JsonConverter" -KAFKA_CONNECT_INTERNAL_VALUE_CONVERTER="org.apache.kafka.connect.json.JsonConverter" -KAFKA_CONNECT_INTERNAL_KEY_CONVERTER_SCHEMAS_ENABLE=0 -KAFKA_CONNECT_INTERNAL_VALUE_CONVERTER_SCHEMAS_ENABLE=0 +KAFKA_CFG_CONNECT_INTERNAL_KEY_CONVERTER="org.apache.kafka.connect.json.JsonConverter" +KAFKA_CFG_CONNECT_INTERNAL_VALUE_CONVERTER="org.apache.kafka.connect.json.JsonConverter" +KAFKA_CFG_CONNECT_INTERNAL_KEY_CONVERTER_SCHEMAS_ENABLE=0 +KAFKA_CFG_CONNECT_INTERNAL_VALUE_CONVERTER_SCHEMAS_ENABLE=0 -KAFKA_CONNECT_OFFSET_STORAGE_FILE_FILENAME="/tmp/connect.offsets" +KAFKA_CFG_CONNECT_OFFSET_STORAGE_FILE_FILENAME="/tmp/connect.offsets" # Flush much faster than normal, which is useful for testing/debugging -KAFKA_CONNECT_OFFSET_FLUSH_INTERVAL_MS=10000 +KAFKA_CFG_CONNECT_OFFSET_FLUSH_INTERVAL_MS=10000 +ALLOW_PLAINTEXT_LISTENER=yes