diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..7dc029206 --- /dev/null +++ b/.gitignore @@ -0,0 +1,93 @@ +HELP.md +/target/ +!.mvn/wrapper/maven-wrapper.jar + +### STS ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache + +### IntelliJ IDEA ### +.idea +*.iws +*.iml +*.ipr + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ +/build/ + +### Infra ### +/infra/kafka-connect/users.csv +## FRONT ### +/front/node_modules + +# Logs +/front/logs +/front/*.log +/front/npm-debug.log* +/front/yarn-debug.log* +/front/yarn-error.log* + +# Runtime data +pids +/front/*.pid +/front/*.seed +/front/*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +/front/lib-cov + +# Coverage directory used by tools like istanbul +/front/coverage + +# nyc test coverage +/front/.nyc_output + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +/front/.grunt + +# Bower dependency directory (https://bower.io/) +/front/bower_components + +# node-waf configuration +/front/.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +/front/build/Release + +# Dependency directories +/front/node_modules/ +/front/jspm_packages/ + +# Typescript v1 declaration files +/front/typings/ + +# Optional npm cache directory +/front/.npm + +# Optional eslint cache +/front/.eslintcache + +# Optional REPL history +/front/.node_repl_history + +# Output of 'npm pack' +/front/*.tgz + +# Yarn Integrity file +/front/.yarn-integrity + +# dotenv environment variables file +/front/.env + +# next.js build output +/front/.next diff --git a/.mvn/wrapper/MavenWrapperDownloader.java b/.mvn/wrapper/MavenWrapperDownloader.java new file mode 100644 index 000000000..47336fde7 --- /dev/null +++ b/.mvn/wrapper/MavenWrapperDownloader.java @@ -0,0 +1,114 @@ +/* +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +*/ + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.net.URL; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.util.Properties; + +public class MavenWrapperDownloader { + + /** + * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. + */ + private static final String DEFAULT_DOWNLOAD_URL = + "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar"; + + /** + * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to + * use instead of the default one. + */ + private static final String MAVEN_WRAPPER_PROPERTIES_PATH = + ".mvn/wrapper/maven-wrapper.properties"; + + /** + * Path where the maven-wrapper.jar will be saved to. 
+ */
+    private static final String MAVEN_WRAPPER_JAR_PATH =
+            ".mvn/wrapper/maven-wrapper.jar";
+
+    /**
+     * Name of the property which should be used to override the default download url for the wrapper.
+     */
+    private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
+
+    public static void main(String[] args) {
+        System.out.println("- Downloader started");
+        File baseDirectory = new File(args[0]);
+        System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
+
+        // If the maven-wrapper.properties exists, read it and check if it contains a custom
+        // wrapperUrl parameter.
+        File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
+        String url = DEFAULT_DOWNLOAD_URL;
+        if(mavenWrapperPropertyFile.exists()) {
+            FileInputStream mavenWrapperPropertyFileInputStream = null;
+            try {
+                mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
+                Properties mavenWrapperProperties = new Properties();
+                mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
+                url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
+            } catch (IOException e) {
+                System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
+            } finally {
+                try {
+                    if(mavenWrapperPropertyFileInputStream != null) {
+                        mavenWrapperPropertyFileInputStream.close();
+                    }
+                } catch (IOException e) {
+                    // Ignore ...
+                }
+            }
+        }
+        System.out.println("- Downloading from: " + url);
+
+        File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
+        if(!outputFile.getParentFile().exists()) {
+            if(!outputFile.getParentFile().mkdirs()) {
+                System.out.println(
+                        "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
+            }
+        }
+        System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
+        try {
+            downloadFileFromURL(url, outputFile);
+            System.out.println("Done");
+            System.exit(0);
+        } catch (Throwable e) {
+            System.out.println("- Error downloading");
+            e.printStackTrace();
+            System.exit(1);
+        }
+    }
+
+    private static void downloadFileFromURL(String urlString, File destination) throws Exception {
+        URL website = new URL(urlString);
+        ReadableByteChannel rbc;
+        rbc = Channels.newChannel(website.openStream());
+        FileOutputStream fos = new FileOutputStream(destination);
+        fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
+        fos.close();
+        rbc.close();
+    }
+
+}
diff --git a/.mvn/wrapper/maven-wrapper.jar b/.mvn/wrapper/maven-wrapper.jar
new file mode 100644
index 000000000..01e679973
Binary files /dev/null and b/.mvn/wrapper/maven-wrapper.jar differ
diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties
new file mode 100644
index 000000000..cd0d451cc
--- /dev/null
+++ b/.mvn/wrapper/maven-wrapper.properties
@@ -0,0 +1 @@
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.0/apache-maven-3.6.0-bin.zip
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 000000000..b6cf300f8
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,7 @@
+FROM openjdk:11-jre-slim
+
+COPY infra/start.sh /opt/start.sh
+RUN chmod +x /opt/start.sh
+COPY target/trabalhe*.jar /opt/app.jar
+EXPOSE 8080
+CMD /opt/start.sh
\ No newline at end of file
diff --git a/README.md b/README.md
index 6fcb5a302..9a9aa8b59 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,78 @@
+# Technologies
+
+* Java 11
+* Spring Boot 2.1.3
+* Kafka
+* Elasticsearch
+* Docker
+* React
+
+# Endpoint
+
+ | Method | URI       | Status |
+ |--------|-----------|--------|
+ | GET    | /v1/users | 200    |
+
+# Query params
+
+ | Name    | Type    | Required |
+ | ------- | ------- | -------- |
+ | keyword | string  | Yes      |
+ | page    | integer | No       |
+ | size    | integer | No       |
+
+# Example request
+
+ | User  | Pass  |
+ | ----- | ----- |
+ | admin | admin |
+```bash
+$ curl -H 'Authorization: Basic YWRtaW46YWRtaW4=' http://localhost:8080/v1/users?keyword=adr&page=0&size=200
+```
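+
+A minimal equivalent sketch that lets curl encode the credentials instead of hard-coding the base64 header; the user, parameters and port are the ones documented above, and the single quotes keep the shell from splitting the URL at `&`:
+
+```bash
+# Same request as above; curl -u builds the Basic Authorization header itself.
+curl -u admin:admin 'http://localhost:8080/v1/users?keyword=adr&page=0&size=200'
+
+# Page through results by varying the documented page/size parameters.
+curl -u admin:admin 'http://localhost:8080/v1/users?keyword=adr&page=1&size=50'
+```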
+
+# Front-end
+To use the test page, open ```http://localhost:8080/index.html``` and enter the login and password in the popup that appears
+
+
+
+# How to run
+
+Just run the ```./start.sh``` script and everything starts automatically.
+
+## Tested systems
+ * Ubuntu Linux 14, 16 and 18
+ * AWS EC2 Linux
+ * Ubuntu Server
+ * Mac OS X
+
+ *RHEL 7 and openSUSE showed instability.
+
+# Important
+The project uses several heavyweight services to meet the performance requirements of the test, so it is advisable to run it in a Linux environment
+
+
+### Mac OS X
+
+Docker on Mac emulates the environment, so it is important to allocate part of the memory and CPU to Docker; the project demands a considerable amount of system resources.
+
+The test machine was a 2013 Mac, i5 - 8 GB RAM and SSD. Docker was configured with 7164 MB of RAM and 4 cores
+
+### Linux
+
+Two runs were made, one on an i7 with 16 GB RAM and another on an i5 with 8 GB RAM.
+
+The first finished everything, including the full Elasticsearch load, in roughly 40 minutes; the second took more than 2 hours
+
+
+
+```It is possible to run the tests while the database is being populated```
+# Sources
+* [Writing 1MM/sec](https://medium.appbase.io/benchmarking-elasticsearch-1-million-writes-per-sec-bf37e7ca8a4c)
+* [Kafka connect](https://medium.appbase.io/benchmarking-elasticsearch-1-million-writes-per-sec-bf37e7ca8a4c)
+
+
+---
 ![PicPay](https://user-images.githubusercontent.com/1765696/26998603-711fcf30-4d5c-11e7-9281-0d9eb20337ad.png)

 # Teste Backend
diff --git a/infra/docker-compose.yml b/infra/docker-compose.yml
new file mode 100644
index 000000000..af041f56b
--- /dev/null
+++ b/infra/docker-compose.yml
@@ -0,0 +1,61 @@
+version: '3'
+services:
+  app:
+    build:
+      context: ../
+      dockerfile: Dockerfile
+    ports:
+      - "8080:8080"
+  zookeeper:
+    image: wurstmeister/zookeeper
+    ports:
+      - "2181:2181"
+    depends_on:
+      - elasticsearch
+  kafka:
+    build:
+      context: ./kafka-docker
+    ports:
+      - "9092:9092"
+    environment:
+      KAFKA_ADVERTISED_HOST_NAME: ${DOCKER_HOST_IP}
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    depends_on:
+      - zookeeper
+  manager:
+    image: hlebalbau/kafka-manager:stable
+    environment:
+      ZK_HOSTS: zookeeper:2181
+    ports:
+      - "9000:9000"
+    depends_on:
+      - zookeeper
+      - kafka
+  connect:
+    build:
+      context: ./kafka-connect
+    ports:
+      - "8086:8086"
+    depends_on:
+      - zookeeper
+      - kafka
+  connect-ui:
+    image: landoop/kafka-connect-ui
+    ports:
+      - "8000:8000"
+    environment:
+      CONNECT_URL: http://connect:8086
+    depends_on:
+      - zookeeper
+      - kafka
+      - manager
+      - connect
+  elasticsearch:
+    image: docker.elastic.co/elasticsearch/elasticsearch:6.4.3
+    ports:
+      - "9200:9200"
+    container_name: elasticsearch
+    environment:
+      - discovery.type=single-node
\ No newline at end of file
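The compose file above declares the whole stack (app, zookeeper, kafka, manager, connect, connect-ui, elasticsearch), so it can also be brought up by hand when you do not want the start script. A minimal sketch, assuming the ports published above:

```bash
# The compose file reads KAFKA_ADVERTISED_HOST_NAME from ${DOCKER_HOST_IP}; export it first.
cd infra
docker-compose up -d --build

# Watch the services come up; Connect and Elasticsearch take the longest.
docker-compose ps

# Quick probes against the published ports.
curl http://localhost:9200/_cluster/health   # Elasticsearch single node
curl http://localhost:8086/connectors        # Kafka Connect REST API
```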
diff --git a/infra/kafka-connect/Dockerfile b/infra/kafka-connect/Dockerfile
new file mode 100644
index 000000000..b8a050235
--- /dev/null
+++ b/infra/kafka-connect/Dockerfile
@@ -0,0 +1,31 @@
+FROM infra_kafka
+
+COPY worker.properties /tmp/worker.properties
+COPY distributed-worker.properties /tmp/distributed-worker.properties
+COPY file-stream.properties /tmp/file-stream.properties
+COPY elastic-sink.properties /tmp/elastic-sink.properties
+COPY relevant-list1.properties /tmp/relevant-list1.properties
+COPY relevant-list2.properties /tmp/relevant-list2.properties
+COPY kafka-connect-spooldir.tar.gz /tmp/plugins/kafka-connect-spooldir.tar.gz
+COPY kafka-connect-elasticsearch.tar.gz /tmp/plugins/kafka-connect-elasticsearch.tar.gz
+COPY lista_relevancia_1.txt /tmp/lista_relevancia_1.txt
+COPY lista_relevancia_2.txt /tmp/lista_relevancia_2.txt
+# Setup: unpack the connector plugins and stage the users.csv input file
+RUN mkdir -p /tmp/plugins/kafka-connect-spooldir \
+    && mkdir -p /tmp/plugins/kafka-connect-elasticsearch \
+    && tar -xzvf /tmp/plugins/kafka-connect-spooldir.tar.gz -C /tmp/plugins/kafka-connect-spooldir/ \
+    && tar -xzvf /tmp/plugins/kafka-connect-elasticsearch.tar.gz -C /tmp/plugins/kafka-connect-elasticsearch/ \
+    && curl https://s3.amazonaws.com/careers-picpay/users.csv.gz --output /tmp/users.csv.gz \
+    && gzip -d /tmp/users.csv.gz \
+    && mkdir -p /tmp/in \
+    && mkdir -p /tmp/out \
+    && mkdir -p /tmp/err \
+    && mv /tmp/users.csv /tmp/in/users.csv
+# Topic config
+#CMD connect-distributed.sh /tmp/distributed-worker.properties
+CMD connect-standalone.sh \
+    /tmp/worker.properties \
+    /tmp/file-stream.properties \
+    /tmp/elastic-sink.properties \
+    /tmp/relevant-list1.properties \
+    /tmp/relevant-list2.properties
\ No newline at end of file
diff --git a/infra/kafka-connect/distributed-worker.properties b/infra/kafka-connect/distributed-worker.properties
new file mode 100644
index 000000000..208638398
--- /dev/null
+++ b/infra/kafka-connect/distributed-worker.properties
@@ -0,0 +1,31 @@
+# for more information, visit: http://docs.confluent.io/3.2.0/connect/userguide.html#common-worker-configs
+bootstrap.servers=kafka:9092
+
+group.id=connect-cluster-2
+
+key.converter=org.apache.kafka.connect.json.JsonConverter
+value.converter=org.apache.kafka.connect.json.JsonConverter
+key.converter.schemas.enable=true
+value.converter.schemas.enable=true
+internal.key.converter=org.apache.kafka.connect.json.JsonConverter
+internal.value.converter=org.apache.kafka.connect.json.JsonConverter
+internal.key.converter.schemas.enable=false
+internal.value.converter.schemas.enable=false
+offset.storage.topic=connect-offsets
+offset.storage.replication.factor=1
+offset.storage.partitions=25
+
+config.storage.topic=connect-configs
+config.storage.replication.factor=1
+
+status.storage.topic=connect-status
+status.storage.replication.factor=1
+status.storage.partitions=5
+
+offset.flush.interval.ms=10000
+
+rest.port=8086
+
+plugin.path=/tmp/plugins/
+
+
diff --git a/infra/kafka-connect/elastic-sink.properties b/infra/kafka-connect/elastic-sink.properties
new file mode 100644
index 000000000..d33f8ef2d
--- /dev/null
+++ b/infra/kafka-connect/elastic-sink.properties
@@ -0,0 +1,11 @@
+name=ElasticsearchSinkConnector
+connector.class=io.confluent.connect.elasticsearch.ElasticsearchSinkConnector
+tasks.max=1
+topics=trabalhe-conosco-backend-dev.users
+type.name=user
+connection.url=http://elasticsearch:9200/
+# Only the 'extract' transform is enabled: the CSV source already writes a struct key,
+# so ExtractField$Key unwraps its 'id' field; the createKey entries are defined but unused.
+transforms=extract
+transforms.extract.type=org.apache.kafka.connect.transforms.ExtractField$Key
+transforms.extract.field=id
+transforms.createKey.type=org.apache.kafka.connect.transforms.ValueToKey
+transforms.createKey.fields=id
\ No newline at end of file
diff --git a/infra/kafka-connect/file-stream.properties b/infra/kafka-connect/file-stream.properties
new file mode 
100644 index 000000000..9156b52be --- /dev/null +++ b/infra/kafka-connect/file-stream.properties @@ -0,0 +1,12 @@ +name=CsvSpoolDir +finished.path=/tmp/out +tasks.max=1 +error.path=/tmp/err +input.path=/tmp/in +connector.class=com.github.jcustenborder.kafka.connect.spooldir.SpoolDirCsvSourceConnector +input.file.pattern=users.csv +halt.on.error=false +topic=trabalhe-conosco-backend-dev.users +key.schema={"name":"br.com.vtferrari.model.UserKey","type":"STRUCT","isOptional":false,"fieldSchemas":{"id":{"type":"STRING","isOptional":false}}} +value.schema={"name" : "br.com.vtferrari.model.User","type" : "STRUCT","isOptional" : false,"fieldSchemas" : {"id" : {"type" : "STRING","isOptional" : false},"name" : {"type" : "STRING","isOptional" : true},"username" : {"type" : "STRING","isOptional" : true}}} +csv.first.row.as.header=false \ No newline at end of file diff --git a/infra/kafka-connect/kafka-connect-elasticsearch.tar.gz b/infra/kafka-connect/kafka-connect-elasticsearch.tar.gz new file mode 100644 index 000000000..ebb4bd640 Binary files /dev/null and b/infra/kafka-connect/kafka-connect-elasticsearch.tar.gz differ diff --git a/infra/kafka-connect/kafka-connect-spooldir.tar.gz b/infra/kafka-connect/kafka-connect-spooldir.tar.gz new file mode 100644 index 000000000..3c4a56cd3 Binary files /dev/null and b/infra/kafka-connect/kafka-connect-spooldir.tar.gz differ diff --git a/infra/kafka-connect/lista_relevancia_1.txt b/infra/kafka-connect/lista_relevancia_1.txt new file mode 100644 index 000000000..01797ba7a --- /dev/null +++ b/infra/kafka-connect/lista_relevancia_1.txt @@ -0,0 +1,107 @@ +fba0be35-7111-43c5-8111-b326360da4d0 +7354ff5e-cc72-4cc7-a8d0-279f3349c52b +4096545a-3d93-476d-9a25-ae486a12a720 +c9a749fb-2213-47a7-b56d-ebcad7f02bdf +328edd8c-5a54-453a-a9a8-694a4d46e898 +1bec9b05-3641-4fe5-8d6f-43835dbafc47 +d542a5b1-fb3b-44c7-92d0-64b89565a93e +f7754e98-121d-447f-80c7-a246b1250141 +c0c9aa83-ddc4-4088-84ba-095402ea7f16 +ceaa2306-41c1-413d-875f-e99bc2e908be +8f065ff7-6fb7-4373-9fb4-3e5daa7d802e +c72710fb-0a47-4c0f-b906-1abe3da9c4ef +277c3e15-c275-4f6c-8345-f2010c58bfac +add2607d-6ada-44c9-a89a-eecc5c225c4f +1a2093a9-0bdb-43c7-acf3-f2d2decb9f25 +766c9e3e-5baf-40ac-9455-db6f55dec9e5 +636b8bac-2d38-4a59-acdf-14fa4f165d31 +8f934250-2d8e-4f2e-a316-d0bbc177cfb8 +7d30aad3-ea80-400f-878f-130f2944695a +049dc170-b13f-41bd-b822-6889a3619c91 +230b6cd8-7463-424a-b5f5-e3c181a870bc +99b1705c-f1c6-4535-bf22-da2d541c797c +4bc2fb42-7610-47e2-8574-0e7ef9bc6503 +2943dfeb-6542-4269-86d9-ce3d97f99c5b +a30e39fb-7684-4886-8247-09cda8c7a469 +05815a4b-1571-48c0-80d7-ab290e69b767 +f8f92179-d36a-4c37-9c15-72af4a7932eb +a8a2ffa7-c7ee-4747-af94-bafc453ee6e5 +e83736a1-a908-46e8-b235-987c8e261cd4 +3a03df17-1153-435f-8bf9-904b27faae46 +9758d6c4-40dc-4138-8b1a-ef999cf1c540 +ef108646-a30c-495f-ab32-0ac1052c57fe +4217eb38-f6b2-48c5-bf25-3ca8a9462385 +52f3cd9f-f01b-46ed-8bb4-dde64028da06 +7a880f89-d2e6-4fa2-b377-a559d25e516c +af2b0d62-748d-40e7-ad6a-77c7872d59fc +f4b9b827-8221-436b-adaa-3115aec3c26b +6d81db15-2f3f-4122-9ae8-1c2dc1350e60 +2923fdfe-9455-4758-9225-08abd60b6a58 +1b658968-2326-4532-a3c6-df59815a3a4b +e84a504b-29fc-48fe-aed7-95bca464d1c1 +c475c9ae-e561-4bc2-a719-6c6f0c1fa2dc +0bea8a99-f178-4cfe-9170-6ee1620a2720 +7fb422d3-965c-4dfd-946c-aa76b393e2f3 +805015db-9eaa-44fa-be36-c01099f2025c +3173ca97-7ee8-4001-a4f3-234e5a541679 +874134ce-a930-40fb-8a6e-8614af78fccd +cbb5586d-a3ae-4d74-b373-6cc273a797a6 +c7727e41-13c2-42fa-b388-3ade3eac53dd +960b225c-48d4-413a-8584-f71163cb4f98 +cb8080d9-eafb-4261-ab5e-ba0a4b7b4c50 
+0a28b0ca-73c0-40b7-9502-e685ba47a6f4 +b3954c82-05db-4016-a2d3-38ac632477f7 +24630f58-5d96-42d6-bc9f-8fb90007bfbc +c3908d8d-c76b-4ee1-8210-6f141a3dc852 +40d76306-6824-498e-9b9c-88369a539cde +706f761f-f847-4e6f-a057-92b7d293ab40 +6a63b532-7b1d-468a-90f6-bbb48546f581 +30904389-92c8-4711-8eb7-ba0e8c7dc220 +bfc1fde7-731a-4cbd-a9e8-3d05a8a3d155 +70e4208d-1d45-450c-873a-5cd445b7c792 +b0a8d02a-2f18-43ad-948c-8cff49bede40 +adcf617b-333e-4e72-9155-a51b0c6ed2bd +a84cef6c-54f6-4dc6-b4f4-afb35fed1923 +128af6cc-a7db-4a62-90c4-889848cd3c06 +2f423f40-dbbb-45fc-9f89-79b891838c6c +24d9f7b5-9419-443c-8a4d-8a7334aebe90 +714effca-4ddb-4f23-a28c-10359e17e6ac +5a0c015b-4c23-44bb-9cea-00b32c40481b +20276d68-7ad6-4dc6-beeb-c0520650a66f +b4d231c3-18ca-41dd-b73e-37f536988fc1 +ebd96f97-f4a6-41e6-9f8a-96a060a73b74 +ce8887b4-317a-4975-9857-46e40bd1657d +de7bc1f1-039a-49ca-afda-7a3a5d4bfe02 +2427ffe5-479c-49de-8a47-1644fae77ca5 +a7d48fe0-a037-4230-a546-3fe804b245a4 +cd4491ef-89f5-4e9c-ad4c-76f9d3ead1f8 +5dd17a55-7233-4ad7-bdb1-61bc8a27ab7e +ff1adc5d-c411-4c9e-9d8b-26cebd17e7be +e9f6340f-1327-49d5-8bcf-43646e6c2194 +45868a33-058c-41b8-9a23-b23253ae6216 +2a43e34f-1afc-4610-8f99-f58a3cf19631 +0252ce83-b1e7-4d1e-ac4b-b0322dfdaead +a3f77b6e-9170-409b-921a-0b2c2894c296 +a4079877-a08d-4db3-80cc-fe23cb90b10a +a4312fa0-860f-44a2-b4ba-3026714cd72f +189453fc-011b-4ea4-a964-f2b0479e1ce6 +27128444-55d4-4aff-8c74-bded3563a8dc +a596bc4d-a0cd-4607-ae64-61d3c043ccdf +7f58ae38-adfa-4b4e-9f42-b4f9ff448255 +3c1bc731-f7cb-46a1-b90a-47eea1f80623 +0c00d00f-4183-4c17-b6cf-fa41420c86af +d6f6ec2b-6d24-4e6b-8591-87725c380e6c +9202652f-1b29-42a0-b3f6-b607045e7d4a +152bfd83-88a3-4786-95d5-a7b263fa4449 +bcdd8bd7-7967-4ac0-ad10-eb67e5c3238e +55a8afd7-50b5-48b4-8287-8f22d8ef0a65 +a0cd9cb6-288b-4fd3-bc14-54fac89755e8 +0a92a468-79ff-4334-94db-d5212ec515da +156ed6eb-7777-4b85-8516-fabce190921d +632b0f74-5ada-408c-85c4-7b127683b954 +3403fc1a-9560-41db-84d1-ffb337e73aa3 +f2effce6-7c1d-4b70-842d-f39ace03972e +10f54225-07af-4c04-bb0f-c4d25bc4ce4b +ad9be504-26f3-4d08-95aa-c2cc26869a26 +64894fc5-2dc3-4311-86a4-917d9068fa75 +64bb9706-4d5f-4b61-9167-e0ee0bf2c51f \ No newline at end of file diff --git a/infra/kafka-connect/lista_relevancia_2.txt b/infra/kafka-connect/lista_relevancia_2.txt new file mode 100644 index 000000000..54d4f2dbd --- /dev/null +++ b/infra/kafka-connect/lista_relevancia_2.txt @@ -0,0 +1,308 @@ +a6a80da2-de32-48ed-b131-cd1f922d7cfa +e8ce0d26-08da-4b04-8f66-bec320b72b99 +55b4575a-c02f-4c7a-8811-a8ee30a5f597 +de730e99-c6a8-47af-9904-bfbaf5c1581c +ea9d2d2c-6f3c-42aa-be87-c5df99caf3d0 +cb627c9c-344a-4b10-b83f-39323912beda +98a58620-390b-4c12-be56-34acf9ff1884 +80d41c6f-e231-4d36-8225-f93029226e82 +329cde61-af32-400e-856e-d310f27d785f +2d2e57f3-5553-4968-8f69-1d4a4642e21c +4154c7af-64ae-4de0-827e-1e366ceb06fe +c4d43627-53df-432f-85c1-01705d71071e +389b926e-9b0a-4ec3-8de5-a887b84793fa +4c3d45cb-fbaf-4781-ab53-ce98b55f6714 +46983191-3efe-4b94-bccd-6d6f411c02d0 +c51f4734-6e30-456b-8f75-086c3437e578 +b695c5a6-6ae0-46b9-a48f-6e8f27a160a1 +f142b48b-ea21-437a-ae4f-cdd75fbc3b8a +af98d04a-6f50-4192-941f-490c21f76158 +0911698b-2753-47d9-9513-3993b5d04e33 +c3e6ac87-0301-40e3-b54f-22c6c3990fb3 +3c97ab03-0acc-49d8-b26a-96533de3718f +f1c01392-84e0-4dc5-9ab5-cf66c94f5b0f +669a1345-6d8c-428e-8eed-af1e989942fa +252b7528-c8ee-4c73-be56-0cf2e30eedc5 +1970802a-90ce-4e76-901e-94ca346f1e47 +934086ee-0ed3-492c-b455-66af01befd43 +6326df59-5acc-4323-8080-c99a2cca668e +4bec5e4b-83c7-4dfe-a406-c020ad88e752 +5dda7d1c-0e95-43a7-9dc2-11008c53ae01 
+367d5e0c-cfb9-4431-be5b-22fdf6c4c23a +3697a2c2-f3e5-4001-8c54-9f4047ff75d5 +286e35d7-6731-46d4-8364-371729782704 +860768c0-b5f3-46c5-bb1b-2c462ca6db34 +365df9cc-062e-485b-ba8a-cc708cbed4ff +7240d2a9-ba4e-4d0b-842c-d51762e67308 +47d81586-4026-4793-bcf1-0244328dad04 +9b559f2a-bb07-4a44-aa5c-5bd6fe12ede2 +b0db3f6a-c012-45e6-a601-6861adc46f9b +cc504a00-6b6e-4431-80a4-d8ee34afd0c5 +387dbdf0-9509-4785-ab5a-57b1fcc17564 +dd0f60f0-ca01-4512-b782-66f18979b14a +8020a295-553b-4cd3-82e5-18bef53b2137 +99b1c71f-a9e6-43ee-8a6c-6daf1cbd6819 +26c37802-1a41-4f33-8d66-1b8c779e12b3 +02f903f3-9fb7-45ed-9c49-cf92634088cf +4a872afa-653a-40aa-958e-40b0e6e1e055 +aa72ca86-d4ce-4b65-9a9e-439ac64feca2 +6306ea46-9cec-4f66-8277-09c15f85bca3 +6238db81-8f61-4b5e-8294-71e210f7e424 +9c138ccc-ecc5-40dc-8269-11e9f676e45e +02014677-032f-48eb-89a9-6e0a4074fa50 +55dc062b-22c3-4cf3-8d0b-a6b97c97970a +460fe931-8835-443a-8a9f-c9f6ced9665b +136381a0-78f8-4d73-aa94-55893cc7bede +223797ef-56a9-43c9-ac7b-29e4c721fd4f +d54d03cd-d164-4f13-8f2a-defa9fec031f +8c3f73db-4cb7-4e51-a24b-9b2644278d06 +487bd2b3-f7de-45d2-8c49-b9bd7f2e25ef +b84ee64d-2f80-43a8-9d19-7b69e8907b9c +06fafc64-67a5-4452-ace7-a951ab849b77 +6093869b-2675-4b91-88f9-41c0e690584f +361b190f-2a5e-449c-a235-91cc7715afca +61dcdc8e-6e45-480a-9af4-2bf686899680 +fe24c279-dc07-4389-9fea-97cea22ea42f +9122471e-7da8-427d-96e9-e560d4bee6b6 +6cc718bc-a3cb-4015-b685-72aa1f2a36fe +ed776d02-4f38-45f2-bd72-de09c5098729 +64cf7ba5-0256-42ad-998b-8135849226b8 +3c2318f5-48f9-4d5f-b334-12330080f5f6 +57e3d315-1373-452e-832f-fcbce5184600 +cc214ff7-6e3e-4820-9b73-ca20da69bd94 +293db107-5536-40ea-b8d9-dfb9244f4500 +c042df02-e315-49c8-993a-025261821f7c +787722ad-965c-412c-9a5c-5839059c79eb +14082775-408e-4e00-a038-08fdb02a8036 +404a3495-3908-4528-92e3-7dfa97732902 +6a4a8ac3-c6d2-4d84-8255-3c4bf8ad755e +df536db6-1f84-45cb-95a4-86cb6665383c +749af096-cae9-4c2b-a5c2-1453dd19a980 +0e5fa699-a7cb-46e9-a133-8e945656d509 +c24d78c7-ed7e-49dc-9e62-71b5b582ac65 +4bb1e65c-412c-4f1a-92bf-c30cc07ff197 +66531019-3a18-4e61-86ec-0919337f97c5 +4ee7a795-0eeb-45e0-b67f-4e7a27ae18f6 +a0a9fb37-662c-4ad3-969b-8a949a164b0d +c3c4f5d1-d3c1-423f-89b2-352a6de3009e +73eb6848-744e-4738-a4fc-f49b7ae67745 +1ca054b9-a8a5-4f0a-ae0e-64f29dc2df2d +6203aa39-5ee7-4469-9bd3-a825f904c8c0 +ebe22f7b-e718-41af-8a5b-e6e5c22e5998 +77dfb4db-336e-4d5d-b589-a8e79d8e0f56 +e2b7ca87-b351-4759-a727-64ccdbf96c00 +c1a08f8b-6daf-4e36-86ef-a4489ecf0adb +ebc748f9-6a08-46a1-81ba-bf6b35d7ee6b +357c27f1-dec6-48dc-bfaa-befe051b40f9 +520d1eaa-6572-4749-ad2b-a459cb709849 +202e119f-fdd7-4bda-9e78-55879cfc10a7 +2b7e71ee-8cb1-4888-b56f-89a925551358 +b8c5b054-e501-4240-9c12-5f41c194f278 +36958e6c-4d4a-4c0b-841c-a9a6b1b1ae17 +fa0234fb-840d-4cd7-b723-a72121b53716 +7c0758e4-d7e3-440a-8ed0-a090b120a06d +65c34357-5801-4f73-b596-c78b08a791c5 +a92fa490-dba9-4d31-98b5-1f3d6fb93975 +35de4172-fccb-4f71-9f42-f6df1972ae6e +32d1178b-ee98-4391-b6cf-00027ed9b934 +e1622cb3-4574-4085-adb0-57113807e33a +995f6323-a880-44a7-a756-6b47cbf9d147 +0160e03d-3030-4e23-8785-298b01a6656a +c3bb8b2d-433d-41b0-a935-83ea4977789c +c259fab3-60c5-457c-86b6-a6714def01d7 +6ba4a2e5-bfdc-481d-90e7-c8875a997d28 +1bc95188-e843-45e7-b4a1-71aee29d864d +27d9c917-21ae-4edb-bb94-3321c583428d +77ae4daa-6c2f-4064-b251-88279b82b577 +8b42abce-bc5e-45e1-a8d8-a041ca0cf33d +05931d60-7301-4e93-aab6-45ef34006721 +449bcd35-f2c4-4211-91bd-2010a3968a47 +99c80098-66ad-4966-a17f-5e65a75da4e8 +fc23472e-9af8-418e-b0f7-bc0964a7dd4b +46bec514-0b97-4a1d-8a3c-b4d15b4f8209 +c8c43388-96be-4e9b-bf6a-774170f991e5 
+d4fa2a5f-408e-48a7-9490-c5bd4382e8fe +dec5699a-17f7-4e5c-a9bd-f724e257ebd6 +c48ec50d-fe5a-4c67-a366-a49b2ea4b1ca +0c129140-b6e8-4314-b9e5-041b7072d583 +9e31a4b7-257e-48d2-a62c-db15b029282d +b186cc91-0449-4caf-a31c-f964c0ae68e8 +6a4c60a4-8674-469f-8c3a-8700b67733ee +26dedf2f-0f03-450b-99be-2df1f6aaebf8 +95f59c19-be83-46a5-abea-4a7bd014477f +7f4e9dbd-8200-468e-9500-74ada2728678 +1f376047-7212-47f2-91a4-53bccbce0787 +c21ab96f-856c-4601-9862-faed92c88b44 +770f9a0c-3cb2-4f29-a590-26f47fd4bec2 +b472de54-5b43-4c61-a1a9-3080b8801c4d +1aa1c271-a7cc-4909-9446-ea0fa9d3493a +21243674-ccdc-45e3-81c7-d6d60da984c4 +a53c363e-0bd7-41e4-a5e6-a648be5ef6a1 +621e99e0-fa21-47bd-9051-a096b6a891ab +ebc3510f-a01b-44a0-9a20-ca759c6c28d6 +795c69ed-fc7d-442c-bb7b-2a4335422fde +0fb90986-3a38-485c-ba52-617bc2a58030 +aa9c304d-7862-4fe0-a8c0-14f3d26005bb +67dae8e1-239c-4d0b-9375-9506707d7db8 +050e5fe5-6650-4ed7-9bd1-c5fae8af21ca +5f807341-a460-4b36-ad22-d57f249ddbb0 +6be92eb1-f10a-4c89-b2cf-2cf1a512b529 +328852e4-fed1-4646-bc1b-38ef9508cd4d +061c4f03-7dfe-44e6-860c-72aa5440aea4 +2890d2d4-9182-42e7-8e5e-2715688ebf11 +fafa2ad2-8ca5-44ca-9ebf-be9b7e5aae03 +8e94128e-49bb-43d4-b414-de93f6871fe8 +a73f8976-f1a8-4989-ab89-19235acaebb5 +54e0d9e9-efae-46e1-bd44-3090b3cc0add +d411cd8d-c449-42fb-8040-a8a794c07b6a +d0382606-bc22-4ca8-948a-bbe06976d378 +20ebe517-29f1-410b-9b66-7ee90081edc2 +66730e7d-267a-4aba-98b6-f765975d5c26 +79e4cdc0-d33d-494e-b468-37ffe998131b +3235a26b-d787-458e-a954-4a0320a9581f +44735197-1fd5-4298-b75c-33805b94a2b7 +b6c37409-d480-47f4-aa8f-fdab32e0e906 +82031ed6-b2bb-477d-b61f-6e3c53924e6a +7aeed43a-4efd-4b49-8cab-43456632a24b +46b94093-e69b-44ae-ab6f-80f945ddadd5 +2ad0940b-fea7-43c7-8667-969ae34c749a +c4499f56-3a69-47b8-9795-fbb7cb6795a9 +ace397f1-568b-4c07-8b7d-04faef5f3e14 +32773547-767d-49cc-a668-bd3822c1c25d +890eac67-4a71-47e3-93bb-8fdbb56254f1 +0c2af121-5c9a-42e7-af68-7a4e1e00f882 +a731a7e9-d67f-4c3e-90f6-5569b3d950ea +fe7ce910-0e8f-43e5-ae22-93aaf692f35c +3a6bcd06-6568-4510-9bc6-878ed36e9178 +c8f45582-52b0-4808-9450-0f3eb9476bec +13e3ff5e-c460-4111-871f-e40bb1e0ee9b +04613a0c-7a26-4167-8161-419e1b196447 +d70d0f7f-9fb5-46e6-aaf9-15b5c084367d +56ee5fa6-da30-47e8-9b5c-196ccfeefe7a +8cc06d65-899c-4862-a6f5-ed70d83eded8 +b547d482-389e-411b-b392-8bb875f7f81f +a1d89008-4c0e-415c-befb-1eb24376bae5 +c7db775a-9f7a-4976-8251-c27e05e9013c +1937ab75-601a-4133-837a-ece4266bf539 +eec187df-e560-4311-ba35-428a564a5169 +2245b578-9062-412a-977a-f6f58d572522 +c7c8d1df-6c34-4481-ae65-b20913f51dcd +2f624fbe-7526-4162-8753-44f813ba2f0c +2afc4cf9-f6fc-4920-8990-f4d40974b515 +3edb184d-a685-430c-84c5-936f7c879803 +28d779af-c51e-4168-888e-4be0a1b10c48 +a5716c26-4bf6-4397-ae62-3cece051f100 +8fc4589f-b821-4b2d-b00a-9831f22bafe8 +c359786d-1f3b-4064-b430-6b6184a59708 +0f59d0c5-147a-44a6-93d0-d659c895391e +16e4d481-09d5-4163-aeda-4048924b3a53 +a2a2dc75-59dd-47f7-8bbe-51d2de12d69d +0d7b2db3-176d-46f0-9356-4682965f2b80 +3a3137c8-08c5-4654-a867-09f6a891554d +780640aa-12b5-4ef2-b9ab-d64df6142d5c +df5b1b98-f3a9-4002-9505-ef758df49e0b +49e6f375-3acd-4de8-ab42-1c308aa0af09 +eaffcbba-9537-42c7-b854-df39bc97cb77 +2252e7c0-2e06-4ca8-b651-3157bf1a33c6 +109320ab-daa9-43e8-b84e-09be36ad8d87 +48a42495-afce-4696-87d0-5b76b78dbbe5 +86db546d-dfa3-4643-a77b-a53b2a7a55b0 +b99caaff-5499-4d2a-b40f-67d5ec16ea23 +c6c19088-3784-426a-8f0d-7acf3a848977 +06a86f0e-dd6e-46d2-b980-27af0fc75e72 +52eef971-d447-493c-8768-71b6dded88ee +cf401e32-f9c1-4009-8f6c-b0b738991886 +1777130b-e03c-4a6a-af24-a378a28048ef +9d00d4d0-d5f5-4fe8-81fa-9f2d71c56656 
+3c80ad2c-53c7-4c9d-ad64-b8a9e1c922db +73861d9c-cafd-45a3-b057-cfe4e94efa0d +ebe90236-ba84-4ca3-b2a1-0b168e9f1dbd +e13fc9ad-0340-4c98-b99e-e353b194e920 +d0e4de3f-2bb0-429c-b1dd-4eebe9fd7df7 +12f6f8bd-925e-4944-8ec9-754d300c6bb6 +30e80def-c31d-4705-aa07-b54b3643a99e +bb260e4c-ecc1-4bcc-b278-c6f4af318f90 +b7aa01f5-bdf1-4433-80e9-75eb28fa698d +2523bcce-696d-42d6-8b82-1f96e268d230 +7d85cd83-f3b6-4035-9f52-303d2c4f79e6 +59be63a5-3078-4a8d-aec0-e40047060d4b +7e3ab6f4-eb02-491b-8715-48302c93bad1 +252bc161-205d-4c0f-a243-1fc66165d316 +9855fc79-1a55-4337-a7a3-3f63323f3aaa +ec9037e9-d8fd-450b-aa6e-84daf8c455b1 +d405b166-2de7-4575-86f2-276cca0a763d +d783cdef-c55f-45d0-8691-c6511fa3e544 +2c647d96-0374-4970-ab4a-3ff0ca89cf9d +4a7fd2d3-7bd8-4ae4-880e-3617f8f175b3 +3ea8322d-f6b9-4a05-a219-f00499407c5b +1290ea16-8fb8-4dbf-b7fb-0a436df779f9 +abdf4f0d-9165-4e68-bd44-485428da9fc4 +dab6a718-03ef-4167-9e19-ae61c5334a29 +66886f32-c31b-49d5-928b-12a63c10549d +77953b62-b100-42f1-93b3-59e71514f441 +22ab94ce-b8e2-403e-8441-5be79687e2c7 +500b8ff5-4bb6-4c21-af1b-cbfcfa518e95 +008d55f7-4cef-4ce1-8709-3f83c8bbcfeb +8a79f5c7-d693-4739-9f76-b30d62fc9954 +a6648e48-ce72-4fb4-9bab-e54fcd7f4d42 +0bb34b3a-2b5a-4b04-8aaa-09f6b584f767 +67e12fcd-e1be-4dda-802d-470642dd71e9 +97fecef8-18c4-42f9-92c9-d525d27292f0 +4c2c8c93-45b2-4b26-8b9a-11f015feee09 +51f25713-7a45-4049-9f3a-82db9226e0c5 +c9842fae-755f-4bf3-826a-726735049950 +0c6ef4f6-bfdd-486a-949a-38ae02f7fb91 +e143a3c3-e4fc-4e13-85c6-21c3493b3bbd +7798f429-c59c-4f10-bc59-85656ccaa0bf +0cbcd046-0099-4883-b0ae-75d05e30a1fa +55565b96-27d5-4234-97d0-2f2633b80157 +cd9a3fae-74fc-4389-91a1-c6142588aa40 +bdcc6177-5217-4e3d-83d1-592e86195021 +02bbc288-f190-48e2-8b62-8fc2f88f2c61 +51b10cbf-6cb5-4ac5-99d7-0e52085857c5 +4d933bd3-4202-4376-8f85-6eebd23af668 +b4e82b1c-1d4b-444b-8e18-2e0073880de0 +76ecd4d1-dad5-4a00-adaf-47d9240df6ca +f30ca053-a1ae-46ac-80bb-8a6ea56eaf22 +aa13fda7-7833-4302-8bd7-64fea17d7b6e +a795ac7b-e2f1-4e88-ba00-40a8157ed806 +c034eae7-9a36-4d5d-87b6-9e64285deee2 +36212d74-9d6c-42f0-8307-ea94b8d6a504 +2d70a50f-d4ee-4bf2-8301-73c655f4faec +52151f53-8a06-4a3b-b1bf-231cf41bbe9b +a1aaefac-f42c-493e-8116-34be9ec60f3f +38d89476-cdaf-4a7a-9ccc-72903d88cb23 +c2d547c0-02a8-417b-97e0-0bd2566362a9 +db94d39d-c703-4651-a29f-d4ac08938f07 +4d36aed7-a353-44c2-b4f4-6e50cc5f2da2 +418e44e8-f7bd-4348-90c2-fb44086bf6ae +91cc59a6-ecb2-4009-86e1-e796c8b03165 +4e72fe55-e77a-4b12-ae1f-b88c8916f515 +a71e2d09-ee44-44d2-82dc-e3b0b36a4f23 +701d8d22-e251-4481-a88e-a6b229e9e547 +7e2f5732-8c0f-4de7-8cb5-d49a37b5f053 +603c2b8b-a1b8-4980-a0f2-8b9b2ce4bc32 +7033f59d-e2b1-4e4e-91e8-2e9fe4e6b086 +a7337c93-4230-48c3-8c3c-25aaa425b725 +18072a9e-4e6b-4c79-8604-6a681fc2a00b +f56b092f-724a-4c82-a62e-8a9665ce7260 +c0fd8dad-54a9-43e4-8958-8150780c150a +44623059-9b7e-4258-9796-75258267f804 +0b7fdff0-388e-419d-80aa-abfaa19398a9 +f4a02a4e-1146-4dd0-8bc2-f007740b33d7 +0b83778a-eada-46ed-be7d-d20a2caa8a99 +911fc945-0b68-48f2-9516-87ab84205f45 +d58aba9f-398f-492a-93fb-c69a33a7130c +b4222ec0-2692-4a29-8bf9-28b5c477ca42 +b6d811a8-729c-41c8-8b72-4e16c485de90 +36c6873d-c9c9-4a26-841e-bb9a3b77e510 +2dd5bdbe-71de-4ae7-b3ce-15e2f26256c3 +a13264cd-f234-49ae-89d8-0ca3055ced07 +63e924af-3aa6-4db0-8402-932a74fabafa +2c1ac0d2-b796-4c23-9f6c-e86bb546bed3 +3280898a-d58f-4707-82ec-4c4b2f2cb8c5 +056d324e-cf2d-4e7d-93fe-17401383d9ee +e5e1f9bf-628a-4a16-be50-82100d10c745 +b2b14b84-6bdc-4a54-81e7-5fbe25f2c41b +919d269c-09f4-4ec3-971a-00433bec409f +8fab32b1-d575-405e-bdb7-8ac0cc3cb886 \ No newline at end of file diff --git 
a/infra/kafka-connect/relevant-list1.properties b/infra/kafka-connect/relevant-list1.properties
new file mode 100644
index 000000000..2c3370668
--- /dev/null
+++ b/infra/kafka-connect/relevant-list1.properties
@@ -0,0 +1,7 @@
+# These are standard kafka connect parameters, needed for ALL connectors
+name=RelevantList1
+connector.class=org.apache.kafka.connect.file.FileStreamSourceConnector
+tasks.max=1
+# Parameters can be found here: https://github.com/apache/kafka/blob/trunk/connect/file/src/main/java/org/apache/kafka/connect/file/FileStreamSourceConnector.java
+file=/tmp/lista_relevancia_1.txt
+topic=trabalhe-conosco-backend-dev.relevant.list.1
diff --git a/infra/kafka-connect/relevant-list2.properties b/infra/kafka-connect/relevant-list2.properties
new file mode 100644
index 000000000..a7d74e5fd
--- /dev/null
+++ b/infra/kafka-connect/relevant-list2.properties
@@ -0,0 +1,7 @@
+# These are standard kafka connect parameters, needed for ALL connectors
+name=RelevantList2
+connector.class=org.apache.kafka.connect.file.FileStreamSourceConnector
+tasks.max=1
+# Parameters can be found here: https://github.com/apache/kafka/blob/trunk/connect/file/src/main/java/org/apache/kafka/connect/file/FileStreamSourceConnector.java
+file=/tmp/lista_relevancia_2.txt
+topic=trabalhe-conosco-backend-dev.relevant.list.2
diff --git a/infra/kafka-connect/worker.properties b/infra/kafka-connect/worker.properties
new file mode 100644
index 000000000..1c8ccc5e4
--- /dev/null
+++ b/infra/kafka-connect/worker.properties
@@ -0,0 +1,17 @@
+# for more information, visit: http://docs.confluent.io/3.2.0/connect/userguide.html#common-worker-configs
+bootstrap.servers=kafka:9092
+key.converter=org.apache.kafka.connect.json.JsonConverter
+key.converter.schemas.enable=true
+value.converter=org.apache.kafka.connect.json.JsonConverter
+value.converter.schemas.enable=true
+internal.key.converter=org.apache.kafka.connect.json.JsonConverter
+internal.key.converter.schemas.enable=false
+internal.value.converter=org.apache.kafka.connect.json.JsonConverter
+internal.value.converter.schemas.enable=false
+# Rest API
+rest.port=8086
+rest.host.name=127.0.0.1
+# this config is only for standalone workers
+offset.storage.file.filename=standalone.offsets
+offset.flush.interval.ms=10000
+plugin.path=/tmp/plugins/
\ No newline at end of file
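The standalone worker above exposes the Kafka Connect REST API on port 8086 (also mapped in infra/docker-compose.yml), which gives a quick way to confirm that the four connectors defined in these .properties files registered and are running. A hedged sketch:

```bash
# Names come from the name= keys above: CsvSpoolDir, ElasticsearchSinkConnector,
# RelevantList1 and RelevantList2.
curl -s http://localhost:8086/connectors

# Per-connector state and task status.
curl -s http://localhost:8086/connectors/RelevantList1/status
```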
+"%Y-%m-%dT%H:%M:%SZ") -t wurstmeister/kafka . + - docker pull confluentinc/cp-kafkacat + +before_script: + - docker-compose -f test/docker-compose.yml up -d zookeeper + - docker-compose -f test/docker-compose.yml scale kafka=2 + +script: + # Shellcheck main source files + - shellcheck -s bash broker-list.sh create-topics.sh start-kafka.sh download-kafka.sh versions.sh + - cd test + # Shellcheck the tests + - shellcheck -x -e SC1090 -s bash *.sh **/*.sh + - ./verifyImageLabels.sh # Verify docker image's label + - sleep 5 # Wait for containers to start + - docker ps -a + - ./runAllTests.sh + # End-to-End scenario tests + - cd scenarios + - ./runJmxScenario.sh + - cd $TRAVIS_BUILD_DIR + +after_script: + - docker-compose stop + +# This will deploy from master. Might want to have a single release branch for a little more control +deploy: + - provider: script + script: bash docker_push latest + on: + repo: wurstmeister/kafka-docker + branch: master + condition: $CURRENT = $LATEST + - provider: script + script: bash docker_push "${TRAVIS_SCALA_VERSION}-${KAFKA_VERSION}" + on: + repo: wurstmeister/kafka-docker + # branch: release diff --git a/infra/kafka-docker/CHANGELOG.md b/infra/kafka-docker/CHANGELOG.md new file mode 100644 index 000000000..fd9e3a1a0 --- /dev/null +++ b/infra/kafka-docker/CHANGELOG.md @@ -0,0 +1,56 @@ +Changelog +========= + +Kafka features are not tied to a specific kafka-docker version (ideally all changes will be merged into all branches). Therefore, this changelog will track changes to the image by date. + +21-Nov-2018 +----------- + +- Update to latest Kafka: `2.1.0` +- Set scala version for Kafka `2.1.0` and `2.0.1` to recommended `2.12` + +10-Nov-2018 +----------- + +- Update to Kafka `2.0.0` -> `2.0.1`. +- Update glibc to `2.28-r0` +- Update base image to openjdk:8u181-jre-alpine + +29-Jun-2018 +----------- + +- **MAJOR:** Use new docker image labelling (`-`) and use travis to publish images. +- Update base image to openjdk:8u171-jre-alpine + +20-Apr-2018 +----------- + +- Issue #312 - Fix conflict between KAFKA_xxx broker config values (e.g. KAFKA_JMX_OPTS) and container configuration options (e.g. KAFKA_CREATE_TOPICS) + +19-Apr-2018 +----------- + +- Issue #310 - Only return Apache download mirrors that can supply required kafka/scala version + +11-Apr-2018 +----------- + +- Issue #313 - Fix parsing of environment value substitution when spaces included. + +08-Apr-2018 +----------- + +- Issue #208 - Add `KAFKA_CREATE_TOPICS_SEPARATOR` to allow custom input, such as multi-line YAML. +- Issue #298 - Fix SNAPPY compression support by adding glibc port back into image (removed when switching to openjdk base image in #7a25ade) + +04-Apr-2018 +----------- + +- Support `_{PORT_COMMAND}` placeholder. + +03-Apr-2018 +----------- + +- **BREAKING:** removed `KAFKA_ADVERTISED_PROTOCOL_NAME` and `KAFKA_PROTOCOL_NAME`. Use the canonical [Kafka Configuration](http://kafka.apache.org/documentation.html#brokerconfigs) instead. +- Support `_{HOSTNAME_COMMAND}` placeholder. 
+- **BREAKING:** Make `KAFKA_ZOOKEEPER_CONNECT` mandatory diff --git a/infra/kafka-docker/Dockerfile b/infra/kafka-docker/Dockerfile new file mode 100644 index 000000000..fc5b5b928 --- /dev/null +++ b/infra/kafka-docker/Dockerfile @@ -0,0 +1,45 @@ +FROM openjdk:8u181-jre-alpine + +ARG kafka_version=2.0.1 +ARG scala_version=2.12 +ARG glibc_version=2.28-r0 +ARG vcs_ref=unspecified +ARG build_date=unspecified + +LABEL org.label-schema.name="kafka" \ + org.label-schema.description="Apache Kafka" \ + org.label-schema.build-date="${build_date}" \ + org.label-schema.vcs-url="https://github.com/wurstmeister/kafka-docker" \ + org.label-schema.vcs-ref="${vcs_ref}" \ + org.label-schema.version="${scala_version}_${kafka_version}" \ + org.label-schema.schema-version="1.0" \ + maintainer="wurstmeister" + +ENV KAFKA_VERSION=$kafka_version \ + SCALA_VERSION=$scala_version \ + KAFKA_HOME=/opt/kafka \ + GLIBC_VERSION=$glibc_version + +ENV PATH=${PATH}:${KAFKA_HOME}/bin + +COPY download-kafka.sh start-kafka.sh broker-list.sh create-topics.sh versions.sh /tmp/ + +RUN apk add --no-cache bash curl jq docker \ + && mkdir /opt \ + && chmod a+x /tmp/*.sh \ + && mv /tmp/start-kafka.sh /tmp/broker-list.sh /tmp/create-topics.sh /tmp/versions.sh /usr/bin \ + && sync && /tmp/download-kafka.sh \ + && tar xfz /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -C /opt \ + && rm /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz \ + && ln -s /opt/kafka_${SCALA_VERSION}-${KAFKA_VERSION} /opt/kafka \ + && rm /tmp/* \ + && wget https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-${GLIBC_VERSION}.apk \ + && apk add --no-cache --allow-untrusted glibc-${GLIBC_VERSION}.apk \ + && rm glibc-${GLIBC_VERSION}.apk + +COPY overrides /opt/overrides + +VOLUME ["/kafka"] + +# Use "exec" form so that it runs as PID 1 (useful for graceful shutdown) +CMD ["start-kafka.sh"] diff --git a/infra/kafka-docker/LICENSE b/infra/kafka-docker/LICENSE new file mode 100644 index 000000000..e06d20818 --- /dev/null +++ b/infra/kafka-docker/LICENSE @@ -0,0 +1,202 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+
diff --git a/infra/kafka-docker/README.md b/infra/kafka-docker/README.md
new file mode 100644
index 000000000..57131dd94
--- /dev/null
+++ b/infra/kafka-docker/README.md
@@ -0,0 +1,217 @@
+[![Docker Pulls](https://img.shields.io/docker/pulls/wurstmeister/kafka.svg)](https://hub.docker.com/r/wurstmeister/kafka/)
+[![Docker Stars](https://img.shields.io/docker/stars/wurstmeister/kafka.svg)](https://hub.docker.com/r/wurstmeister/kafka/)
+[![](https://images.microbadger.com/badges/version/wurstmeister/kafka.svg)](https://microbadger.com/images/wurstmeister/kafka "Get your own version badge on microbadger.com")
+[![](https://images.microbadger.com/badges/image/wurstmeister/kafka.svg)](https://microbadger.com/images/wurstmeister/kafka "Get your own image badge on microbadger.com")
+[![Build Status](https://travis-ci.org/wurstmeister/kafka-docker.svg?branch=master)](https://travis-ci.org/wurstmeister/kafka-docker)
+
+kafka-docker
+============
+
+Dockerfile for [Apache Kafka](http://kafka.apache.org/)
+
+The image is available directly from [Docker Hub](https://hub.docker.com/r/wurstmeister/kafka/)
+
+Tags and releases
+-----------------
+
+All versions of the image are built from the same set of scripts with only minor variations (i.e. certain features are not supported on older versions). The version format mirrors the Kafka format, `<scala version>-<kafka version>`. Initially, all images are built with the recommended version of scala documented on [http://kafka.apache.org/downloads](http://kafka.apache.org/downloads). Available tags are:
+
+- `2.12-2.1.0`
+- `2.12-2.0.1`
+- `2.11-1.1.1`
+- `2.11-1.0.2`
+- `2.11-0.11.0.3`
+- `2.11-0.10.2.2`
+- `2.11-0.9.0.1`
+- `2.10-0.8.2.2`
+
+Every time the image is updated, all tags will be pushed with the latest updates. This should allow for greater consistency across tags, as well as any security updates that have been made to the base image.
+
+---
+
+## Announcements
+
+* **11-Nov-2018** - Update base image to openjdk 181 ([Release notes](https://www.oracle.com/technetwork/java/javase/8u181-relnotes-4479407.html)). Please force pull to get these latest updates - including security patches etc.
+* **28-May-2018** - New docker image tag format - see Readme.
+
+---
+
+## Pre-Requisites
+
+- install docker-compose [https://docs.docker.com/compose/install/](https://docs.docker.com/compose/install/)
+- modify the ```KAFKA_ADVERTISED_HOST_NAME``` in [docker-compose.yml](https://raw.githubusercontent.com/wurstmeister/kafka-docker/master/docker-compose.yml) to match your docker host IP (Note: Do not use localhost or 127.0.0.1 as the host ip if you want to run multiple brokers.)
+- if you want to customize any Kafka parameters, simply add them as environment variables in ```docker-compose.yml```, e.g. in order to increase the ```message.max.bytes``` parameter set the environment to ```KAFKA_MESSAGE_MAX_BYTES: 2000000```. To turn off automatic topic creation set ```KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'false'```
+- Kafka's log4j usage can be customized by adding environment variables prefixed with ```LOG4J_```. These will be mapped to ```log4j.properties```. For example: ```LOG4J_LOGGER_KAFKA_AUTHORIZER_LOGGER=DEBUG, authorizerAppender```
+
+**NOTE:** There are several 'gotchas' with configuring networking. If you are not sure about what the requirements are, please check out the [Connectivity Guide](https://github.com/wurstmeister/kafka-docker/wiki/Connectivity) in the [Wiki](https://github.com/wurstmeister/kafka-docker/wiki)
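+
+One hedged way to satisfy the ```KAFKA_ADVERTISED_HOST_NAME``` prerequisite: the compose file at ```infra/docker-compose.yml``` in this repository reads it from the ```DOCKER_HOST_IP``` environment variable, so the host IP can be exported before starting; the ```ip route``` parsing below is an assumption and may need adjusting per distro:
+
+```bash
+# Pick the host's outbound IP (do not use localhost/127.0.0.1 with multiple brokers).
+export DOCKER_HOST_IP=$(ip route get 8.8.8.8 | awk '{for (i=1; i<NF; i++) if ($i == "src") print $(i+1)}')
+echo "$DOCKER_HOST_IP"
+```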
+
+## Usage
+
+Start a cluster:
+
+- ```docker-compose up -d```
+
+Add more brokers:
+
+- ```docker-compose scale kafka=3```
+
+Destroy a cluster:
+
+- ```docker-compose stop```
+
+## Note
+
+The default ```docker-compose.yml``` should be seen as a starting point. By default each broker will get a new port number and broker id on restart. Depending on your use case this might not be desirable. If you need to use specific ports and broker ids, modify the docker-compose configuration accordingly, e.g. [docker-compose-single-broker.yml](https://github.com/wurstmeister/kafka-docker/blob/master/docker-compose-single-broker.yml):
+
+- ```docker-compose -f docker-compose-single-broker.yml up```
+
+## Broker IDs
+
+You can configure the broker id in different ways
+
+1. explicitly, using ```KAFKA_BROKER_ID```
+2. via a command, using ```BROKER_ID_COMMAND```, e.g. ```BROKER_ID_COMMAND: "hostname | awk -F'-' '{print $$2}'"```
+
+If you don't specify a broker id in your docker-compose file, it will automatically be generated (see [https://issues.apache.org/jira/browse/KAFKA-1070](https://issues.apache.org/jira/browse/KAFKA-1070)). This allows scaling up and down. In this case it is recommended to use the ```--no-recreate``` option of docker-compose to ensure that containers are not re-created and thus keep their names and ids.
+
+
+## Automatically create topics
+
+If you want to have kafka-docker automatically create topics in Kafka during
+creation, a ```KAFKA_CREATE_TOPICS``` environment variable can be
+added in ```docker-compose.yml```.
+
+Here is an example snippet from ```docker-compose.yml```:
+
+        environment:
+          KAFKA_CREATE_TOPICS: "Topic1:1:3,Topic2:1:1:compact"
+
+```Topic1``` will have 1 partition and 3 replicas, ```Topic2``` will have 1 partition, 1 replica and a `cleanup.policy` set to `compact`. Also, see FAQ: [Topic compaction does not work](https://github.com/wurstmeister/kafka-docker/wiki#topic-compaction-does-not-work)
+
+If you wish to use multi-line YAML or some other delimiter between your topic definitions, override the default `,` separator by specifying the `KAFKA_CREATE_TOPICS_SEPARATOR` environment variable.
+
+For example, `KAFKA_CREATE_TOPICS_SEPARATOR: "$$'\n'"` would use a newline to split the topic definitions. Syntax has to follow docker-compose escaping rules, and [ANSI-C](https://www.gnu.org/software/bash/manual/html_node/ANSI_002dC-Quoting.html) quoting.
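+
+A hedged way to confirm the topics were created as requested: ```kafka-topics.sh``` ships inside the broker image, so it can be run through ```docker exec```; the ```kafka``` container name below is an assumption (check ```docker ps``` for the real one):
+
+```bash
+# List topics via Zookeeper (2.1-era tooling still accepts --zookeeper).
+docker exec -t kafka kafka-topics.sh --list --zookeeper zookeeper:2181
+
+# Partition and replica layout for one auto-created topic.
+docker exec -t kafka kafka-topics.sh --describe --topic Topic1 --zookeeper zookeeper:2181
+```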
+
+## Advertised hostname
+
+You can configure the advertised hostname in different ways:
+
+1. explicitly, using ```KAFKA_ADVERTISED_HOST_NAME```
+2. via a command, using ```HOSTNAME_COMMAND```, e.g. ```HOSTNAME_COMMAND: "route -n | awk '/UG[ \t]/{print $$2}'"```
+
+When using commands, make sure you review the "Variable Substitution" section in [https://docs.docker.com/compose/compose-file/](https://docs.docker.com/compose/compose-file/)
+
+If ```KAFKA_ADVERTISED_HOST_NAME``` is specified, it takes precedence over ```HOSTNAME_COMMAND```
+
+For AWS deployment, you can use the Metadata service to get the container host's IP:
+```
+HOSTNAME_COMMAND=wget -t3 -T2 -qO- http://169.254.169.254/latest/meta-data/local-ipv4
+```
+Reference: http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html
+
+### Injecting HOSTNAME_COMMAND into configuration
+
+If you require the value of `HOSTNAME_COMMAND` in any of your other `KAFKA_XXX` variables, use the `_{HOSTNAME_COMMAND}` string in your variable value, e.g.
+
+```
+KAFKA_ADVERTISED_LISTENERS=SSL://_{HOSTNAME_COMMAND}:9093,PLAINTEXT://9092
+```
+
+## Advertised port
+
+If the required advertised port is not static, it may be necessary to determine it programmatically. This can be done with the `PORT_COMMAND` environment variable.
+
+```
+PORT_COMMAND: "docker port $$(hostname) 9092/tcp | cut -d: -f2"
+```
+
+This can then be interpolated in any other `KAFKA_XXX` config using the `_{PORT_COMMAND}` string, e.g.
+
+```
+KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://1.2.3.4:_{PORT_COMMAND}
+```
+
+## Listener Configuration
+
+It may be useful to have the [Kafka Documentation](https://kafka.apache.org/documentation/) open, to understand the various broker listener configuration options.
+
+Since 0.9.0, Kafka has supported [multiple listener configurations](https://issues.apache.org/jira/browse/KAFKA-1809) for brokers to help support different protocols and discriminate between internal and external traffic. Later versions of Kafka have deprecated ```advertised.host.name``` and ```advertised.port```.
+
+**NOTE:** ```advertised.host.name``` and ```advertised.port``` still work as expected, but should not be used if configuring the listeners.
+
+### Example
+
+The example environment below:
+
+```
+HOSTNAME_COMMAND: curl http://169.254.169.254/latest/meta-data/public-hostname
+KAFKA_ADVERTISED_LISTENERS: INSIDE://:9092,OUTSIDE://_{HOSTNAME_COMMAND}:9094
+KAFKA_LISTENERS: INSIDE://:9092,OUTSIDE://:9094
+KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT
+KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE
+```
+
+will result in the following broker config:
+
+```
+advertised.listeners = OUTSIDE://ec2-xx-xx-xxx-xx.us-west-2.compute.amazonaws.com:9094,INSIDE://:9092
+listeners = OUTSIDE://:9094,INSIDE://:9092
+inter.broker.listener.name = INSIDE
+```
+
+### Rules
+
+* No listeners may share a port number.
+* An advertised listener must be present, by protocol name and port number, in the list of listeners.
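+
+With a listener setup like the example above, a quick smoke test is to request cluster metadata from each listener with `kafkacat` (these commands are a suggested check, not part of this repository; the hostname is the placeholder from the example):
+
+```
+# from outside the docker/overlay network, via the advertised OUTSIDE listener
+kafkacat -b ec2-xx-xx-xxx-xx.us-west-2.compute.amazonaws.com:9094 -L
+
+# from another container on the same network, via the INSIDE listener
+kafkacat -b kafka:9092 -L
+```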
Additional to the standard JMX parameters, problems could arise from the underlying RMI protocol used to connect + +* java.rmi.server.hostname - interface to bind listening port +* com.sun.management.jmxremote.rmi.port - The port to service RMI requests + +For example, to connect to a kafka running locally (assumes exposing port 1099) + + KAFKA_JMX_OPTS: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=127.0.0.1 -Dcom.sun.management.jmxremote.rmi.port=1099" + JMX_PORT: 1099 + +Jconsole can now connect at ```jconsole 192.168.99.100:1099``` + +## Docker Swarm Mode + +The listener configuration above is necessary when deploying Kafka in a Docker Swarm using an overlay network. By separating OUTSIDE and INSIDE listeners, a host can communicate with clients outside the overlay network while still benefiting from it from within the swarm. + +In addition to the multiple-listener configuration, additional best practices for operating Kafka in a Docker Swarm include: + +* Use "deploy: global" in a compose file to launch one and only one Kafka broker per swarm node. +* Use compose file version '3.2' (minimum Docker version 16.04) and the "long" port definition with the port in "host" mode instead of the default "ingress" load-balanced port binding. This ensures that outside requests are always routed to the correct broker. For example: + +``` +ports: + - target: 9094 + published: 9094 + protocol: tcp + mode: host +``` + +Older compose files using the short-version of port mapping may encounter Kafka client issues if their connection to individual brokers cannot be guaranteed. + +See the included sample compose file ```docker-compose-swarm.yml``` + +## Release process + +See the [wiki](https://github.com/wurstmeister/kafka-docker/wiki/ReleaseProcess) for information on adding or updating versions to release to Dockerhub. + +## Tutorial + +[http://wurstmeister.github.io/kafka-docker/](http://wurstmeister.github.io/kafka-docker/) diff --git a/infra/kafka-docker/broker-list.sh b/infra/kafka-docker/broker-list.sh new file mode 100644 index 000000000..73aa82209 --- /dev/null +++ b/infra/kafka-docker/broker-list.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +CONTAINERS=$(docker ps | grep 9092 | awk '{print $1}') +BROKERS=$(for CONTAINER in ${CONTAINERS}; do docker port "$CONTAINER" 9092 | sed -e "s/0.0.0.0:/$HOST_IP:/g"; done) +echo "${BROKERS/$'\n'/,}" diff --git a/infra/kafka-docker/create-topics.sh b/infra/kafka-docker/create-topics.sh new file mode 100644 index 000000000..0bacf7b5c --- /dev/null +++ b/infra/kafka-docker/create-topics.sh @@ -0,0 +1,57 @@ +#!/bin/bash + +if [[ -z "$KAFKA_CREATE_TOPICS" ]]; then + exit 0 +fi + +if [[ -z "$START_TIMEOUT" ]]; then + START_TIMEOUT=600 +fi + +start_timeout_exceeded=false +count=0 +step=10 +while netstat -lnt | awk '$4 ~ /:'"$KAFKA_PORT"'$/ {exit 1}'; do + echo "waiting for kafka to be ready" + sleep $step; + count=$((count + step)) + if [ $count -gt $START_TIMEOUT ]; then + start_timeout_exceeded=true + break + fi +done + +if $start_timeout_exceeded; then + echo "Not able to auto-create topic (waited for $START_TIMEOUT sec)" + exit 1 +fi + +# introduced in 0.10. In earlier versions, this will fail because the topic already exists. 
+# shellcheck disable=SC1091 +source "/usr/bin/versions.sh" +if [[ "$MAJOR_VERSION" == "0" && "$MINOR_VERSION" -gt "9" ]] || [[ "$MAJOR_VERSION" -gt "0" ]]; then + KAFKA_0_10_OPTS="--if-not-exists" +fi + +# Expected format: +# name:partitions:replicas:cleanup.policy +IFS="${KAFKA_CREATE_TOPICS_SEPARATOR-,}"; for topicToCreate in $KAFKA_CREATE_TOPICS; do + echo "creating topics: $topicToCreate" + IFS=':' read -r -a topicConfig <<< "$topicToCreate" + config= + if [ -n "${topicConfig[3]}" ]; then + config="--config=cleanup.policy=${topicConfig[3]}" + fi + + COMMAND="JMX_PORT='' ${KAFKA_HOME}/bin/kafka-topics.sh \\ + --create \\ + --zookeeper ${KAFKA_ZOOKEEPER_CONNECT} \\ + --topic ${topicConfig[0]} \\ + --partitions ${topicConfig[1]} \\ + --replication-factor ${topicConfig[2]} \\ + ${config} \\ + ${KAFKA_0_10_OPTS} &" + eval "${COMMAND}" +done + +wait diff --git a/infra/kafka-docker/docker-compose-single-broker.yml b/infra/kafka-docker/docker-compose-single-broker.yml new file mode 100644 index 000000000..4d8e9f511 --- /dev/null +++ b/infra/kafka-docker/docker-compose-single-broker.yml @@ -0,0 +1,16 @@ +version: '2' +services: + zookeeper: + image: wurstmeister/zookeeper + ports: + - "2181:2181" + kafka: + build: . + ports: + - "9092:9092" + environment: + KAFKA_ADVERTISED_HOST_NAME: 192.168.99.100 + KAFKA_CREATE_TOPICS: "test:1:1" + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + volumes: + - /var/run/docker.sock:/var/run/docker.sock diff --git a/infra/kafka-docker/docker-compose-swarm.yml b/infra/kafka-docker/docker-compose-swarm.yml new file mode 100644 index 000000000..86e63eb18 --- /dev/null +++ b/infra/kafka-docker/docker-compose-swarm.yml @@ -0,0 +1,22 @@ +version: '3.2' +services: + zookeeper: + image: wurstmeister/zookeeper + ports: + - "2181:2181" + kafka: + image: wurstmeister/kafka:latest + ports: + - target: 9094 + published: 9094 + protocol: tcp + mode: host + environment: + HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d' ' -f 2" + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: INSIDE://:9092,OUTSIDE://_{HOSTNAME_COMMAND}:9094 + KAFKA_LISTENERS: INSIDE://:9092,OUTSIDE://:9094 + KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE + volumes: + - /var/run/docker.sock:/var/run/docker.sock diff --git a/infra/kafka-docker/docker-compose.yml b/infra/kafka-docker/docker-compose.yml new file mode 100644 index 000000000..10de3a810 --- /dev/null +++ b/infra/kafka-docker/docker-compose.yml @@ -0,0 +1,21 @@ +version: '2' +services: + zookeeper: + image: wurstmeister/zookeeper + ports: + - "2181:2181" + kafka: + build: . 
+    ports:
+      - "9092"
+    environment:
+      KAFKA_ADVERTISED_HOST_NAME: 127.0.0.1
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+  manager:
+    image: sheepkiller/kafka-manager
+    environment:
+      ZK_HOSTS: zookeeper:2181
+    ports:
+      - "9000:9000"
\ No newline at end of file
diff --git a/infra/kafka-docker/docker_push b/infra/kafka-docker/docker_push
new file mode 100644
index 000000000..99975bb90
--- /dev/null
+++ b/infra/kafka-docker/docker_push
@@ -0,0 +1,14 @@
+#!/bin/bash -e
+
+BASE_IMAGE="wurstmeister/kafka"
+IMAGE_VERSION="$1"
+
+if [ -z "$IMAGE_VERSION" ]; then
+    echo "No IMAGE_VERSION var specified"
+    exit 1
+fi
+
+echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
+TARGET="$BASE_IMAGE:$IMAGE_VERSION"
+docker tag "$BASE_IMAGE" "$TARGET"
+docker push "$TARGET"
diff --git a/infra/kafka-docker/download-kafka.sh b/infra/kafka-docker/download-kafka.sh
new file mode 100644
index 000000000..0b50015d2
--- /dev/null
+++ b/infra/kafka-docker/download-kafka.sh
@@ -0,0 +1,23 @@
+#!/bin/bash -e
+
+# shellcheck disable=SC1091
+source "/usr/bin/versions.sh"
+
+FILENAME="kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz"
+
+## Versions prior to 0.10.2.1 are not actively mirrored
+echo "Downloading kafka $MAJOR_VERSION.$MINOR_VERSION"
+if [[ "$MAJOR_VERSION" == "0" && "$MINOR_VERSION" -lt "11" ]]; then
+    echo "Version prior to 0.10.2.1 - downloading direct"
+    url="https://archive.apache.org/dist/kafka/${KAFKA_VERSION}/${FILENAME}"
+else
+    url=$(curl --stderr /dev/null "https://www.apache.org/dyn/closer.cgi?path=/kafka/${KAFKA_VERSION}/${FILENAME}&as_json=1" | jq -r '"\(.preferred)\(.path_info)"')
+fi
+
+if [[ -z "$url" ]]; then
+    echo "Unable to determine mirror for downloading Kafka, the service may be down"
+    exit 1
+fi
+
+echo "Downloading Kafka from $url"
+wget "${url}" -O "/tmp/${FILENAME}"
diff --git a/infra/kafka-docker/hs_err_pid10256.log b/infra/kafka-docker/hs_err_pid10256.log
new file mode 100644
index 000000000..968971071
--- /dev/null
+++ b/infra/kafka-docker/hs_err_pid10256.log
@@ -0,0 +1,123 @@
+#
+# There is insufficient memory for the Java Runtime Environment to continue.
+# Native memory allocation (mmap) failed to map 268435456 bytes for Failed to commit area from 0x00000000f0000000 to 0x0000000100000000 of length 268435456.
+# Possible reasons:
+#   The system is out of physical RAM or swap space
+#   In 32 bit mode, the process size limit was hit
+# Possible solutions:
+#   Reduce memory load on the system
+#   Increase physical memory or swap space
+#   Check if swap backing store is full
+#   Use 64 bit Java on a 64 bit OS
+#   Decrease Java heap size (-Xmx/-Xms)
+#   Decrease number of Java threads
+#   Decrease Java thread stack sizes (-Xss)
+#   Set larger code cache with -XX:ReservedCodeCacheSize=
+# This output file may be truncated or incomplete.
+#
+#  Out of Memory Error (os_windows.cpp:3341), pid=10256, tid=0x00000000000032c8
+#
+# JRE version:  (8.0_191-b12) (build )
+# Java VM: Java HotSpot(TM) 64-Bit Server VM (25.191-b12 mixed mode windows-amd64 compressed oops)
+# Failed to write core dump.
Minidumps are not enabled by default on client versions of Windows +# + +--------------- T H R E A D --------------- + +Current thread (0x0000000002f77800): JavaThread "Unknown thread" [_thread_in_vm, id=13000, stack(0x0000000002a40000,0x0000000002b40000)] + +Stack: [0x0000000002a40000,0x0000000002b40000] +[error occurred during error reporting (printing stack bounds), id 0xc0000005] + +Native frames: (J=compiled Java code, j=interpreted, Vv=VM code, C=native code) + + +--------------- P R O C E S S --------------- + +Java Threads: ( => current thread ) + +Other Threads: + +=>0x0000000002f77800 (exited) JavaThread "Unknown thread" [_thread_in_vm, id=13000, stack(0x0000000002a40000,0x0000000002b40000)] + +VM state:not at safepoint (not fully initialized) + +VM Mutex/Monitor currently owned by a thread: ([mutex/lock_event]) +[0x0000000002f75e90] Heap_lock - owner thread: 0x0000000002f77800 + +GC Heap History (0 events): +No events + +Deoptimization events (0 events): +No events + +Classes redefined (0 events): +No events + +Internal exceptions (0 events): +No events + +Events (0 events): +No events + + +Dynamic libraries: +0x00007ff757960000 - 0x00007ff757997000 C:\Program Files (x86)\Common Files\Oracle\Java\javapath\java.exe +0x00007ffc9c200000 - 0x00007ffc9c3ed000 C:\WINDOWS\SYSTEM32\ntdll.dll +0x00007ffc9a920000 - 0x00007ffc9a9d3000 C:\WINDOWS\System32\KERNEL32.DLL +0x00007ffc98990000 - 0x00007ffc98c23000 C:\WINDOWS\System32\KERNELBASE.dll +0x00007ffc9c120000 - 0x00007ffc9c1c3000 C:\WINDOWS\System32\ADVAPI32.dll +0x00007ffc9bba0000 - 0x00007ffc9bc3e000 C:\WINDOWS\System32\msvcrt.dll +0x00007ffc9af60000 - 0x00007ffc9affe000 C:\WINDOWS\System32\sechost.dll +0x00007ffc9abd0000 - 0x00007ffc9acf2000 C:\WINDOWS\System32\RPCRT4.dll +0x00007ffc9b180000 - 0x00007ffc9b317000 C:\WINDOWS\System32\USER32.dll +0x00007ffc986d0000 - 0x00007ffc986f0000 C:\WINDOWS\System32\win32u.dll +0x00007ffc9b000000 - 0x00007ffc9b029000 C:\WINDOWS\System32\GDI32.dll +0x00007ffc987f0000 - 0x00007ffc9898a000 C:\WINDOWS\System32\gdi32full.dll +0x00007ffc98630000 - 0x00007ffc986d0000 C:\WINDOWS\System32\msvcp_win.dll +0x00007ffc986f0000 - 0x00007ffc987ec000 C:\WINDOWS\System32\ucrtbase.dll +0x00007ffc90610000 - 0x00007ffc90889000 C:\WINDOWS\WinSxS\amd64_microsoft.windows.common-controls_6595b64144ccf1df_6.0.17763.195_none_05b436ac07203599\COMCTL32.dll +0x00007ffc9b630000 - 0x00007ffc9b95d000 C:\WINDOWS\System32\combase.dll +0x00007ffc98380000 - 0x00007ffc983fe000 C:\WINDOWS\System32\bcryptPrimitives.dll +0x00007ffc9bac0000 - 0x00007ffc9baee000 C:\WINDOWS\System32\IMM32.DLL +0x00007e1100000000 - 0x00007e110006d000 C:\cmder\vendor\conemu-maximus5\ConEmu\ConEmuHk64.dll +0x0000000050ad0000 - 0x0000000050ba2000 C:\Program Files\Java\jre1.8.0_191\bin\msvcr100.dll +0x0000000050220000 - 0x0000000050ac4000 C:\Program Files\Java\jre1.8.0_191\bin\server\jvm.dll +0x00007ffc9baa0000 - 0x00007ffc9baa8000 C:\WINDOWS\System32\PSAPI.DLL +0x00007ffc84d20000 - 0x00007ffc84d29000 C:\WINDOWS\SYSTEM32\WSOCK32.dll +0x00007ffc96290000 - 0x00007ffc962b4000 C:\WINDOWS\SYSTEM32\WINMM.dll +0x00007ffc9b090000 - 0x00007ffc9b0fd000 C:\WINDOWS\System32\WS2_32.dll +0x00007ffc93790000 - 0x00007ffc9379a000 C:\WINDOWS\SYSTEM32\VERSION.dll +0x00007ffc96260000 - 0x00007ffc9628d000 C:\WINDOWS\SYSTEM32\WINMMBASE.dll +0x00007ffc993e0000 - 0x00007ffc9942a000 C:\WINDOWS\System32\cfgmgr32.dll +0x0000000050210000 - 0x000000005021f000 C:\Program Files\Java\jre1.8.0_191\bin\verify.dll +0x00000000501e0000 - 0x0000000050209000 C:\Program 
Files\Java\jre1.8.0_191\bin\java.dll +0x00000000501c0000 - 0x00000000501d6000 C:\Program Files\Java\jre1.8.0_191\bin\zip.dll + +VM Arguments: +jvm_args: -Xmx256M -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:+ExplicitGCInvokesConcurrent -Djava.awt.headless=true -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dkafka.logs.dir=/logs -Dlog4j.configuration=file:C:\kafka_2.12-2.0.1/config/tools-log4j.properties +java_command: kafka.admin.ConsumerGroupCommand --group sample-group --bootstrap-server localhost:32769 --describe +java_class_path (initial): C:\kafka_2.12-2.0.1\libs\activation-1.1.1.jar;C:\kafka_2.12-2.0.1\libs\aopalliance-repackaged-2.5.0-b42.jar;C:\kafka_2.12-2.0.1\libs\argparse4j-0.7.0.jar;C:\kafka_2.12-2.0.1\libs\audience-annotations-0.5.0.jar;C:\kafka_2.12-2.0.1\libs\commons-lang3-3.5.jar;C:\kafka_2.12-2.0.1\libs\connect-api-2.0.1.jar;C:\kafka_2.12-2.0.1\libs\connect-basic-auth-extension-2.0.1.jar;C:\kafka_2.12-2.0.1\libs\connect-file-2.0.1.jar;C:\kafka_2.12-2.0.1\libs\connect-json-2.0.1.jar;C:\kafka_2.12-2.0.1\libs\connect-runtime-2.0.1.jar;C:\kafka_2.12-2.0.1\libs\connect-transforms-2.0.1.jar;C:\kafka_2.12-2.0.1\libs\guava-20.0.jar;C:\kafka_2.12-2.0.1\libs\hk2-api-2.5.0-b42.jar;C:\kafka_2.12-2.0.1\libs\hk2-locator-2.5.0-b42.jar;C:\kafka_2.12-2.0.1\libs\hk2-utils-2.5.0-b42.jar;C:\kafka_2.12-2.0.1\libs\jackson-annotations-2.9.7.jar;C:\kafka_2.12-2.0.1\libs\jackson-core-2.9.7.jar;C:\kafka_2.12-2.0.1\libs\jackson-databind-2.9.7.jar;C:\kafka_2.12-2.0.1\libs\jackson-jaxrs-base-2.9.7.jar;C:\kafka_2.12-2.0.1\libs\jackson-jaxrs-json-provider-2.9.7.jar;C:\kafka_2.12-2.0.1\libs\jackson-module-jaxb-annotations-2.9.7.jar;C:\kafka_2.12-2.0.1\libs\javassist-3.22.0-CR2.jar;C:\kafka_2.12-2.0.1\libs\javax.annotation-api-1.2.jar;C:\kafka_2.12-2.0.1\libs\javax.inject-1.jar;C:\kafka_2.12-2.0.1\libs\javax.inject-2.5.0-b42.jar;C:\kafka_2.12-2.0.1\libs\javax.servlet-api-3.1.0.jar;C:\kafka_2.12-2.0.1\libs\javax.ws.rs-api-2.1.jar;C:\kafka_2.12-2.0.1\libs\jaxb-api-2.3.0.jar;C:\kafka_2.12-2.0.1\libs\jersey-client-2.27.jar;C:\kafka_2.12-2.0.1\libs\jersey-common-2.27.jar;C:\kafka_2.12-2.0.1\libs\jersey-container-servlet-2.27.jar;C:\kafka_2.12-2.0.1\libs\jersey-container-servlet-core-2.27.jar;C:\kafka_2.12-2.0.1\libs\jersey-hk2-2.27.jar;C:\kafka_2.12-2.0.1\libs\jersey-media-jaxb-2.27.jar;C:\kafka_2.12-2.0.1\libs\jersey-server-2.27.jar;C:\kafka_2.12-2.0.1\libs\jetty-client-9.4.11.v20180605.jar;C:\kafka_2.12-2.0.1\libs\jetty-continuation-9.4.11.v20180605.jar;C:\kafka_2.12-2.0.1\libs\jetty-http-9.4.11 +Launcher Type: SUN_STANDARD + +Environment Variables: +PATH=C:\cmder\bin;C:\cmder\vendor\conemu-maximus5\ConEmu\Scripts;C:\cmder\vendor\conemu-maximus5;C:\cmder\vendor\conemu-maximus5\ConEmu;C:\ProgramData\DockerDesktop\version-bin;C:\Program Files\Docker\Docker\Resources\bin;C:\Program Files (x86)\Common Files\Oracle\Java\javapath;C:\WINDOWS\system32;C:\WINDOWS;C:\WINDOWS\System32\Wbem;C:\WINDOWS\System32\WindowsPowerShell\v1.0\;C:\WINDOWS\System32\OpenSSH\;C:\Program Files\Git\cmd;C:\kafka_2.12-2.0.1\bin\windows;C:\Users\Vinicius\AppData\Local\Microsoft\WindowsApps;C:\Users\Vinicius\AppData\Local\Programs\Microsoft VS Code\bin;C:\Program Files\Git\mingw64;C:\Program Files\Git\usr\bin;C:\cmder +USERNAME=Vinicius +OS=Windows_NT +PROCESSOR_IDENTIFIER=Intel64 Family 6 Model 94 Stepping 3, GenuineIntel + + + +--------------- S Y S T E M --------------- + +OS: Windows 10.0 , 64 bit Build 17763 (10.0.17763.1) + 
+CPU:total 4 (initial active 4) (4 cores per cpu, 1 threads per core) family 6 model 94 stepping 3, cmov, cx8, fxsr, mmx, sse, sse2, sse3, ssse3, sse4.1, sse4.2, popcnt, avx, avx2, aes, clmul, erms, rtm, 3dnowpref, lzcnt, tsc, tscinvbit, bmi1, bmi2, adx + +Memory: 4k page, physical 16734928k(635400k free), swap 31099984k(150640k free) + +vm_info: Java HotSpot(TM) 64-Bit Server VM (25.191-b12) for windows-amd64 JRE (1.8.0_191-b12), built on Oct 6 2018 09:29:03 by "java_re" with MS VC++ 10.0 (VS2010) + +time: Thu Jan 03 14:56:14 2019 +elapsed time: 0 seconds (0d 0h 0m 0s) + diff --git a/infra/kafka-docker/hs_err_pid7320.log b/infra/kafka-docker/hs_err_pid7320.log new file mode 100644 index 000000000..39140fee2 --- /dev/null +++ b/infra/kafka-docker/hs_err_pid7320.log @@ -0,0 +1,123 @@ +# +# There is insufficient memory for the Java Runtime Environment to continue. +# Native memory allocation (mmap) failed to map 268435456 bytes for Failed to commit area from 0x00000000f0000000 to 0x0000000100000000 of length 268435456. +# Possible reasons: +# The system is out of physical RAM or swap space +# In 32 bit mode, the process size limit was hit +# Possible solutions: +# Reduce memory load on the system +# Increase physical memory or swap space +# Check if swap backing store is full +# Use 64 bit Java on a 64 bit OS +# Decrease Java heap size (-Xmx/-Xms) +# Decrease number of Java threads +# Decrease Java thread stack sizes (-Xss) +# Set larger code cache with -XX:ReservedCodeCacheSize= +# This output file may be truncated or incomplete. +# +# Out of Memory Error (os_windows.cpp:3341), pid=7320, tid=0x0000000000002f04 +# +# JRE version: (8.0_191-b12) (build ) +# Java VM: Java HotSpot(TM) 64-Bit Server VM (25.191-b12 mixed mode windows-amd64 compressed oops) +# Failed to write core dump. 
Minidumps are not enabled by default on client versions of Windows +# + +--------------- T H R E A D --------------- + +Current thread (0x0000000002d27800): JavaThread "Unknown thread" [_thread_in_vm, id=12036, stack(0x0000000002c20000,0x0000000002d20000)] + +Stack: [0x0000000002c20000,0x0000000002d20000] +[error occurred during error reporting (printing stack bounds), id 0xc0000005] + +Native frames: (J=compiled Java code, j=interpreted, Vv=VM code, C=native code) + + +--------------- P R O C E S S --------------- + +Java Threads: ( => current thread ) + +Other Threads: + +=>0x0000000002d27800 (exited) JavaThread "Unknown thread" [_thread_in_vm, id=12036, stack(0x0000000002c20000,0x0000000002d20000)] + +VM state:not at safepoint (not fully initialized) + +VM Mutex/Monitor currently owned by a thread: ([mutex/lock_event]) +[0x0000000002d25f90] Heap_lock - owner thread: 0x0000000002d27800 + +GC Heap History (0 events): +No events + +Deoptimization events (0 events): +No events + +Classes redefined (0 events): +No events + +Internal exceptions (0 events): +No events + +Events (0 events): +No events + + +Dynamic libraries: +0x00007ff757960000 - 0x00007ff757997000 C:\Program Files (x86)\Common Files\Oracle\Java\javapath\java.exe +0x00007ffc9c200000 - 0x00007ffc9c3ed000 C:\WINDOWS\SYSTEM32\ntdll.dll +0x00007ffc9a920000 - 0x00007ffc9a9d3000 C:\WINDOWS\System32\KERNEL32.DLL +0x00007ffc98990000 - 0x00007ffc98c23000 C:\WINDOWS\System32\KERNELBASE.dll +0x00007ffc9c120000 - 0x00007ffc9c1c3000 C:\WINDOWS\System32\ADVAPI32.dll +0x00007ffc9bba0000 - 0x00007ffc9bc3e000 C:\WINDOWS\System32\msvcrt.dll +0x00007ffc9af60000 - 0x00007ffc9affe000 C:\WINDOWS\System32\sechost.dll +0x00007ffc9abd0000 - 0x00007ffc9acf2000 C:\WINDOWS\System32\RPCRT4.dll +0x00007ffc9b180000 - 0x00007ffc9b317000 C:\WINDOWS\System32\USER32.dll +0x00007ffc986d0000 - 0x00007ffc986f0000 C:\WINDOWS\System32\win32u.dll +0x00007ffc9b000000 - 0x00007ffc9b029000 C:\WINDOWS\System32\GDI32.dll +0x00007ffc987f0000 - 0x00007ffc9898a000 C:\WINDOWS\System32\gdi32full.dll +0x00007ffc98630000 - 0x00007ffc986d0000 C:\WINDOWS\System32\msvcp_win.dll +0x00007ffc986f0000 - 0x00007ffc987ec000 C:\WINDOWS\System32\ucrtbase.dll +0x00007ffc90610000 - 0x00007ffc90889000 C:\WINDOWS\WinSxS\amd64_microsoft.windows.common-controls_6595b64144ccf1df_6.0.17763.195_none_05b436ac07203599\COMCTL32.dll +0x00007ffc9b630000 - 0x00007ffc9b95d000 C:\WINDOWS\System32\combase.dll +0x00007ffc98380000 - 0x00007ffc983fe000 C:\WINDOWS\System32\bcryptPrimitives.dll +0x00007ffc9bac0000 - 0x00007ffc9baee000 C:\WINDOWS\System32\IMM32.DLL +0x00007e1100000000 - 0x00007e110006d000 C:\cmder\vendor\conemu-maximus5\ConEmu\ConEmuHk64.dll +0x0000000050ad0000 - 0x0000000050ba2000 C:\Program Files\Java\jre1.8.0_191\bin\msvcr100.dll +0x0000000050220000 - 0x0000000050ac4000 C:\Program Files\Java\jre1.8.0_191\bin\server\jvm.dll +0x00007ffc9baa0000 - 0x00007ffc9baa8000 C:\WINDOWS\System32\PSAPI.DLL +0x00007ffc84d20000 - 0x00007ffc84d29000 C:\WINDOWS\SYSTEM32\WSOCK32.dll +0x00007ffc96290000 - 0x00007ffc962b4000 C:\WINDOWS\SYSTEM32\WINMM.dll +0x00007ffc93790000 - 0x00007ffc9379a000 C:\WINDOWS\SYSTEM32\VERSION.dll +0x00007ffc96260000 - 0x00007ffc9628d000 C:\WINDOWS\SYSTEM32\winmmbase.dll +0x00007ffc993e0000 - 0x00007ffc9942a000 C:\WINDOWS\System32\cfgmgr32.dll +0x00007ffc9b090000 - 0x00007ffc9b0fd000 C:\WINDOWS\System32\WS2_32.dll +0x0000000050210000 - 0x000000005021f000 C:\Program Files\Java\jre1.8.0_191\bin\verify.dll +0x00000000501e0000 - 0x0000000050209000 C:\Program 
Files\Java\jre1.8.0_191\bin\java.dll +0x00000000501c0000 - 0x00000000501d6000 C:\Program Files\Java\jre1.8.0_191\bin\zip.dll + +VM Arguments: +jvm_args: -Xmx256M -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:+ExplicitGCInvokesConcurrent -Djava.awt.headless=true -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dkafka.logs.dir=/logs -Dlog4j.configuration=file:C:\kafka_2.12-2.0.1/config/tools-log4j.properties +java_command: kafka.admin.ConsumerGroupCommand --group sample-group --bootstrap-server localhost:32769 --describe +java_class_path (initial): C:\kafka_2.12-2.0.1\libs\activation-1.1.1.jar;C:\kafka_2.12-2.0.1\libs\aopalliance-repackaged-2.5.0-b42.jar;C:\kafka_2.12-2.0.1\libs\argparse4j-0.7.0.jar;C:\kafka_2.12-2.0.1\libs\audience-annotations-0.5.0.jar;C:\kafka_2.12-2.0.1\libs\commons-lang3-3.5.jar;C:\kafka_2.12-2.0.1\libs\connect-api-2.0.1.jar;C:\kafka_2.12-2.0.1\libs\connect-basic-auth-extension-2.0.1.jar;C:\kafka_2.12-2.0.1\libs\connect-file-2.0.1.jar;C:\kafka_2.12-2.0.1\libs\connect-json-2.0.1.jar;C:\kafka_2.12-2.0.1\libs\connect-runtime-2.0.1.jar;C:\kafka_2.12-2.0.1\libs\connect-transforms-2.0.1.jar;C:\kafka_2.12-2.0.1\libs\guava-20.0.jar;C:\kafka_2.12-2.0.1\libs\hk2-api-2.5.0-b42.jar;C:\kafka_2.12-2.0.1\libs\hk2-locator-2.5.0-b42.jar;C:\kafka_2.12-2.0.1\libs\hk2-utils-2.5.0-b42.jar;C:\kafka_2.12-2.0.1\libs\jackson-annotations-2.9.7.jar;C:\kafka_2.12-2.0.1\libs\jackson-core-2.9.7.jar;C:\kafka_2.12-2.0.1\libs\jackson-databind-2.9.7.jar;C:\kafka_2.12-2.0.1\libs\jackson-jaxrs-base-2.9.7.jar;C:\kafka_2.12-2.0.1\libs\jackson-jaxrs-json-provider-2.9.7.jar;C:\kafka_2.12-2.0.1\libs\jackson-module-jaxb-annotations-2.9.7.jar;C:\kafka_2.12-2.0.1\libs\javassist-3.22.0-CR2.jar;C:\kafka_2.12-2.0.1\libs\javax.annotation-api-1.2.jar;C:\kafka_2.12-2.0.1\libs\javax.inject-1.jar;C:\kafka_2.12-2.0.1\libs\javax.inject-2.5.0-b42.jar;C:\kafka_2.12-2.0.1\libs\javax.servlet-api-3.1.0.jar;C:\kafka_2.12-2.0.1\libs\javax.ws.rs-api-2.1.jar;C:\kafka_2.12-2.0.1\libs\jaxb-api-2.3.0.jar;C:\kafka_2.12-2.0.1\libs\jersey-client-2.27.jar;C:\kafka_2.12-2.0.1\libs\jersey-common-2.27.jar;C:\kafka_2.12-2.0.1\libs\jersey-container-servlet-2.27.jar;C:\kafka_2.12-2.0.1\libs\jersey-container-servlet-core-2.27.jar;C:\kafka_2.12-2.0.1\libs\jersey-hk2-2.27.jar;C:\kafka_2.12-2.0.1\libs\jersey-media-jaxb-2.27.jar;C:\kafka_2.12-2.0.1\libs\jersey-server-2.27.jar;C:\kafka_2.12-2.0.1\libs\jetty-client-9.4.11.v20180605.jar;C:\kafka_2.12-2.0.1\libs\jetty-continuation-9.4.11.v20180605.jar;C:\kafka_2.12-2.0.1\libs\jetty-http-9.4.11 +Launcher Type: SUN_STANDARD + +Environment Variables: +PATH=C:\cmder\bin;C:\cmder\vendor\conemu-maximus5\ConEmu\Scripts;C:\cmder\vendor\conemu-maximus5;C:\cmder\vendor\conemu-maximus5\ConEmu;C:\ProgramData\DockerDesktop\version-bin;C:\Program Files\Docker\Docker\Resources\bin;C:\Program Files (x86)\Common Files\Oracle\Java\javapath;C:\WINDOWS\system32;C:\WINDOWS;C:\WINDOWS\System32\Wbem;C:\WINDOWS\System32\WindowsPowerShell\v1.0\;C:\WINDOWS\System32\OpenSSH\;C:\Program Files\Git\cmd;C:\kafka_2.12-2.0.1\bin\windows;C:\Users\Vinicius\AppData\Local\Microsoft\WindowsApps;C:\Users\Vinicius\AppData\Local\Programs\Microsoft VS Code\bin;C:\Program Files\Git\mingw64;C:\Program Files\Git\usr\bin;C:\cmder +USERNAME=Vinicius +OS=Windows_NT +PROCESSOR_IDENTIFIER=Intel64 Family 6 Model 94 Stepping 3, GenuineIntel + + + +--------------- S Y S T E M --------------- + +OS: Windows 10.0 , 64 bit Build 17763 (10.0.17763.1) + 
+CPU:total 4 (initial active 4) (4 cores per cpu, 1 threads per core) family 6 model 94 stepping 3, cmov, cx8, fxsr, mmx, sse, sse2, sse3, ssse3, sse4.1, sse4.2, popcnt, avx, avx2, aes, clmul, erms, rtm, 3dnowpref, lzcnt, tsc, tscinvbit, bmi1, bmi2, adx
+
+Memory: 4k page, physical 16734928k(634360k free), swap 31099984k(135784k free)
+
+vm_info: Java HotSpot(TM) 64-Bit Server VM (25.191-b12) for windows-amd64 JRE (1.8.0_191-b12), built on Oct 6 2018 09:29:03 by "java_re" with MS VC++ 10.0 (VS2010)
+
+time: Thu Jan 03 14:56:10 2019
+elapsed time: 0 seconds (0d 0h 0m 0s)
+
diff --git a/infra/kafka-docker/overrides/0.9.0.1.sh b/infra/kafka-docker/overrides/0.9.0.1.sh
new file mode 100644
index 000000000..d5e85611b
--- /dev/null
+++ b/infra/kafka-docker/overrides/0.9.0.1.sh
@@ -0,0 +1,6 @@
+#!/bin/bash -e
+
+# Kafka 0.9.x.x has a 'listeners' config by default. We need to remove this
+# as the user may be configuring via the host.name / advertised.host.name properties
+echo "Removing 'listeners' from server.properties pre-bootstrap"
+sed -i -e '/^listeners=/d' "$KAFKA_HOME/config/server.properties"
diff --git a/infra/kafka-docker/start-kafka-shell.sh b/infra/kafka-docker/start-kafka-shell.sh
new file mode 100644
index 000000000..62663e498
--- /dev/null
+++ b/infra/kafka-docker/start-kafka-shell.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -e HOST_IP="$1" -e ZK="$2" -i -t wurstmeister/kafka /bin/bash
diff --git a/infra/kafka-docker/start-kafka.sh b/infra/kafka-docker/start-kafka.sh
new file mode 100644
index 000000000..853591180
--- /dev/null
+++ b/infra/kafka-docker/start-kafka.sh
@@ -0,0 +1,149 @@
+#!/bin/bash -e
+
+# Allow specific kafka versions to perform any unique bootstrap operations
+OVERRIDE_FILE="/opt/overrides/${KAFKA_VERSION}.sh"
+if [[ -x "$OVERRIDE_FILE" ]]; then
+    echo "Executing override file $OVERRIDE_FILE"
+    eval "$OVERRIDE_FILE"
+fi
+
+# Store original IFS config, so we can restore it at various stages
+ORIG_IFS=$IFS
+
+if [[ -z "$KAFKA_ZOOKEEPER_CONNECT" ]]; then
+    echo "ERROR: missing mandatory config: KAFKA_ZOOKEEPER_CONNECT"
+    exit 1
+fi
+
+if [[ -z "$KAFKA_PORT" ]]; then
+    export KAFKA_PORT=9092
+fi
+
+create-topics.sh &
+unset KAFKA_CREATE_TOPICS
+
+if [[ -z "$KAFKA_ADVERTISED_PORT" && \
+    -z "$KAFKA_LISTENERS" && \
+    -z "$KAFKA_ADVERTISED_LISTENERS" && \
+    -S /var/run/docker.sock ]]; then
+    KAFKA_ADVERTISED_PORT=$(docker port "$(hostname)" $KAFKA_PORT | sed -r 's/.*:(.*)/\1/g')
+    export KAFKA_ADVERTISED_PORT
+fi
+
+if [[ -z "$KAFKA_BROKER_ID" ]]; then
+    if [[ -n "$BROKER_ID_COMMAND" ]]; then
+        KAFKA_BROKER_ID=$(eval "$BROKER_ID_COMMAND")
+        export KAFKA_BROKER_ID
+    else
+        # By default auto allocate broker ID
+        export KAFKA_BROKER_ID=-1
+    fi
+fi
+
+if [[ -z "$KAFKA_LOG_DIRS" ]]; then
+    export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME"
+fi
+
+if [[ -n "$KAFKA_HEAP_OPTS" ]]; then
+    sed -r -i 's/(export KAFKA_HEAP_OPTS)="(.*)"/\1="'"$KAFKA_HEAP_OPTS"'"/g' "$KAFKA_HOME/bin/kafka-server-start.sh"
+    unset KAFKA_HEAP_OPTS
+fi
+
+if [[ -n "$HOSTNAME_COMMAND" ]]; then
+    HOSTNAME_VALUE=$(eval "$HOSTNAME_COMMAND")
+
+    # Replace any occurrences of _{HOSTNAME_COMMAND} with the value
+    IFS=$'\n'
+    for VAR in $(env); do
+        if [[ $VAR =~ ^KAFKA_ && "$VAR" =~ "_{HOSTNAME_COMMAND}" ]]; then
+            eval "export ${VAR//_\{HOSTNAME_COMMAND\}/$HOSTNAME_VALUE}"
+        fi
+    done
+    IFS=$ORIG_IFS
+fi
+
+if [[ -n "$PORT_COMMAND" ]]; then
+    PORT_VALUE=$(eval "$PORT_COMMAND")
+
+    # Replace any occurrences of _{PORT_COMMAND} with the value
+    IFS=$'\n'
+    for VAR in $(env); do
+        if [[ $VAR =~ ^KAFKA_ && "$VAR" =~ "_{PORT_COMMAND}" ]]; then
+            eval "export ${VAR//_\{PORT_COMMAND\}/$PORT_VALUE}"
+        fi
+    done
+    IFS=$ORIG_IFS
+fi
+
+if [[ -n "$RACK_COMMAND" && -z "$KAFKA_BROKER_RACK" ]]; then
+    KAFKA_BROKER_RACK=$(eval "$RACK_COMMAND")
+    export KAFKA_BROKER_RACK
+fi
+
+# Try to configure minimal settings or exit with error if there isn't enough information
+if [[ -z "$KAFKA_ADVERTISED_HOST_NAME$KAFKA_LISTENERS" ]]; then
+    if [[ -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then
+        echo "ERROR: Missing environment variable KAFKA_LISTENERS. Must be specified when using KAFKA_ADVERTISED_LISTENERS"
+        exit 1
+    elif [[ -z "$HOSTNAME_VALUE" ]]; then
+        echo "ERROR: No listener or advertised hostname configuration provided in environment."
+        echo "       Please define KAFKA_LISTENERS / (deprecated) KAFKA_ADVERTISED_HOST_NAME"
+        exit 1
+    fi
+
+    # Maintain existing behaviour
+    # If HOSTNAME_COMMAND is provided, set that to the advertised.host.name value if listeners are not defined.
+    export KAFKA_ADVERTISED_HOST_NAME="$HOSTNAME_VALUE"
+fi
+
+# Issue a newline to the config file in case there is not one already
+echo "" >> "$KAFKA_HOME/config/server.properties"
+
+(
+    function updateConfig() {
+        key=$1
+        value=$2
+        file=$3
+
+        # Omit $value here, in case there is sensitive information
+        echo "[Configuring] '$key' in '$file'"
+
+        # If config exists in file, replace it. Otherwise, append to file.
+        if grep -E -q "^#?$key=" "$file"; then
+            sed -r -i "s@^#?$key=.*@$key=$value@g" "$file" #note that no config values may contain an '@' char
+        else
+            echo "$key=$value" >> "$file"
+        fi
+    }
+
+    # Fixes #312
+    # KAFKA_VERSION + KAFKA_HOME + grep -rohe KAFKA[A-Z0-9_]* /opt/kafka/bin | sort | uniq | tr '\n' '|'
+    EXCLUSIONS="|KAFKA_VERSION|KAFKA_HOME|KAFKA_DEBUG|KAFKA_GC_LOG_OPTS|KAFKA_HEAP_OPTS|KAFKA_JMX_OPTS|KAFKA_JVM_PERFORMANCE_OPTS|KAFKA_LOG|KAFKA_OPTS|"
+
+    # Read in env as a new-line separated array. This handles the case of env variables having spaces and/or carriage returns. See #313
+    IFS=$'\n'
+    for VAR in $(env)
+    do
+        env_var=$(echo "$VAR" | cut -d= -f1)
+        if [[ "$EXCLUSIONS" = *"|$env_var|"* ]]; then
+            echo "Excluding $env_var from broker config"
+            continue
+        fi
+
+        if [[ $env_var =~ ^KAFKA_ ]]; then
+            kafka_name=$(echo "$env_var" | cut -d_ -f2- | tr '[:upper:]' '[:lower:]' | tr _ .)
+            updateConfig "$kafka_name" "${!env_var}" "$KAFKA_HOME/config/server.properties"
+        fi
+
+        if [[ $env_var =~ ^LOG4J_ ]]; then
+            log4j_name=$(echo "$env_var" | tr '[:upper:]' '[:lower:]' | tr _ .)
+            updateConfig "$log4j_name" "${!env_var}" "$KAFKA_HOME/config/log4j.properties"
+        fi
+    done
+)
+
+if [[ -n "$CUSTOM_INIT_SCRIPT" ]] ; then
+    eval "$CUSTOM_INIT_SCRIPT"
+fi
+
+exec "$KAFKA_HOME/bin/kafka-server-start.sh" "$KAFKA_HOME/config/server.properties"
diff --git a/infra/kafka-docker/test/0.0/test.broker-list.kafka.sh b/infra/kafka-docker/test/0.0/test.broker-list.kafka.sh
new file mode 100644
index 000000000..f557e8f9b
--- /dev/null
+++ b/infra/kafka-docker/test/0.0/test.broker-list.kafka.sh
@@ -0,0 +1,19 @@
+#!/bin/bash -e
+
+testBrokerList() {
+    # Need to get the proxied ports for kafka
+    PORT1=$(docker inspect -f '{{ index .NetworkSettings.Ports "9092/tcp" 0 "HostPort" }}' test_kafka_1)
+    PORT2=$(docker inspect -f '{{ index .NetworkSettings.Ports "9092/tcp" 0 "HostPort" }}' test_kafka_2)
+
+    RESULT=$(HOST_IP=1.2.3.4 broker-list.sh)
+
+    echo "$RESULT"
+
+    if [[ "$RESULT" == "1.2.3.4:$PORT1,1.2.3.4:$PORT2" || "$RESULT" == "1.2.3.4:$PORT2,1.2.3.4:$PORT1" ]]; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+testBrokerList
diff --git a/infra/kafka-docker/test/0.0/test.create-topics-custom-separator.kafka.sh b/infra/kafka-docker/test/0.0/test.create-topics-custom-separator.kafka.sh
new file mode 100644
index 000000000..abd031c1e
--- /dev/null
+++ b/infra/kafka-docker/test/0.0/test.create-topics-custom-separator.kafka.sh
@@ -0,0 +1,43 @@
+#!/bin/bash -e
+
+# NOTE: create-topics.sh requires KAFKA_PORT and KAFKA_ZOOKEEPER_CONNECT to be set (see docker-compose.yml)
+testCreateTopicsCustomSeparator() {
+    NOW=$(date +%s)
+
+    # TOPICS array contains the topic name to create / validate
+    TOPICS[0]="one-$NOW"
+    TOPICS[1]="two-$NOW"
+    TOPICS[2]="three-$NOW"
+
+    export KAFKA_CREATE_TOPICS_SEPARATOR=$'\n'
+    KAFKA_CREATE_TOPICS=$(cat <<-EOF
	${TOPICS[0]}:1:1
	${TOPICS[1]}:1:1
	${TOPICS[2]}:1:1
	EOF
+    )
+    export KAFKA_CREATE_TOPICS
+
+    create-topics.sh
+
+    # Loop through each array, validate that topic exists
+    for i in "${!TOPICS[@]}"; do
+        TOPIC=${TOPICS[i]}
+
+        echo "Validating topic '$TOPIC'"
+
+        EXISTS=$(/opt/kafka/bin/kafka-topics.sh --zookeeper "$KAFKA_ZOOKEEPER_CONNECT" --list --topic "$TOPIC")
+        if [[ "$EXISTS" != "$TOPIC" ]]; then
+            echo "$TOPIC topic not created"
+            return 1
+        fi
+    done
+
+    return 0
+}
+
+# mock the netstat call as made by the create-topics.sh script
+function netstat() { echo "1 2 3 :$KAFKA_PORT"; }
+export -f netstat
+
+testCreateTopicsCustomSeparator
diff --git a/infra/kafka-docker/test/0.0/test.path.kafka.sh b/infra/kafka-docker/test/0.0/test.path.kafka.sh
new file mode 100644
index 000000000..ce02401b3
--- /dev/null
+++ b/infra/kafka-docker/test/0.0/test.path.kafka.sh
@@ -0,0 +1,15 @@
+#!/bin/bash -e
+
+# NOTE: this checks that /opt/kafka/bin is on the PATH inside the docker container
+
+testPath() {
+    echo "Checking PATH '$PATH'"
+    if [[ !
"$PATH" =~ "/opt/kafka/bin" ]]; then + echo "path is not set correctly: $PATH" + return 1 + fi + + return 0 +} + +testPath diff --git a/infra/kafka-docker/test/0.0/test.read-write.kafkacat.sh b/infra/kafka-docker/test/0.0/test.read-write.kafkacat.sh new file mode 100644 index 000000000..325455bb0 --- /dev/null +++ b/infra/kafka-docker/test/0.0/test.read-write.kafkacat.sh @@ -0,0 +1,10 @@ +#!/bin/bash -e + +source version.functions + +testReadWrite() { + echo 'foo,bar' | eval "kafkacat -b $BROKER_LIST $KAFKACAT_OPTS -P -D, -t readwrite" + eval "kafkacat -b $BROKER_LIST $KAFKACAT_OPTS -C -e -t readwrite" +} + +testReadWrite diff --git a/infra/kafka-docker/test/0.0/test.start-kafka-advertised-host.kafka.sh b/infra/kafka-docker/test/0.0/test.start-kafka-advertised-host.kafka.sh new file mode 100644 index 000000000..1791d60b5 --- /dev/null +++ b/infra/kafka-docker/test/0.0/test.start-kafka-advertised-host.kafka.sh @@ -0,0 +1,20 @@ +#!/bin/bash -e + +source test.functions + +testAdvertisedHost() { + # Given a hostname is provided + export KAFKA_ADVERTISED_HOST_NAME=monkey + export KAFKA_ADVERTISED_PORT=8888 + + # When the script is invoked + source "$START_KAFKA" + + # Then the configuration file is correct + assertExpectedConfig "advertised.host.name=monkey" + assertExpectedConfig "advertised.port=8888" + assertAbsent 'advertised.listeners' + assertAbsent 'listeners' +} + +testAdvertisedHost diff --git a/infra/kafka-docker/test/0.0/test.start-kafka-broker-id.kafka.sh b/infra/kafka-docker/test/0.0/test.start-kafka-broker-id.kafka.sh new file mode 100644 index 000000000..4b1326482 --- /dev/null +++ b/infra/kafka-docker/test/0.0/test.start-kafka-broker-id.kafka.sh @@ -0,0 +1,51 @@ +#!/bin/bash -e + +source test.functions + +testManualBrokerId() { + echo "testManualBrokerId" + + # Given a Broker Id is provided + export KAFKA_LISTENERS=PLAINTEXT://:9092 + export KAFKA_BROKER_ID=57 + + # When the script is invoked + source "$START_KAFKA" + + # Then the broker Id is set + assertExpectedConfig 'broker.id=57' +} + +testAutomaticBrokerId() { + echo "testAutomaticBrokerId" + + # Given no Broker Id is provided + export KAFKA_LISTENERS=PLAINTEXT://:9092 + unset KAFKA_BROKER_ID + + # When the script is invoked + source "$START_KAFKA" + + # Then the broker Id is configured to automatic + assertExpectedConfig 'broker.id=-1' +} + +testBrokerIdCommand() { + echo "testBrokerIdCommand" + + # Given a Broker Id command is provided + export KAFKA_LISTENERS=PLAINTEXT://:9092 + unset KAFKA_BROKER_ID + export BROKER_ID_COMMAND='f() { echo "23"; }; f' + + # When the script is invoked + source "$START_KAFKA" + + # Then the broker Id is the result of the command + assertExpectedConfig 'broker.id=23' +} + + +testManualBrokerId \ + && testAutomaticBrokerId \ + && testBrokerIdCommand diff --git a/infra/kafka-docker/test/0.0/test.start-kafka-bug-312-kafka-env.kafka.sh b/infra/kafka-docker/test/0.0/test.start-kafka-bug-312-kafka-env.kafka.sh new file mode 100644 index 000000000..5f0167c35 --- /dev/null +++ b/infra/kafka-docker/test/0.0/test.start-kafka-bug-312-kafka-env.kafka.sh @@ -0,0 +1,25 @@ +#!/bin/bash -e + +source test.functions + +testKafkaEnv() { + # Given required settings are provided + export KAFKA_ADVERTISED_HOST_NAME="testhost" + export KAFKA_OPTS="-Djava.security.auth.login.config=/kafka_server_jaas.conf" + + # When the script is invoked + source "$START_KAFKA" + + # Then env should remain untouched + if [[ ! 
"$KAFKA_OPTS" == "-Djava.security.auth.login.config=/kafka_server_jaas.conf" ]]; then + echo "KAFKA_OPTS not set to expected value. $KAFKA_OPTS" + exit 1 + fi + + # And the broker config should not be set + assertAbsent 'opts' + + echo " > Set KAFKA_OPTS=$KAFKA_OPTS" +} + +testKafkaEnv diff --git a/infra/kafka-docker/test/0.0/test.start-kafka-bug-313-kafka-opts.kafka.sh b/infra/kafka-docker/test/0.0/test.start-kafka-bug-313-kafka-opts.kafka.sh new file mode 100644 index 000000000..df7b4018f --- /dev/null +++ b/infra/kafka-docker/test/0.0/test.start-kafka-bug-313-kafka-opts.kafka.sh @@ -0,0 +1,23 @@ +#!/bin/bash -e + +source test.functions + +testKafkaOpts() { + # Given required settings are provided + export KAFKA_ADVERTISED_HOST_NAME="testhost" + # .. and a CUSTOM_INIT_SCRIPT with spaces + export CUSTOM_INIT_SCRIPT="export KAFKA_OPTS=-Djava.security.auth.login.config=/kafka_server_jaas.conf" + + # When the script is invoked + source "$START_KAFKA" + + # Then the custom init script should be evaluated + if [[ ! "$KAFKA_OPTS" == "-Djava.security.auth.login.config=/kafka_server_jaas.conf" ]]; then + echo "KAFKA_OPTS not set to expected value. $KAFKA_OPTS" + exit 1 + fi + + echo " > Set KAFKA_OPTS=$KAFKA_OPTS" +} + +testKafkaOpts diff --git a/infra/kafka-docker/test/0.0/test.start-kafka-host-name.kafka.sh b/infra/kafka-docker/test/0.0/test.start-kafka-host-name.kafka.sh new file mode 100644 index 000000000..08dc21137 --- /dev/null +++ b/infra/kafka-docker/test/0.0/test.start-kafka-host-name.kafka.sh @@ -0,0 +1,18 @@ +#!/bin/bash -e + +source test.functions + +testHostnameCommand() { + # Given a hostname command is provided + export HOSTNAME_COMMAND='f() { echo "my-host"; }; f' + + # When the script is invoked + source "$START_KAFKA" + + # Then the configuration uses the value from the command + assertExpectedConfig 'advertised.host.name=my-host' + assertAbsent 'advertised.listeners' + assertAbsent 'listeners' +} + +testHostnameCommand diff --git a/infra/kafka-docker/test/0.0/test.start-kafka-log4j-config.kafka.sh b/infra/kafka-docker/test/0.0/test.start-kafka-log4j-config.kafka.sh new file mode 100644 index 000000000..da4ff28e2 --- /dev/null +++ b/infra/kafka-docker/test/0.0/test.start-kafka-log4j-config.kafka.sh @@ -0,0 +1,17 @@ +#!/bin/bash -e + +source test.functions + +testLog4jConfig() { + # Given Log4j overrides are provided + export KAFKA_ADVERTISED_HOST_NAME="testhost" + export LOG4J_LOGGER_KAFKA=DEBUG + + # When the script is invoked + source "$START_KAFKA" + + # Then the configuration file is correct + assertExpectedLog4jConfig "log4j.logger.kafka=DEBUG" +} + +testLog4jConfig diff --git a/infra/kafka-docker/test/0.0/test.start-kafka-port-command.kafka.sh b/infra/kafka-docker/test/0.0/test.start-kafka-port-command.kafka.sh new file mode 100644 index 000000000..4c4d88336 --- /dev/null +++ b/infra/kafka-docker/test/0.0/test.start-kafka-port-command.kafka.sh @@ -0,0 +1,21 @@ +#!/bin/bash -e + +source test.functions + +testPortCommand() { + # Given a port command is provided + export PORT_COMMAND='f() { echo "12345"; }; f' + export KAFKA_ADVERTISED_LISTENERS="PLAINTEXT://1.2.3.4:_{PORT_COMMAND}" + export KAFKA_LISTENERS="PLAINTEXT://:9092" + + # When the script is invoked + source "$START_KAFKA" + + # Then the configuration uses the value from the command + assertExpectedConfig 'advertised.listeners=PLAINTEXT://1.2.3.4:12345' + assertExpectedConfig 'listeners=PLAINTEXT://:9092' + assertAbsent 'advertised.host.name' + assertAbsent 'advertised.port' +} + +testPortCommand diff --git 
a/infra/kafka-docker/test/0.0/test.start-kafka-restart.kafka.sh b/infra/kafka-docker/test/0.0/test.start-kafka-restart.kafka.sh new file mode 100644 index 000000000..68a255986 --- /dev/null +++ b/infra/kafka-docker/test/0.0/test.start-kafka-restart.kafka.sh @@ -0,0 +1,18 @@ +#!/bin/bash -e + +source test.functions + +testRestart() { + # Given a hostname is provided + export KAFKA_ADVERTISED_HOST_NAME="testhost" + + # When the container is restarted (Script invoked multiple times) + source "$START_KAFKA" + source "$START_KAFKA" + + # Then the configuration file only has one instance of the config + assertExpectedConfig 'advertised.host.name=testhost' + assertAbsent 'listeners' +} + +testRestart diff --git a/infra/kafka-docker/test/0.10/test.create-topics.kafka.sh b/infra/kafka-docker/test/0.10/test.create-topics.kafka.sh new file mode 100644 index 000000000..b5eb4bd49 --- /dev/null +++ b/infra/kafka-docker/test/0.10/test.create-topics.kafka.sh @@ -0,0 +1,43 @@ +#!/bin/bash -e + +# NOTE: create-topics.sh requires KAFKA_PORT and KAFKA_ZOOKEEPER_CONNECT to be set (see docker-compose.yml) + +testCreateTopics() { + NOW=$(date +%s) + + # TOPICS array contains the topic name to create / validate + # CLEANUP array contains the expected cleanup policy configuration for the topic + TOPICS[0]="default-$NOW" + CLEANUP[0]="" + + TOPICS[1]="compact-$NOW" + CLEANUP[1]="compact,compression.type=snappy" + + KAFKA_CREATE_TOPICS="${TOPICS[0]}:1:1,${TOPICS[1]}:2:1:compact --config=compression.type=snappy" create-topics.sh + + # Loop through each array, validate that topic exists, and correct cleanup policy is set + for i in "${!TOPICS[@]}"; do + TOPIC=${TOPICS[i]} + + echo "Validating topic '$TOPIC'" + + EXISTS=$(/opt/kafka/bin/kafka-topics.sh --zookeeper "$KAFKA_ZOOKEEPER_CONNECT" --list --topic "$TOPIC") + POLICY=$(/opt/kafka/bin/kafka-configs.sh --zookeeper "$KAFKA_ZOOKEEPER_CONNECT" --entity-type topics --entity-name "$TOPIC" --describe | awk -F'cleanup.policy=' '{print $2}') + + RESULT="$EXISTS:$POLICY" + EXPECTED="$TOPIC:${CLEANUP[i]}" + + if [[ "$RESULT" != "$EXPECTED" ]]; then + echo "$TOPIC topic not configured correctly: '$RESULT'" + return 1 + fi + done + + return 0 +} + +# mock the netstat call as made by the create-topics.sh script +function netstat() { echo "1 2 3 :$KAFKA_PORT"; } +export -f netstat + +testCreateTopics diff --git a/infra/kafka-docker/test/0.9/test.snappy.kafkacat.sh b/infra/kafka-docker/test/0.9/test.snappy.kafkacat.sh new file mode 100644 index 000000000..1be89101d --- /dev/null +++ b/infra/kafka-docker/test/0.9/test.snappy.kafkacat.sh @@ -0,0 +1,10 @@ +#!/bin/bash -e + +source version.functions + +testSnappy() { + echo 'foo,bar' | eval "kafkacat -X compression.codec=snappy -b $BROKER_LIST $KAFKACAT_OPTS -P -D, -t snappy" + eval "kafkacat -X compression.codec=snappy -b $BROKER_LIST $KAFKACAT_OPTS -C -e -t snappy" +} + +testSnappy diff --git a/infra/kafka-docker/test/0.9/test.start-kafka-advertised-listeners.kafka.sh b/infra/kafka-docker/test/0.9/test.start-kafka-advertised-listeners.kafka.sh new file mode 100644 index 000000000..92ed26f9e --- /dev/null +++ b/infra/kafka-docker/test/0.9/test.start-kafka-advertised-listeners.kafka.sh @@ -0,0 +1,18 @@ +#!/bin/bash -e + +source test.functions + +testAdvertisedListeners() { + # Given a hostname is provided + export KAFKA_ADVERTISED_LISTENERS="PLAINTEXT://my.domain.com:9040" + export KAFKA_LISTENERS="PLAINTEXT://:9092" + + # When the script is invoked + source "$START_KAFKA" + + # Then the configuration file is correct + 
assertExpectedConfig 'advertised.listeners=PLAINTEXT://my.domain.com:9040'
+    assertExpectedConfig 'listeners=PLAINTEXT://:9092'
+}
+
+testAdvertisedListeners
diff --git a/infra/kafka-docker/test/0.9/test.start-kafka-listeners.kafka.sh b/infra/kafka-docker/test/0.9/test.start-kafka-listeners.kafka.sh
new file mode 100644
index 000000000..ee5abb108
--- /dev/null
+++ b/infra/kafka-docker/test/0.9/test.start-kafka-listeners.kafka.sh
@@ -0,0 +1,19 @@
+#!/bin/bash -e
+
+source test.functions
+
+testListeners() {
+    # Given a hostname is provided
+    export KAFKA_LISTENERS="PLAINTEXT://internal.domain.com:9040"
+
+    # When the script is invoked
+    source "$START_KAFKA"
+
+    # Then the configuration file is correct
+    assertAbsent 'advertised.host.name'
+    assertAbsent 'advertised.port'
+    assertAbsent 'advertised.listeners'
+    assertExpectedConfig 'listeners=PLAINTEXT://internal.domain.com:9040'
+}
+
+testListeners
diff --git a/infra/kafka-docker/test/0.9/test.start-kafka-multiple-listeners.kafka.sh b/infra/kafka-docker/test/0.9/test.start-kafka-multiple-listeners.kafka.sh
new file mode 100644
index 000000000..0ca08f4e8
--- /dev/null
+++ b/infra/kafka-docker/test/0.9/test.start-kafka-multiple-listeners.kafka.sh
@@ -0,0 +1,25 @@
+#!/bin/bash -e
+source test.functions
+
+testMultipleAdvertisedListeners() {
+    # Given multiple advertised listeners
+    export HOSTNAME_COMMAND="f() { echo 'monkey.domain'; }; f"
+    export KAFKA_ADVERTISED_LISTENERS="INSIDE://:9092,OUTSIDE://_{HOSTNAME_COMMAND}:9094"
+    export KAFKA_LISTENERS="INSIDE://:9092,OUTSIDE://:9094"
+    export KAFKA_LISTENER_SECURITY_PROTOCOL_MAP="INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT"
+    export KAFKA_INTER_BROKER_LISTENER_NAME="INSIDE"
+
+    # When the script is invoked
+    source "$START_KAFKA"
+
+    # Then the configuration file is correct
+    assertAbsent "advertised.host.name"
+    assertAbsent "advertised.port"
+
+    assertExpectedConfig "advertised.listeners=INSIDE://:9092,OUTSIDE://monkey.domain:9094"
+    assertExpectedConfig "listeners=INSIDE://:9092,OUTSIDE://:9094"
+    assertExpectedConfig "listener.security.protocol.map=INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT"
+    assertExpectedConfig "inter.broker.listener.name=INSIDE"
+}
+
+testMultipleAdvertisedListeners
diff --git a/infra/kafka-docker/test/Readme.md b/infra/kafka-docker/test/Readme.md
new file mode 100644
index 000000000..d6c3bc3d9
--- /dev/null
+++ b/infra/kafka-docker/test/Readme.md
@@ -0,0 +1,32 @@
+Tests
+=====
+
+This directory contains some basic tests to validate functionality after building.
+
+To execute
+----------
+
+```
+cd test
+docker-compose up -d zookeeper
+docker-compose scale kafka=2
+./runAllTests.sh
+```
+
+Run selected tests
+------------------
+
+### Kafka
+
+```
+docker-compose run --rm kafkatest <test pattern>
+```
+
+### Kafkacat
+
+```
+BROKER_LIST=$(./internal-broker-list.sh) [KAFKA_VERSION=<kafka version>] docker-compose run --rm kafkacattest <test pattern>
+```
+
+- `<kafka version>` is the Kafka version that the tests are targeting. Normally this environment variable should not need to be specified. The default should be the latest image version. Added for CI support.
+- `<test pattern>` can be an individual filename, or a pattern such as `'0.0/test.start-kafka*.kafka.sh'`
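+
+For example, to run just the kafkacat read/write test against Kafka 2.1.0 (the version and pattern here are illustrative):
+
+```
+cd test
+BROKER_LIST=$(./internal-broker-list.sh) KAFKA_VERSION=2.1.0 docker-compose run --rm kafkacattest '0.0/test.read-write.kafkacat.sh'
+```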
diff --git a/infra/kafka-docker/test/docker-compose.yml b/infra/kafka-docker/test/docker-compose.yml
new file mode 100644
index 000000000..baf0f0bc4
--- /dev/null
+++ b/infra/kafka-docker/test/docker-compose.yml
@@ -0,0 +1,45 @@
+version: '2.1'
+services:
+  zookeeper:
+    image: wurstmeister/zookeeper
+    ports:
+      - "2181"
+  kafka:
+    image: wurstmeister/kafka
+    ports:
+      - "9092"
+    environment:
+      HOSTNAME_COMMAND: "echo $$(hostname)"
+      KAFKA_ADVERTISED_PORT: 9092
+      KAFKA_PORT: 9092
+      BROKER_ID_COMMAND: "docker inspect --format '{{ .Name }}' $$(hostname) | awk -F_ '{ printf $$NF }'"
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+
+  kafkatest:
+    image: wurstmeister/kafka
+    environment:
+      KAFKA_PORT: 9092
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+    volumes:
+      - .:/tests
+      - /var/run/docker.sock:/var/run/docker.sock
+    working_dir: /tests
+    entrypoint:
+      - ./runTestPattern.sh
+    command:
+      - "*/*.kafka.sh"
+
+  kafkacattest:
+    image: confluentinc/cp-kafkacat
+    environment:
+      - BROKER_LIST
+      - KAFKA_VERSION=${KAFKA_VERSION-2.1.0}
+    volumes:
+      - .:/tests
+    working_dir: /tests
+    entrypoint:
+      - ./runTestPattern.sh
+    command:
+      - "*/*.kafkacat.sh"
diff --git a/infra/kafka-docker/test/internal-broker-list.sh b/infra/kafka-docker/test/internal-broker-list.sh
new file mode 100644
index 000000000..dac67a839
--- /dev/null
+++ b/infra/kafka-docker/test/internal-broker-list.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+CONTAINERS=$(docker inspect -f '{{ .NetworkSettings.Networks.test_default.IPAddress }}' test_kafka_1 test_kafka_2 | awk '{printf "%s:9092\n", $1}' | tr '\n' ',')
+echo "${CONTAINERS%,}"
diff --git a/infra/kafka-docker/test/runAllTests.sh b/infra/kafka-docker/test/runAllTests.sh
new file mode 100644
index 000000000..a575b21fb
--- /dev/null
+++ b/infra/kafka-docker/test/runAllTests.sh
@@ -0,0 +1,25 @@
+#!/bin/bash -e
+
+BROKER_LIST=$(./internal-broker-list.sh)
+export BROKER_LIST
+
+echo "BROKER_LIST=$BROKER_LIST"
+
+runAll() {
+    # Tests that require kafka
+    docker-compose run --rm kafkatest
+
+    RESULT=$?
+    if [[ $RESULT -eq 0 ]]; then
+        # Tests that require kafkacat
+        docker-compose run --rm kafkacattest
+        RESULT=$?
+    fi
+
+    return $RESULT
+}
+
+runAll
+result=$?
+echo "exit status $result"
+exit $result
diff --git a/infra/kafka-docker/test/runTestPattern.sh b/infra/kafka-docker/test/runTestPattern.sh
new file mode 100644
index 000000000..16220eeb4
--- /dev/null
+++ b/infra/kafka-docker/test/runTestPattern.sh
@@ -0,0 +1,50 @@
+#!/bin/bash -e
+
+source version.functions
+
+PATTERN=$1
+VERSION=$KAFKA_VERSION
+
+# Allow version to be overridden by -v/--version flag
+while [[ "$#" -gt 0 ]]; do
+    case $1 in
+        -v|--version)
+            VERSION="$2";
+            shift
+            ;;
+        *)
+            PATTERN="$1"
+            ;;
+    esac
+    shift
+done
+
+echo ""
+echo ""
+echo "Running tests for Kafka $VERSION with pattern $PATTERN"
+
+runPattern() {
+    for t in $PATTERN; do
+        echo
+        echo "===================================="
+
+        # only run tests compatible with this version of Kafka
+        TARGET=$(echo "$t" | cut -d/ -f1)
+        RESULT=$(compareVersion "$VERSION" "$TARGET")
+        echo "Kafka $VERSION is '$RESULT' target $TARGET of test $t"
+        if [[ "$RESULT" != "<" ]]; then
+            echo " testing '$t'"
+            ( source "$t" )
+            status=$?
+            if [[ -z "$status" || ! "$status" -eq 0 ]]; then
+                return $status
+            fi
+        fi
+    done
+
+    return $?
+}
+
+runPattern
+
+exit $?
diff --git a/infra/kafka-docker/test/scenarios/Readme.md b/infra/kafka-docker/test/scenarios/Readme.md
new file mode 100644
index 000000000..11c968f0f
--- /dev/null
+++ b/infra/kafka-docker/test/scenarios/Readme.md
@@ -0,0 +1,27 @@
+Scenarios (end-to-end tests)
+============================
+
+These tests are supposed to test the configuration of individual features.
+
+TODO:
+-----
+
+- SSL (Client + Broker)
+- Security
+
+Done:
+-----
+
+- JMX
+
+Executing tests
+---------------
+
+These tests should spin up the required containers for full end-to-end testing and exercise the required code paths, returning a zero exit code for success and a non-zero exit code for failure.
+
+### JMX
+
+```
+cd test/scenarios
+./runJmxScenario.sh
+```
diff --git a/infra/kafka-docker/test/scenarios/jmx/docker-compose.yml b/infra/kafka-docker/test/scenarios/jmx/docker-compose.yml
new file mode 100644
index 000000000..b87c86604
--- /dev/null
+++ b/infra/kafka-docker/test/scenarios/jmx/docker-compose.yml
@@ -0,0 +1,44 @@
+version: '2'
+services:
+  zookeeper:
+    image: wurstmeister/zookeeper
+    ports:
+      - "2181"
+  kafka:
+    image: wurstmeister/kafka
+    ports:
+      - "9092"
+      - "1099"
+    environment:
+      KAFKA_ADVERTISED_HOST_NAME: kafka
+      KAFKA_ADVERTISED_PORT: 9092
+      KAFKA_PORT: 9092
+      BROKER_ID_COMMAND: "docker inspect --format '{{ .Name }}' $$(hostname) | awk -F_ '{ printf $$NF }'"
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_JMX_OPTS: "-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka -Dcom.sun.management.jmxremote.rmi.port=1099"
+      JMX_PORT: 1099
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+  jmxexporter:
+    image: sscaling/jmx-prometheus-exporter
+    ports:
+      - "5556:5556"
+    environment:
+      SERVICE_PORT: 5556
+    volumes:
+      - $PWD/jmxexporter.yml:/opt/jmx_exporter/config.yml
+
+  test:
+    image: wurstmeister/kafka
+    environment:
+      KAFKA_PORT: 9092
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+    volumes:
+      - .:/scenario
+    working_dir: /scenario
+    entrypoint:
+      - /bin/bash
+      - -c
+    command:
+      - /scenario/test.sh
+
diff --git a/infra/kafka-docker/test/scenarios/jmx/jmxexporter.yml b/infra/kafka-docker/test/scenarios/jmx/jmxexporter.yml
new file mode 100644
index 000000000..f365449c7
--- /dev/null
+++ b/infra/kafka-docker/test/scenarios/jmx/jmxexporter.yml
@@ -0,0 +1,9 @@
+---
+startDelaySeconds: 3
+hostPort: kafka:1099
+username:
+password:
+
+whitelistObjectNames: ["kafka.server:type=BrokerTopicMetrics,*"]
+rules:
+  - pattern: ".*"
diff --git a/infra/kafka-docker/test/scenarios/jmx/test.sh b/infra/kafka-docker/test/scenarios/jmx/test.sh
new file mode 100644
index 000000000..286e9a0fd
--- /dev/null
+++ b/infra/kafka-docker/test/scenarios/jmx/test.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+set -e -o pipefail
+
+echo "Sleeping 5 seconds until Kafka is started"
+sleep 5
+
+echo "Checking to see if Kafka is alive"
+echo "dump" | nc -w 20 zookeeper 2181 | fgrep "/brokers/ids/"
+
+echo "Check JMX"
+curl -s jmxexporter:5556/metrics | grep 'kafka_server_BrokerTopicMetrics_MeanRate{name="MessagesInPerSec",'
diff --git a/infra/kafka-docker/test/scenarios/runJmxScenario.sh b/infra/kafka-docker/test/scenarios/runJmxScenario.sh
new file mode 100644
index 000000000..e99b8851f
--- /dev/null
+++ b/infra/kafka-docker/test/scenarios/runJmxScenario.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+set -e -o pipefail
+
+pushd jmx
+docker-compose up -d zookeeper kafka jmxexporter
+docker-compose run --rm test
+docker-compose stop
+popd
diff --git
a/infra/kafka-docker/test/test.functions b/infra/kafka-docker/test/test.functions new file mode 100644 index 000000000..2894cadda --- /dev/null +++ b/infra/kafka-docker/test/test.functions @@ -0,0 +1,78 @@ +#!/bin/bash -e + +# Sourcing this script will replace any of the external calls in the start-kafka.sh script and mock +# any outbound calls (i.e. to docker) + +export START_KAFKA=/usr/bin/start-kafka.sh +export BROKER_CONFIG=/opt/kafka/config/server.properties +export ORIG_LOG4J_CONFIG=/opt/kafka/config/log4j.properties + +enforceOriginalFile() { + SOURCE="$1" + BACKUP="$1.bak" + + if [[ ! -f "$BACKUP" ]]; then + echo "Backing up $SOURCE to $BACKUP" + cp "$SOURCE" "$BACKUP" + else + echo "Restoring $SOURCE from $BACKUP" + cp "$BACKUP" "$SOURCE" + fi +} + +setupStartKafkaScript() { + echo "Preparing $START_KAFKA script for test" + + enforceOriginalFile "$START_KAFKA" + enforceOriginalFile "$BROKER_CONFIG" + enforceOriginalFile "$ORIG_LOG4J_CONFIG" + + # We need to remove all executable commands from the start-kafka.sh script, so it can be sourced to evaluate + # the environment variables + sed -i -E -e '/^create-topics.sh/d' -e '/^exec "\$KAFKA_HOME\/bin\/kafka-server-start.sh"/d' "$START_KAFKA" + + # Mock the call to docker port to return valid result + function docker() { echo "0.0.0.0:9092"; } + export -f docker +} + +setupStartKafkaScript + + +# Will look in the server.properties file, and check if there is an exact match for the provided input +# i.e. `assertExpectedConfig 'broker.id=123'` +# This will only succeed if there is exactly one line matching `broker.id=123` +assertExpectedConfig() { + EXPECTED=$1 + + COUNT=$(grep -E '^'"$EXPECTED"'$' "$BROKER_CONFIG" | wc -l) + RESULT=$(grep -E '^'"$EXPECTED"'$' "$BROKER_CONFIG") + echo " > $COUNT matches of $RESULT" + + [[ "$RESULT" == "$EXPECTED" && "$COUNT" == "1" ]] +} + +assertExpectedLog4jConfig() { + EXPECTED=$1 + + RESULT=$(grep -E '^'"$EXPECTED"'$' "$ORIG_LOG4J_CONFIG") + echo " > $RESULT" + + [[ "$RESULT" == "$EXPECTED" ]] +} + + +assertAbsent() { + EXPECTED_ABSENT=$1 + + RESULT=$(grep -E '^'"$EXPECTED_ABSENT" "$BROKER_CONFIG" | wc -l) + echo " > $RESULT matches for ^$EXPECTED_ABSENT" + + [[ "$RESULT" == "0" ]] +} + +printBrokerConfig() { + echo "----[ $BROKER_CONFIG ]----" + cat "$BROKER_CONFIG" | sed -nE '/^[^#]/p' + echo "--------------------------" +} diff --git a/infra/kafka-docker/test/verifyImageLabels.sh b/infra/kafka-docker/test/verifyImageLabels.sh new file mode 100644 index 000000000..f59c99855 --- /dev/null +++ b/infra/kafka-docker/test/verifyImageLabels.sh @@ -0,0 +1,20 @@ +#!/bin/bash -e + +VCS_REF=$(docker inspect -f '{{ index .Config.Labels "org.label-schema.vcs-ref"}}' wurstmeister/kafka) +echo "VCS_REF=$VCS_REF" +if [ -z "$VCS_REF" ] || [ "$VCS_REF" = "unspecified" ]; then + echo "org.label-schema.vcs-ref is empty or unspecified" + exit 1 +fi +if ! git cat-file -e "$VCS_REF^{commit}"; then + echo "$VCS_REF Not a valid git commit" + exit 1 +fi + +BUILD_DATE=$(docker inspect -f '{{ index .Config.Labels "org.label-schema.build-date"}}' wurstmeister/kafka) +echo "BUILD_DATE=$BUILD_DATE" +if ! 
date -d "$BUILD_DATE"; then + echo "$BUILD_DATE Not a valid date" + exit 1 +fi +exit 0 diff --git a/infra/kafka-docker/test/version.functions b/infra/kafka-docker/test/version.functions new file mode 100644 index 000000000..65c00f607 --- /dev/null +++ b/infra/kafka-docker/test/version.functions @@ -0,0 +1,37 @@ +#!/bin/bash -e + +# Modified from https://stackoverflow.com/a/4025065 +compareVersion() { + # Only care about major / minor + LEFT=$(echo "$1" | cut -d. -f1-2) + RIGHT=$(echo "$2" | cut -d. -f1-2) + if [[ "$LEFT" != "$RIGHT" ]] + then + local IFS=. + local i ver1=($LEFT) ver2=($RIGHT) + for ((i=0; i<${#ver1[@]}; i++)) + do + if (( "${ver1[i]}" > "${ver2[i]}" )) + then + echo ">" + return + fi + if (( "${ver1[i]}" < "${ver2[i]}" )) + then + echo "<" + return + fi + done + fi + echo "=" +} + +# https://github.com/edenhill/librdkafka/wiki/Broker-version-compatibility +# To support different broker versions, we need to configure kafkacat differently +VERSION_8=$(compareVersion "$KAFKA_VERSION" "0.8") +VERSION_9=$(compareVersion "$KAFKA_VERSION" "0.9") + +if [[ "$VERSION_8" == "=" || "$VERSION_9" == "=" ]]; then + export KAFKACAT_OPTS="-Xapi.version.request=false -Xbroker.version.fallback=$KAFKA_VERSION" + echo "[INFO] Using kafkacat opts on older version '$KAFKACAT_OPTS'" +fi diff --git a/infra/kafka-docker/versions.sh b/infra/kafka-docker/versions.sh new file mode 100644 index 000000000..d790d1a42 --- /dev/null +++ b/infra/kafka-docker/versions.sh @@ -0,0 +1,7 @@ +#!/bin/bash -e + +MAJOR_VERSION=$(echo "$KAFKA_VERSION" | cut -d. -f1) +export MAJOR_VERSION + +MINOR_VERSION=$(echo "$KAFKA_VERSION" | cut -d. -f2) +export MINOR_VERSION diff --git a/infra/start.sh b/infra/start.sh new file mode 100755 index 000000000..979b5aaaa --- /dev/null +++ b/infra/start.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +exec $(type -p java) -jar /opt/app.jar \ + --spring.data.jest.uri=http://elasticsearch:9200 \ + --spring.kafka.bootstrap-servers=kafka:9092 \ + --spring.kafka.consumer.bootstrap-servers=kafka:9092 diff --git a/mvnw b/mvnw new file mode 100644 index 000000000..5551fde8e --- /dev/null +++ b/mvnw @@ -0,0 +1,286 @@ +#!/bin/sh +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Maven2 Start Up Batch script +# +# Required ENV vars: +# ------------------ +# JAVA_HOME - location of a JDK home dir +# +# Optional ENV vars +# ----------------- +# M2_HOME - location of maven2's installed home dir +# MAVEN_OPTS - parameters passed to the Java VM when running Maven +# e.g. 
to debug Maven itself, use +# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +# MAVEN_SKIP_RC - flag to disable loading of mavenrc files +# ---------------------------------------------------------------------------- + +if [ -z "$MAVEN_SKIP_RC" ] ; then + + if [ -f /etc/mavenrc ] ; then + . /etc/mavenrc + fi + + if [ -f "$HOME/.mavenrc" ] ; then + . "$HOME/.mavenrc" + fi + +fi + +# OS specific support. $var _must_ be set to either true or false. +cygwin=false; +darwin=false; +mingw=false +case "`uname`" in + CYGWIN*) cygwin=true ;; + MINGW*) mingw=true;; + Darwin*) darwin=true + # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home + # See https://developer.apple.com/library/mac/qa/qa1170/_index.html + if [ -z "$JAVA_HOME" ]; then + if [ -x "/usr/libexec/java_home" ]; then + export JAVA_HOME="`/usr/libexec/java_home`" + else + export JAVA_HOME="/Library/Java/Home" + fi + fi + ;; +esac + +if [ -z "$JAVA_HOME" ] ; then + if [ -r /etc/gentoo-release ] ; then + JAVA_HOME=`java-config --jre-home` + fi +fi + +if [ -z "$M2_HOME" ] ; then + ## resolve links - $0 may be a link to maven's home + PRG="$0" + + # need this for relative symlinks + while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG="`dirname "$PRG"`/$link" + fi + done + + saveddir=`pwd` + + M2_HOME=`dirname "$PRG"`/.. + + # make it fully qualified + M2_HOME=`cd "$M2_HOME" && pwd` + + cd "$saveddir" + # echo Using m2 at $M2_HOME +fi + +# For Cygwin, ensure paths are in UNIX format before anything is touched +if $cygwin ; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --unix "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --unix "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --unix "$CLASSPATH"` +fi + +# For Mingw, ensure paths are in UNIX format before anything is touched +if $mingw ; then + [ -n "$M2_HOME" ] && + M2_HOME="`(cd "$M2_HOME"; pwd)`" + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" + # TODO classpath? +fi + +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + if $darwin ; then + javaHome="`dirname \"$javaExecutable\"`" + javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" + else + javaExecutable="`readlink -f \"$javaExecutable\"`" + fi + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "$JAVACMD" ] ; then + if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." >&2 + echo " We cannot execute $JAVACMD" >&2 + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." 
+fi + +CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher + +# traverses directory structure from process work directory to filesystem root +# first directory with .mvn subdirectory is considered project base directory +find_maven_basedir() { + + if [ -z "$1" ] + then + echo "Path not specified to find_maven_basedir" + return 1 + fi + + basedir="$1" + wdir="$1" + while [ "$wdir" != '/' ] ; do + if [ -d "$wdir"/.mvn ] ; then + basedir=$wdir + break + fi + # workaround for JBEAP-8937 (on Solaris 10/Sparc) + if [ -d "${wdir}" ]; then + wdir=`cd "$wdir/.."; pwd` + fi + # end of workaround + done + echo "${basedir}" +} + +# concatenates all lines of a file +concat_lines() { + if [ -f "$1" ]; then + echo "$(tr -s '\n' ' ' < "$1")" + fi +} + +BASE_DIR=`find_maven_basedir "$(pwd)"` +if [ -z "$BASE_DIR" ]; then + exit 1; +fi + +########################################################################################## +# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +# This allows using the maven wrapper in projects that prohibit checking in binary data. +########################################################################################## +if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found .mvn/wrapper/maven-wrapper.jar" + fi +else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." + fi + jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" + while IFS="=" read key value; do + case "$key" in (wrapperUrl) jarUrl="$value"; break ;; + esac + done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" + if [ "$MVNW_VERBOSE" = true ]; then + echo "Downloading from: $jarUrl" + fi + wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" + + if command -v wget > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found wget ... using wget" + fi + wget "$jarUrl" -O "$wrapperJarPath" + elif command -v curl > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found curl ... using curl" + fi + curl -o "$wrapperJarPath" "$jarUrl" + else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Falling back to using Java to download" + fi + javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" + if [ -e "$javaClass" ]; then + if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Compiling MavenWrapperDownloader.java ..." + fi + # Compiling the Java class + ("$JAVA_HOME/bin/javac" "$javaClass") + fi + if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + # Running the downloader + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Running MavenWrapperDownloader.java ..." 
+ fi + ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") + fi + fi + fi +fi +########################################################################################## +# End of extension +########################################################################################## + +export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} +if [ "$MVNW_VERBOSE" = true ]; then + echo $MAVEN_PROJECTBASEDIR +fi +MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" + +# For Cygwin, switch paths to Windows format before running java +if $cygwin; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --path --windows "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --windows "$CLASSPATH"` + [ -n "$MAVEN_PROJECTBASEDIR" ] && + MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` +fi + +WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +exec "$JAVACMD" \ + $MAVEN_OPTS \ + -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ + "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ + ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" diff --git a/mvnw.cmd b/mvnw.cmd new file mode 100644 index 000000000..e5cfb0ae9 --- /dev/null +++ b/mvnw.cmd @@ -0,0 +1,161 @@ +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM http://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. +@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Maven2 Start Up Batch script +@REM +@REM Required ENV vars: +@REM JAVA_HOME - location of a JDK home dir +@REM +@REM Optional ENV vars +@REM M2_HOME - location of maven2's installed home dir +@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands +@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending +@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven +@REM e.g. 
to debug Maven itself, use +@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files +@REM ---------------------------------------------------------------------------- + +@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' +@echo off +@REM set title of command window +title %0 +@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' +@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% + +@REM set %HOME% to equivalent of $HOME +if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") + +@REM Execute a user defined script before this one +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre +@REM check for pre script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" +if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" +:skipRcPre + +@setlocal + +set ERROR_CODE=0 + +@REM To isolate internal variables from possible post scripts, we use another setlocal +@setlocal + +@REM ==== START VALIDATION ==== +if not "%JAVA_HOME%" == "" goto OkJHome + +echo. +echo Error: JAVA_HOME not found in your environment. >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +:OkJHome +if exist "%JAVA_HOME%\bin\java.exe" goto init + +echo. +echo Error: JAVA_HOME is set to an invalid directory. >&2 +echo JAVA_HOME = "%JAVA_HOME%" >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +@REM ==== END VALIDATION ==== + +:init + +@REM Find the project base dir, i.e. the directory that contains the folder ".mvn". +@REM Fallback to current working directory if not found. + +set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% +IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir + +set EXEC_DIR=%CD% +set WDIR=%EXEC_DIR% +:findBaseDir +IF EXIST "%WDIR%"\.mvn goto baseDirFound +cd .. +IF "%WDIR%"=="%CD%" goto baseDirNotFound +set WDIR=%CD% +goto findBaseDir + +:baseDirFound +set MAVEN_PROJECTBASEDIR=%WDIR% +cd "%EXEC_DIR%" +goto endDetectBaseDir + +:baseDirNotFound +set MAVEN_PROJECTBASEDIR=%EXEC_DIR% +cd "%EXEC_DIR%" + +:endDetectBaseDir + +IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig + +@setlocal EnableExtensions EnableDelayedExpansion +for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a +@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% + +:endReadAdditionalConfig + +SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" +set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" +set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" +FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO ( + IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B +) + +@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +@REM This allows using the maven wrapper in projects that prohibit checking in binary data. +if exist %WRAPPER_JAR% ( + echo Found %WRAPPER_JAR% +) else ( + echo Couldn't find %WRAPPER_JAR%, downloading it ... 
+ echo Downloading from: %DOWNLOAD_URL% + powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')" + echo Finished downloading %WRAPPER_JAR% +) +@REM End of extension + +%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* +if ERRORLEVEL 1 goto error +goto end + +:error +set ERROR_CODE=1 + +:end +@endlocal & set ERROR_CODE=%ERROR_CODE% + +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost +@REM check for post script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" +if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" +:skipRcPost + +@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' +if "%MAVEN_BATCH_PAUSE%" == "on" pause + +if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% + +exit /B %ERROR_CODE% diff --git a/pom.xml b/pom.xml new file mode 100644 index 000000000..b298fabad --- /dev/null +++ b/pom.xml @@ -0,0 +1,80 @@ + + + 4.0.0 + + org.springframework.boot + spring-boot-starter-parent + 2.1.3.RELEASE + + + br.com.vtferrari + trabalhe-conosco-backend-dev + 0.0.1-SNAPSHOT + trabalhe-conosco-backend-dev + Demo project for Spring Boot + + + 11 + Greenwich.RELEASE + + + + + + + org.springframework.boot + spring-boot-starter-webflux + + + com.github.vanroy + spring-boot-starter-data-jest + 3.2.1.RELEASE + + + + org.springframework.boot + spring-boot-devtools + runtime + + + org.projectlombok + lombok + true + + + org.springframework.kafka + spring-kafka + + + org.springframework.boot + spring-boot-starter-security + + + org.springframework.boot + spring-boot-starter-test + test + + + io.projectreactor + reactor-test + test + + + org.springframework.kafka + spring-kafka-test + test + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + + diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/TrabalheConoscoBackendDevApplication.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/TrabalheConoscoBackendDevApplication.java new file mode 100644 index 000000000..ef7f36fba --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/TrabalheConoscoBackendDevApplication.java @@ -0,0 +1,15 @@ +package br.com.vtferrari.trabalheconoscobackenddev; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.kafka.annotation.EnableKafka; + +@EnableKafka +@SpringBootApplication +public class TrabalheConoscoBackendDevApplication { + + public static void main(String[] args) { + SpringApplication.run(TrabalheConoscoBackendDevApplication.class, args); + } + +} diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/config/ElasticSearchConfig.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/config/ElasticSearchConfig.java new file mode 100644 index 000000000..f5b7439e2 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/config/ElasticSearchConfig.java @@ -0,0 +1,30 @@ +package br.com.vtferrari.trabalheconoscobackenddev.config; + +import com.github.vanroy.springdata.jest.JestElasticsearchTemplate; +import com.github.vanroy.springdata.jest.mapper.DefaultJestResultsMapper; +import com.github.vanroy.springdata.jest.mapper.JestResultsMapper; +import io.searchbox.client.JestClient; +import org.springframework.context.annotation.Bean; 
+import org.springframework.context.annotation.Configuration;
+import org.springframework.data.elasticsearch.core.DefaultEntityMapper;
+import org.springframework.data.elasticsearch.core.mapping.SimpleElasticsearchMappingContext;
+import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder;
+
+@Configuration
+public class ElasticSearchConfig {
+    @Bean
+    public JestElasticsearchTemplate elasticsearchTemplate(JestClient client) {
+        return new JestElasticsearchTemplate(client, defaultJestResultsMapper());
+    }
+
+    @Bean
+    public DefaultJestResultsMapper defaultJestResultsMapper() {
+        return new DefaultJestResultsMapper(elasticsearchMappingContext());
+    }
+
+    @Bean
+    public SimpleElasticsearchMappingContext elasticsearchMappingContext() {
+        return new SimpleElasticsearchMappingContext();
+    }
+
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/config/SecurityConfig.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/config/SecurityConfig.java new file mode 100644 index 000000000..cfd76c247 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/config/SecurityConfig.java @@ -0,0 +1,22 @@
+package br.com.vtferrari.trabalheconoscobackenddev.config;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity;
+import org.springframework.security.config.web.server.ServerHttpSecurity;
+import org.springframework.security.web.server.SecurityWebFilterChain;
+
+@Configuration
+@EnableWebFluxSecurity
+public class SecurityConfig {
+
+    @Bean
+    public SecurityWebFilterChain springSecurityFilterChain(ServerHttpSecurity http) {
+        http
+            .authorizeExchange()
+            .anyExchange().authenticated()
+            .and()
+            .httpBasic();
+        return http.build();
+    }
+}
\ No newline at end of file
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/controller/IndexController.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/controller/IndexController.java new file mode 100644 index 000000000..9d9beff12 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/controller/IndexController.java @@ -0,0 +1,26 @@
+package br.com.vtferrari.trabalheconoscobackenddev.controller;
+
+import br.com.vtferrari.trabalheconoscobackenddev.service.FilterUserService;
+import br.com.vtferrari.trabalheconoscobackenddev.service.domain.User;
+import lombok.AllArgsConstructor;
+import org.springframework.data.domain.Page;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+import org.springframework.web.bind.annotation.RestController;
+
+
+@RestController
+@AllArgsConstructor
+@RequestMapping("/v1/users")
+public class IndexController {
+    private final FilterUserService filterUserService;
+
+    @GetMapping
+    public Page<User> index(
+            @RequestParam("keyword") String keyword,
+            @RequestParam(value = "page", defaultValue = "0") Integer page,
+            @RequestParam(value = "size", defaultValue = "10") Integer size) {
+        return filterUserService.findUserByKeyword(keyword, page, size);
+    }
+}
\ No newline at end of file
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/RelevancyListener.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/RelevancyListener.java new file mode 100644 index 000000000..57607c62c --- /dev/null +++
b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/RelevancyListener.java @@ -0,0 +1,49 @@
+package br.com.vtferrari.trabalheconoscobackenddev.listener;
+
+import br.com.vtferrari.trabalheconoscobackenddev.listener.converter.KafkaMessageConverter;
+import br.com.vtferrari.trabalheconoscobackenddev.listener.converter.RelevancyConverter;
+import br.com.vtferrari.trabalheconoscobackenddev.listener.exception.IdNotFoundException;
+import br.com.vtferrari.trabalheconoscobackenddev.service.RelevancyService;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.messaging.handler.annotation.Payload;
+import org.springframework.stereotype.Component;
+import reactor.core.publisher.Mono;
+
+import java.io.IOException;
+
+@Slf4j
+@Component
+@AllArgsConstructor
+public class RelevancyListener {
+
+    private static final int HIGH = 0;
+    private static final int LOW = 1;
+    private final KafkaMessageConverter kafkaMessageConverter;
+    private final RelevancyConverter relevancyConverter;
+    private final RelevancyService relevancyService;
+
+    @KafkaListener(topics = "trabalhe-conosco-backend-dev.relevant.list.1")
+    public void relevantListOne(@Payload String message) throws Exception {
+        processMessage(message, HIGH);
+    }
+
+    @KafkaListener(topics = "trabalhe-conosco-backend-dev.relevant.list.2")
+    public void relevantListTwo(String message) throws IOException {
+        processMessage(message, LOW);
+    }
+
+
+    private void processMessage(String message, int priority) {
+        Mono.just(message)
+                .map(kafkaMessageConverter::convert)
+                .map(kafkaMessage -> relevancyConverter.convert(kafkaMessage, priority))
+                .doOnNext(relevancyService::save)
+                .retry(throwable -> IdNotFoundException.class.equals(throwable.getClass()))
+                .doOnError(throwable -> log.error("Failed to process relevancy message", throwable))
+                .block();
+    }
+
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/converter/KafkaMessageConverter.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/converter/KafkaMessageConverter.java new file mode 100644 index 000000000..f48278114 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/converter/KafkaMessageConverter.java @@ -0,0 +1,27 @@
+package br.com.vtferrari.trabalheconoscobackenddev.listener.converter;
+
+import br.com.vtferrari.trabalheconoscobackenddev.listener.resource.KafkaMessage;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Component;
+import reactor.core.Exceptions;
+
+import java.io.IOException;
+
+@Slf4j
+@Component
+@AllArgsConstructor
+public class KafkaMessageConverter {
+
+    private final ObjectMapper objectMapper;
+
+    public KafkaMessage convert(final String message) {
+        try {
+            return objectMapper.readValue(message, KafkaMessage.class);
+        } catch (IOException e) {
+            throw Exceptions.propagate(e);
+        }
+    }
+
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/converter/RelevancyConverter.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/converter/RelevancyConverter.java new file mode 100644 index 000000000..7bc25edd5 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/converter/RelevancyConverter.java @@ -0,0 +1,21 @@ +package br.com.vtferrari.trabalheconoscobackenddev.listener.converter; + +import
br.com.vtferrari.trabalheconoscobackenddev.listener.resource.KafkaMessage;
+import br.com.vtferrari.trabalheconoscobackenddev.service.domain.PriorityLevel;
+import br.com.vtferrari.trabalheconoscobackenddev.service.domain.Relevancy;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Component;
+
+@Slf4j
+@Component
+public class RelevancyConverter {
+
+    public Relevancy convert(final KafkaMessage message, Integer level) {
+        return Relevancy
+                .builder()
+                .priorityLevel(PriorityLevel.fromLevel(level))
+                .id(message.getPayload())
+                .build();
+    }
+
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/exception/IdNotFoundException.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/exception/IdNotFoundException.java new file mode 100644 index 000000000..421d76573 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/exception/IdNotFoundException.java @@ -0,0 +1,8 @@
+package br.com.vtferrari.trabalheconoscobackenddev.listener.exception;
+
+public class IdNotFoundException extends RuntimeException {
+
+    public IdNotFoundException(String message) {
+        super(message);
+    }
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/resource/KafkaMessage.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/resource/KafkaMessage.java new file mode 100644 index 000000000..341545f89 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/listener/resource/KafkaMessage.java @@ -0,0 +1,9 @@
+package br.com.vtferrari.trabalheconoscobackenddev.listener.resource;
+
+import lombok.Data;
+
+@Data
+public class KafkaMessage {
+
+    private String payload;
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/RelevancyRepository.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/RelevancyRepository.java new file mode 100644 index 000000000..59a46fc71 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/RelevancyRepository.java @@ -0,0 +1,7 @@
+package br.com.vtferrari.trabalheconoscobackenddev.repository;
+
+import br.com.vtferrari.trabalheconoscobackenddev.repository.model.RelevancyElasticsearch;
+import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
+
+public interface RelevancyRepository extends ElasticsearchRepository<RelevancyElasticsearch, String> {
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/UserRepository.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/UserRepository.java new file mode 100644 index 000000000..24cba96b4 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/UserRepository.java @@ -0,0 +1,8 @@
+package br.com.vtferrari.trabalheconoscobackenddev.repository;
+
+import br.com.vtferrari.trabalheconoscobackenddev.repository.customized.CustomizedUserRepository;
+import br.com.vtferrari.trabalheconoscobackenddev.repository.model.UserElasticsearch;
+import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
+
+public interface UserRepository extends ElasticsearchRepository<UserElasticsearch, String>, CustomizedUserRepository {
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/converter/RelevancyElasticsearchConverter.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/converter/RelevancyElasticsearchConverter.java new file mode 100644 index 000000000..df24bf17d --- /dev/null +++
b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/converter/RelevancyElasticsearchConverter.java @@ -0,0 +1,18 @@
+package br.com.vtferrari.trabalheconoscobackenddev.repository.converter;
+
+
+import br.com.vtferrari.trabalheconoscobackenddev.service.domain.Relevancy;
+import br.com.vtferrari.trabalheconoscobackenddev.repository.model.RelevancyElasticsearch;
+import org.springframework.stereotype.Component;
+
+@Component
+public class RelevancyElasticsearchConverter {
+
+    public RelevancyElasticsearch convert(Relevancy relevancy) {
+        return RelevancyElasticsearch
+                .builder()
+                .id(relevancy.getId())
+                .priorityLevel(relevancy.getPriorityLevel().getLevel())
+                .build();
+    }
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/converter/UserConverter.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/converter/UserConverter.java new file mode 100644 index 000000000..3919fe1eb --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/converter/UserConverter.java @@ -0,0 +1,19 @@
+package br.com.vtferrari.trabalheconoscobackenddev.repository.converter;
+
+import br.com.vtferrari.trabalheconoscobackenddev.repository.model.UserElasticsearch;
+import br.com.vtferrari.trabalheconoscobackenddev.service.domain.User;
+import org.springframework.stereotype.Component;
+
+@Component
+public class UserConverter {
+
+    public User convert(UserElasticsearch userElasticsearch) {
+
+        return User
+                .builder()
+                .id(userElasticsearch.getId())
+                .name(userElasticsearch.getName())
+                .username(userElasticsearch.getUsername())
+                .build();
+    }
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/customized/CustomizedUserRepository.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/customized/CustomizedUserRepository.java new file mode 100644 index 000000000..f0c16397b --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/customized/CustomizedUserRepository.java @@ -0,0 +1,10 @@
+package br.com.vtferrari.trabalheconoscobackenddev.repository.customized;
+
+import br.com.vtferrari.trabalheconoscobackenddev.repository.model.UserElasticsearch;
+import org.springframework.data.domain.Page;
+
+public interface CustomizedUserRepository {
+
+    Page<UserElasticsearch> findDistinctByKeyword(String keyword, Integer page, Integer size);
+
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/customized/CustomizedUserRepositoryImpl.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/customized/CustomizedUserRepositoryImpl.java new file mode 100644 index 000000000..89091f1fc --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/customized/CustomizedUserRepositoryImpl.java @@ -0,0 +1,67 @@
+package br.com.vtferrari.trabalheconoscobackenddev.repository.customized;
+
+import br.com.vtferrari.trabalheconoscobackenddev.repository.model.UserElasticsearch;
+import com.github.vanroy.springdata.jest.JestElasticsearchTemplate;
+import lombok.AllArgsConstructor;
+import org.elasticsearch.common.lucene.search.function.CombineFunction;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder;
+import org.elasticsearch.index.query.functionscore.WeightBuilder;
+import org.springframework.data.domain.Page;
+import
org.springframework.data.domain.PageRequest;
+import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
+import org.springframework.stereotype.Repository;
+
+import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
+import static org.elasticsearch.index.query.QueryBuilders.fuzzyQuery;
+
+@Repository
+@AllArgsConstructor
+public class CustomizedUserRepositoryImpl implements CustomizedUserRepository {
+
+    private final JestElasticsearchTemplate jestElasticsearchTemplate;
+
+    @Override
+    public Page<UserElasticsearch> findDistinctByKeyword(String keyword, Integer page, Integer size) {
+
+        final var nativeSearchQuery =
+                new NativeSearchQueryBuilder()
+                        .withQuery(getQueryBuilder(keyword))
+                        .withPageable(PageRequest.of(page, size))
+                        .build();
+        return jestElasticsearchTemplate.queryForPage(nativeSearchQuery, UserElasticsearch.class);
+    }
+
+    private QueryBuilder getQueryBuilder(String keyword) {
+
+        // fuzzy-match the keyword against both indexed text fields
+        final var should = boolQuery()
+                .should(fuzzyQuery("name", keyword))
+                .should(fuzzyQuery("username", keyword));
+        return getQueryWithWeight(should);
+    }
+
+    private QueryBuilder getQueryWithWeight(QueryBuilder queryBuilder) {
+
+        return QueryBuilders.functionScoreQuery(queryBuilder, getFilterFunctionBuilders())
+                .boostMode(CombineFunction.MULTIPLY);
+    }
+
+    private FilterFunctionBuilder[] getFilterFunctionBuilders() {
+        return new FilterFunctionBuilder[]{
+                getPriority(0, 100),
+                getPriority(1, 50)
+        };
+    }
+
+    private FilterFunctionBuilder getPriority(int priority, int weight) {
+        return new FilterFunctionBuilder(QueryBuilders.termQuery("priority", priority), getWeightBuilder(weight));
+    }
+
+    private WeightBuilder getWeightBuilder(Integer weight) {
+        final WeightBuilder weightBuilder = new WeightBuilder();
+        weightBuilder.setWeight(weight);
+        return weightBuilder;
+    }
+
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/model/RelevancyElasticsearch.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/model/RelevancyElasticsearch.java new file mode 100644 index 000000000..e640c0f38 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/model/RelevancyElasticsearch.java @@ -0,0 +1,16 @@
+package br.com.vtferrari.trabalheconoscobackenddev.repository.model;
+
+
+import lombok.Builder;
+import lombok.Data;
+import org.springframework.data.annotation.Id;
+import org.springframework.data.elasticsearch.annotations.Document;
+
+@Data
+@Builder
+@Document(indexName = "trabalhe-conosco-backend-dev.relevancy")
+public class RelevancyElasticsearch {
+    @Id
+    private String id;
+    private int priorityLevel;
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/model/UserElasticsearch.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/model/UserElasticsearch.java new file mode 100644 index 000000000..e436b6ae9 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/repository/model/UserElasticsearch.java @@ -0,0 +1,16 @@
+package br.com.vtferrari.trabalheconoscobackenddev.repository.model;
+
+import lombok.Data;
+import org.springframework.data.annotation.Id;
+import org.springframework.data.elasticsearch.annotations.Document;
+
+@Data
+@Document(indexName = "trabalhe-conosco-backend-dev.users", type = "user")
+public class UserElasticsearch {
+    @Id
+    private String id;
+    private String name;
+    private String username;
+    private int priority;
+}
+
diff --git
a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/FilterUserService.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/FilterUserService.java new file mode 100644 index 000000000..27d766a01 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/FilterUserService.java @@ -0,0 +1,8 @@
+package br.com.vtferrari.trabalheconoscobackenddev.service;
+
+import br.com.vtferrari.trabalheconoscobackenddev.service.domain.User;
+import org.springframework.data.domain.Page;
+
+public interface FilterUserService {
+    Page<User> findUserByKeyword(String keyword, Integer page, Integer size);
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/RelevancyService.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/RelevancyService.java new file mode 100644 index 000000000..b134f8f45 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/RelevancyService.java @@ -0,0 +1,8 @@
+package br.com.vtferrari.trabalheconoscobackenddev.service;
+
+import br.com.vtferrari.trabalheconoscobackenddev.service.domain.Relevancy;
+
+public interface RelevancyService {
+
+    void save(Relevancy relevancy);
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/domain/PriorityLevel.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/domain/PriorityLevel.java new file mode 100644 index 000000000..bfa4d120c --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/domain/PriorityLevel.java @@ -0,0 +1,23 @@
+package br.com.vtferrari.trabalheconoscobackenddev.service.domain;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+
+import java.util.stream.Stream;
+
+@NoArgsConstructor
+@AllArgsConstructor
+public enum PriorityLevel {
+    HIGH(0), LOW(1), ERROR(Integer.MAX_VALUE);
+
+    @Getter
+    private Integer level;
+
+    public static PriorityLevel fromLevel(Integer level) {
+        return Stream.of(PriorityLevel.values())
+                .filter(priorityLevel -> priorityLevel.level.equals(level))
+                .findFirst()
+                .orElse(PriorityLevel.ERROR);
+    }
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/domain/Relevancy.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/domain/Relevancy.java new file mode 100644 index 000000000..78dc2ff39 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/domain/Relevancy.java @@ -0,0 +1,11 @@
+package br.com.vtferrari.trabalheconoscobackenddev.service.domain;
+
+import lombok.Builder;
+import lombok.Getter;
+
+@Getter
+@Builder
+public class Relevancy {
+    private String id;
+    private PriorityLevel priorityLevel;
+}
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/domain/User.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/domain/User.java new file mode 100644 index 000000000..4c522d3fc --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/domain/User.java @@ -0,0 +1,15 @@
+package br.com.vtferrari.trabalheconoscobackenddev.service.domain;
+
+import lombok.Builder;
+import lombok.Data;
+import org.springframework.data.annotation.Id;
+
+@Data
+@Builder
+public class User {
+    @Id
+    private String id;
+    private String name;
+    private String username;
+}
+
diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/impl/ElasticSearchRelevancyService.java
b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/impl/ElasticSearchRelevancyService.java new file mode 100644 index 000000000..81a022973 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/impl/ElasticSearchRelevancyService.java @@ -0,0 +1,47 @@ +package br.com.vtferrari.trabalheconoscobackenddev.service.impl; + +import br.com.vtferrari.trabalheconoscobackenddev.listener.exception.IdNotFoundException; +import br.com.vtferrari.trabalheconoscobackenddev.repository.RelevancyRepository; +import br.com.vtferrari.trabalheconoscobackenddev.repository.UserRepository; +import br.com.vtferrari.trabalheconoscobackenddev.repository.converter.RelevancyElasticsearchConverter; +import br.com.vtferrari.trabalheconoscobackenddev.repository.model.RelevancyElasticsearch; +import br.com.vtferrari.trabalheconoscobackenddev.repository.model.UserElasticsearch; +import br.com.vtferrari.trabalheconoscobackenddev.service.RelevancyService; +import br.com.vtferrari.trabalheconoscobackenddev.service.domain.Relevancy; +import lombok.AllArgsConstructor; +import org.springframework.stereotype.Service; + +import java.util.Optional; + +@Service +@AllArgsConstructor +public class ElasticSearchRelevancyService implements RelevancyService { + + private final RelevancyRepository relevancyRepository; + private final UserRepository userRepository; + private final RelevancyElasticsearchConverter relevancyElasticsearchConverter; + + @Override + public void save(Relevancy relevancy) { + Optional + .ofNullable(relevancyElasticsearchConverter.convert(relevancy)) + .map(this::updateUser) + .ifPresent(relevancyRepository::save); + + } + + private RelevancyElasticsearch updateUser(RelevancyElasticsearch relevancyElasticsearch) { + userRepository.findById(relevancyElasticsearch.getId()) + .ifPresentOrElse( + userElasticsearch -> getUpdateWithPriorityLevel(userElasticsearch, relevancyElasticsearch), + () -> { + throw new IdNotFoundException("Id not found"); + }); + return relevancyElasticsearch; + } + + private UserElasticsearch getUpdateWithPriorityLevel(UserElasticsearch userElasticsearch, RelevancyElasticsearch relevancyElasticsearch) { + userElasticsearch.setPriority(relevancyElasticsearch.getPriorityLevel()); + return userRepository.save(userElasticsearch); + } +} diff --git a/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/impl/FilterUserServiceImpl.java b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/impl/FilterUserServiceImpl.java new file mode 100644 index 000000000..ee3370f89 --- /dev/null +++ b/src/main/java/br/com/vtferrari/trabalheconoscobackenddev/service/impl/FilterUserServiceImpl.java @@ -0,0 +1,24 @@ +package br.com.vtferrari.trabalheconoscobackenddev.service.impl; + +import br.com.vtferrari.trabalheconoscobackenddev.repository.UserRepository; +import br.com.vtferrari.trabalheconoscobackenddev.repository.converter.UserConverter; +import br.com.vtferrari.trabalheconoscobackenddev.service.FilterUserService; +import br.com.vtferrari.trabalheconoscobackenddev.service.domain.User; +import lombok.AllArgsConstructor; +import org.springframework.data.domain.Page; +import org.springframework.stereotype.Service; + +@Service +@AllArgsConstructor +public class FilterUserServiceImpl implements FilterUserService { + + private final UserRepository userRepository; + private final UserConverter userConverter; + + @Override + public Page findUserByKeyword(String keyword, Integer page, Integer size) { + + return 
userRepository.findDistinctByKeyword(keyword, page, size)
+                .map(userConverter::convert);
+    }
+}
diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml new file mode 100644 index 000000000..5b350e239 --- /dev/null +++ b/src/main/resources/application.yml @@ -0,0 +1,15 @@
+spring:
+  data:
+    jest:
+      uri: http://localhost:9200
+  kafka:
+    consumer:
+      auto-offset-reset: earliest
+      group-id: ${random.uuid}
+      enable-auto-commit: false
+    listener:
+      ack-mode: manual_immediate
+  security:
+    user:
+      name: admin
+      password: admin
\ No newline at end of file
diff --git a/src/main/resources/static/index.html b/src/main/resources/static/index.html new file mode 100644 index 000000000..51225e182 --- /dev/null +++ b/src/main/resources/static/index.html @@ -0,0 +1,117 @@
+[index.html body lost to extraction: 117 lines of "React Local" page markup for the front-end search page; the only text that survived is the page credit "Vinícius Temóteo Ferrari"]
+ + + + + \ No newline at end of file diff --git a/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/listener/RelevancyListenerTest.java b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/listener/RelevancyListenerTest.java new file mode 100644 index 000000000..741aba28f --- /dev/null +++ b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/listener/RelevancyListenerTest.java @@ -0,0 +1,48 @@ +package br.com.vtferrari.trabalheconoscobackenddev.listener; + +import br.com.vtferrari.trabalheconoscobackenddev.listener.converter.KafkaMessageConverter; +import br.com.vtferrari.trabalheconoscobackenddev.listener.converter.RelevancyConverter; +import br.com.vtferrari.trabalheconoscobackenddev.listener.resource.KafkaMessage; +import br.com.vtferrari.trabalheconoscobackenddev.service.RelevancyService; +import br.com.vtferrari.trabalheconoscobackenddev.service.domain.Relevancy; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +@RunWith(MockitoJUnitRunner.class) +public class RelevancyListenerTest { + + @InjectMocks + private RelevancyListener relevancyListener; + @Mock + private KafkaMessageConverter kafkaMessageConverter; + @Mock + private RelevancyConverter relevancyConverter; + @Mock + private RelevancyService relevancyService; + + @Before + public void setup() { + } + + + @Test + public void testShouldSaveMessageOnDatabase() throws Exception { + + when(kafkaMessageConverter.convert(anyString())).thenReturn(new KafkaMessage()); + when(relevancyConverter.convert(any(KafkaMessage.class), anyInt())).thenReturn(Relevancy.builder().build()); + doNothing().when(relevancyService).save(any(Relevancy.class)); + + relevancyListener.relevantListOne("message"); + + verify(kafkaMessageConverter).convert(anyString()); + verify(relevancyConverter).convert(any(KafkaMessage.class), anyInt()); + verify(relevancyService).save(any(Relevancy.class)); + } +} \ No newline at end of file diff --git a/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/listener/converter/KafkaMessageConverterTest.java b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/listener/converter/KafkaMessageConverterTest.java new file mode 100644 index 000000000..40c89a837 --- /dev/null +++ b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/listener/converter/KafkaMessageConverterTest.java @@ -0,0 +1,25 @@ +package br.com.vtferrari.trabalheconoscobackenddev.listener.converter; + +import br.com.vtferrari.trabalheconoscobackenddev.listener.resource.KafkaMessage; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +public class KafkaMessageConverterTest { + private KafkaMessageConverter kafkaMessageConverter; + + @Before + public void setup() { + kafkaMessageConverter = new KafkaMessageConverter(new ObjectMapper()); + } + + @Test + public void testShouldConvert() { + + final KafkaMessage result = kafkaMessageConverter.convert("{\"payload\":\"new message\"}"); + assertEquals("new message", result.getPayload()); + } + +} \ No newline at end of file diff --git a/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/listener/converter/RelevancyConverterTest.java b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/listener/converter/RelevancyConverterTest.java new file mode 100644 
index 000000000..4d920082b --- /dev/null +++ b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/listener/converter/RelevancyConverterTest.java @@ -0,0 +1,31 @@ +package br.com.vtferrari.trabalheconoscobackenddev.listener.converter; + +import br.com.vtferrari.trabalheconoscobackenddev.listener.resource.KafkaMessage; +import br.com.vtferrari.trabalheconoscobackenddev.service.domain.PriorityLevel; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +public class RelevancyConverterTest { + + private RelevancyConverter relevancyConverter; + + @Before + public void setup() { + relevancyConverter = new RelevancyConverter(); + } + + + @Test + public void testShouldConvertKafkaMessageInRelevancy() { + + final var spec = new KafkaMessage(); + spec.setPayload("teste"); + final var result = relevancyConverter.convert(spec, 0); + assertEquals(spec.getPayload(), result.getId()); + assertEquals(PriorityLevel.HIGH, result.getPriorityLevel()); + + } + +} \ No newline at end of file diff --git a/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/repository/converter/RelevancyElasticsearchConverterTest.java b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/repository/converter/RelevancyElasticsearchConverterTest.java new file mode 100644 index 000000000..f15ea0745 --- /dev/null +++ b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/repository/converter/RelevancyElasticsearchConverterTest.java @@ -0,0 +1,34 @@ +package br.com.vtferrari.trabalheconoscobackenddev.repository.converter; + +import br.com.vtferrari.trabalheconoscobackenddev.repository.model.RelevancyElasticsearch; +import br.com.vtferrari.trabalheconoscobackenddev.service.domain.PriorityLevel; +import br.com.vtferrari.trabalheconoscobackenddev.service.domain.Relevancy; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.*; + +public class RelevancyElasticsearchConverterTest { + + private RelevancyElasticsearchConverter relevancyElasticsearchConverter; + + @Before + public void setup(){ + relevancyElasticsearchConverter=new RelevancyElasticsearchConverter(); + } + + @Test + public void testShouldConvert(){ + final var spec = Relevancy + .builder() + .id("new id") + .priorityLevel(PriorityLevel.LOW) + .build(); + final var result = relevancyElasticsearchConverter.convert(spec); + + assertEquals(spec.getId(),result.getId()); + assertEquals(spec.getPriorityLevel().getLevel().intValue(),result.getPriorityLevel()); + } + + +} \ No newline at end of file diff --git a/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/repository/converter/UserConverterTest.java b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/repository/converter/UserConverterTest.java new file mode 100644 index 000000000..151df6f50 --- /dev/null +++ b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/repository/converter/UserConverterTest.java @@ -0,0 +1,34 @@ +package br.com.vtferrari.trabalheconoscobackenddev.repository.converter; + +import br.com.vtferrari.trabalheconoscobackenddev.repository.model.UserElasticsearch; +import br.com.vtferrari.trabalheconoscobackenddev.service.domain.User; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.*; + +public class UserConverterTest { + + private UserConverter userConverter; + + @Before + public void setup(){ + userConverter = new UserConverter(); + } + + @Test + public void testShouldConvert(){ + final UserElasticsearch spec = new UserElasticsearch(); + spec.setPriority(1); + 
spec.setId("new id"); + spec.setName("test"); + spec.setUsername("test"); + + final User result = userConverter.convert(spec); + + assertEquals(spec.getId(),result.getId()); + assertEquals(spec.getName(),result.getName()); + assertEquals(spec.getUsername(),result.getUsername()); + } + +} \ No newline at end of file diff --git a/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/repository/customized/CustomizedUserRepositoryImplTest.java b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/repository/customized/CustomizedUserRepositoryImplTest.java new file mode 100644 index 000000000..ea88cbafd --- /dev/null +++ b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/repository/customized/CustomizedUserRepositoryImplTest.java @@ -0,0 +1,30 @@ +package br.com.vtferrari.trabalheconoscobackenddev.repository.customized; + +import br.com.vtferrari.trabalheconoscobackenddev.repository.model.UserElasticsearch; +import com.github.vanroy.springdata.jest.JestElasticsearchTemplate; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.springframework.data.elasticsearch.core.query.NativeSearchQuery; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.verify; + +@RunWith(MockitoJUnitRunner.class) +public class CustomizedUserRepositoryImplTest { + @InjectMocks + private CustomizedUserRepositoryImpl customizedUserRepository; + @Mock + private JestElasticsearchTemplate jestElasticsearchTemplate; + + @Test + public void testShouldCreateAQuery() { + + customizedUserRepository.findDistinctByKeyword("alfred", 0, 10); + + verify(jestElasticsearchTemplate).queryForPage(any(NativeSearchQuery.class), eq(UserElasticsearch.class)); + } +} \ No newline at end of file diff --git a/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/service/domain/PriorityLevelTest.java b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/service/domain/PriorityLevelTest.java new file mode 100644 index 000000000..6759e76f3 --- /dev/null +++ b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/service/domain/PriorityLevelTest.java @@ -0,0 +1,24 @@ +package br.com.vtferrari.trabalheconoscobackenddev.service.domain; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +public class PriorityLevelTest { + + @Test + public void testShouldGetHighPriority() { + assertEquals(PriorityLevel.HIGH, PriorityLevel.fromLevel(0)); + } + + @Test + public void testShouldGetLowPriority() { + assertEquals(PriorityLevel.LOW, PriorityLevel.fromLevel(1)); + } + + @Test + public void testShouldGetERRORPriority() { + assertEquals(PriorityLevel.ERROR, PriorityLevel.fromLevel(5461235)); + } + +} \ No newline at end of file diff --git a/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/service/impl/ElasticSearchRelevancyServiceTest.java b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/service/impl/ElasticSearchRelevancyServiceTest.java new file mode 100644 index 000000000..8042beb3a --- /dev/null +++ b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/service/impl/ElasticSearchRelevancyServiceTest.java @@ -0,0 +1,57 @@ +package br.com.vtferrari.trabalheconoscobackenddev.service.impl; + +import br.com.vtferrari.trabalheconoscobackenddev.repository.RelevancyRepository; +import br.com.vtferrari.trabalheconoscobackenddev.repository.UserRepository; +import 
+    @Test
+    public void testShouldNotSaveWhenIdIsNull() {
+        final Relevancy spec = Relevancy.builder().build();
+
+        elasticSearchRelevancyService.save(spec);
+
+        verify(relevancyElasticsearchConverter, times(1)).convert(any());
+        verify(userRepository, never()).findById(any());
+        verify(relevancyRepository, never()).save(any());
+    }
+
+    @Test
+    public void testShouldSaveWhenIdIsPresent() {
+        final Relevancy spec = Relevancy.builder().priorityLevel(PriorityLevel.ERROR).id("Test").build();
+        when(relevancyElasticsearchConverter.convert(any(Relevancy.class))).thenCallRealMethod();
+        when(userRepository.findById(any())).thenReturn(Optional.of(new UserElasticsearch()));
+
+        elasticSearchRelevancyService.save(spec);
+
+        verify(relevancyElasticsearchConverter, times(1)).convert(any());
+        verify(userRepository, times(1)).findById(any());
+        verify(relevancyRepository, times(1)).save(any());
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/service/impl/FilterUserServiceImplTest.java b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/service/impl/FilterUserServiceImplTest.java
new file mode 100644
index 000000000..5ccd242d9
--- /dev/null
+++ b/src/test/java/br/com/vtferrari/trabalheconoscobackenddev/service/impl/FilterUserServiceImplTest.java
@@ -0,0 +1,41 @@
+package br.com.vtferrari.trabalheconoscobackenddev.service.impl;
+
+import br.com.vtferrari.trabalheconoscobackenddev.repository.UserRepository;
+import br.com.vtferrari.trabalheconoscobackenddev.repository.converter.UserConverter;
+import br.com.vtferrari.trabalheconoscobackenddev.repository.model.UserElasticsearch;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+import org.springframework.data.domain.PageImpl;
+
+import java.util.List;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.*;
+
+@RunWith(MockitoJUnitRunner.class)
+public class FilterUserServiceImplTest {
+
+    @InjectMocks
+    private FilterUserServiceImpl filterUserService;
+    @Mock
+    private UserRepository userRepository;
+    @Mock
+    private UserConverter userConverter;
+
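+    // findUserByKeyword delegates the paged search to the repository and converts
+    // each Elasticsearch hit into a domain User.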
+    @Test
+    public void testShouldFindUsersByKeyword() {
+        when(userRepository.findDistinctByKeyword(anyString(), anyInt(), anyInt()))
+                .thenReturn(new PageImpl<>(List.of(new UserElasticsearch())));
+        when(userConverter.convert(any(UserElasticsearch.class))).thenCallRealMethod();
+
+        filterUserService.findUserByKeyword("test", 0, 10);
+
+        verify(userConverter, times(1)).convert(any());
+        verify(userRepository, times(1)).findDistinctByKeyword(anyString(), anyInt(), anyInt());
+    }
+}
\ No newline at end of file
diff --git a/start.sh b/start.sh
new file mode 100755
index 000000000..1d81d4880
--- /dev/null
+++ b/start.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+OS=$(uname -s)
+
+if [ "${OS}" = "Linux" ]; then
+    docker_host_ip=$(hostname -I | awk '{print $1}')
+    export DOCKER_HOST_IP=$docker_host_ip
+elif [ "${OS}" = "Darwin" ]; then
+    if [[ -z "${DOCKER_HOST_IP-}" ]]; then
+        docker_host_ip=$(docker run --rm --net host alpine ip address show eth0 | awk '$1=="inet" {print $2}' | cut -f1 -d'/')
+        if [[ $docker_host_ip = '192.168.65.2' ]]; then
+            docker_host_ip=$(/sbin/ifconfig | grep -v '127.0.0.1' | awk '$1=="inet" {print $2}' | cut -f1 -d'/' | head -n 1)
+        fi
+        export DOCKER_HOST_IP=$docker_host_ip
+    fi
+fi
+echo "Using Docker host IP: $DOCKER_HOST_IP"
+
+docker-compose -f ./infra/docker-compose.yml down
+docker-compose -f ./infra/docker-compose.yml up -d --build zookeeper kafka
+
+docker-compose -f ./infra/docker-compose.yml up -d --build elasticsearch
+echo "Waiting for Elasticsearch"
+until [ "$(curl -s -o /dev/null -w '%{http_code}' localhost:9200)" = "200" ]; do
+    sleep 1
+    echo "#"
+done
+echo "done"
+
+docker-compose -f ./infra/docker-compose.yml up -d --build connect
+
+docker run -it --rm --name java -v "$(pwd)":/usr/src/app -w /usr/src/app maven:slim mvn clean install
+
+docker-compose -f ./infra/docker-compose.yml up -d --build app
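+
+# Optional: block until the application answers before finishing. This is a
+# sketch, mirroring the Elasticsearch wait above; it assumes the app container
+# publishes port 8080, so adjust if your compose file maps a different port.
+echo "Waiting for application on port 8080"
+until curl -s -o /dev/null localhost:8080; do
+    sleep 1
+    echo "#"
+done
+echo "application is up"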