Commit
Merge pull request #21 from osstotalsoft/feature/docker-compose
Feature/docker compose
niemenX authored Mar 31, 2023
2 parents 02be392 + 86f78fe commit a560745
Showing 22 changed files with 3,544 additions and 2,882 deletions.
131 changes: 131 additions & 0 deletions docker-compose-es6.yaml
@@ -0,0 +1,131 @@
version: '2.3'

services:
  conductor-server:
    environment:
      - CONFIG_PROP=config-local.properties
      - ES_VERSION=6
      - INDEXING_ENABLED=true
      - "-Dpolyglot.engine.WarnInterpreterOnly=false"
    image: niemen/conductor-server:6.2.5
    networks:
      - internal
    ports:
      - 8080:8080
      - 8090:8090
    healthcheck:
      test: ["CMD", "curl", "-I", "-XGET", "http://localhost:8080/health"]
      interval: 60s
      timeout: 30s
      retries: 12
    links:
      - elasticsearch:es
    depends_on:
      redis:
        condition: service_healthy
      elasticsearch:
        condition: service_healthy
    logging:
      driver: "json-file"
      options:
        max-size: "1k"
        max-file: "3"

  conductor-ui:
    environment:
      - WF_SERVER=http://conductor-server:8080
    image: niemen/conductor-ui:4.0.0
    networks:
      - internal
    ports:
      - 5001:5000

  redis:
    image: redis:6.2-alpine
    restart: always
    networks:
      - internal
    ports:
      - '6379:6379'
    command: redis-server --save 20 1 --loglevel warning
    healthcheck:
      test: [ "CMD", "redis-cli", "--raw", "incr", "ping" ]
      interval: 10s
      timeout: 10s
      retries: 12
    volumes:
      - redis:/data

  elasticsearch:
    image: elasticsearch:6.8.23
    environment:
      - "ES_JAVA_OPTS=-Xms512m -Xmx1024m"
      - transport.host=0.0.0.0
      - discovery.type=single-node
      - xpack.security.enabled=false
    volumes:
      - es6data-conductor:/usr/share/elasticsearch/data
    networks:
      - internal
    ports:
      - 9200:9200
      - 9300:9300
    healthcheck:
      test: timeout 5 bash -c 'cat < /dev/null > /dev/tcp/localhost/9300'
      interval: 5s
      timeout: 5s
      retries: 12
    logging:
      driver: "json-file"
      options:
        max-size: "1k"
        max-file: "3"

  nats:
    image: nats-streaming:latest
    networks:
      - internal
    ports:
      - 4222:4222
      - 8222:8222

  schellar:
    profiles: ["scheduler"]
    image: flaviostutz/schellar
    environment:
      - CONDUCTOR_API_URL=http://conductor-server:8080/api
      - MONGO_ADDRESS=mongo
      - MONGO_USERNAME=root
      - MONGO_PASSWORD=root
      - LOG_LEVEL=info
    networks:
      - internal
    ports:
      - 3001:3000
    depends_on:
      - "conductor-server"
    logging:
      driver: "json-file"
      options:
        max-size: "20MB"
        max-file: "5"

  mongo:
    image: mongo:4.1.10
    profiles: ["scheduler"]
    networks:
      - internal
    environment:
      - MONGO_INITDB_ROOT_USERNAME=root
      - MONGO_INITDB_ROOT_PASSWORD=root
    ports:
      - 27017-27019:27017-27019

volumes:
  es6data-conductor:
    driver: local
  redis:
    driver: local

networks:
  internal:
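This new compose file mirrors docker-compose.yaml below but pins an Elasticsearch 6.x stack: conductor-server runs with ES_VERSION=6 against elasticsearch:6.8.23. The schellar and mongo services carry the scheduler profile, so they are only started when that profile is explicitly enabled (for example with Docker Compose's --profile scheduler flag).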
29 changes: 24 additions & 5 deletions docker-compose.yaml
@@ -4,8 +4,10 @@ services:
  conductor-server:
    environment:
      - CONFIG_PROP=config-local.properties
      - ES_VERSION=7
      - INDEXING_ENABLED=true
    image: niemen/conductor-server:7.1.3
      - JAVA_OPTS=-Dpolyglot.engine.WarnInterpreterOnly=false
    image: niemen/conductor-server:7.1.5
    networks:
      - internal
    ports:
@@ -28,6 +30,15 @@ services:
      options:
        max-size: "1k"
        max-file: "3"

  conductor-ui:
    environment:
      - WF_SERVER=http://conductor-server:8080
    image: niemen/conductor-ui:4.0.0
    networks:
      - internal
    ports:
      - 5001:5000

  redis:
    image: redis:6.2-alpine
@@ -38,15 +49,15 @@ services:
      - '6379:6379'
    command: redis-server --save 20 1 --loglevel warning
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 5s
      test: [ "CMD", "redis-cli", "--raw", "incr", "ping" ]
      interval: 10s
      timeout: 10s
      retries: 12
    volumes:
      - redis:/data

  elasticsearch:
    image: elasticsearch:7.17.8
    image: elasticsearch:7.17.9
    environment:
      - "ES_JAVA_OPTS=-Xms512m -Xmx1024m"
      - transport.host=0.0.0.0
@@ -70,6 +81,14 @@ services:
        max-size: "1k"
        max-file: "3"

  nats:
    image: nats-streaming:latest
    networks:
      - internal
    ports:
      - 4222:4222
      - 8222:8222

  schellar:
    profiles: ["scheduler"]
    image: flaviostutz/schellar
4 changes: 4 additions & 0 deletions gql-bff/.env
@@ -5,6 +5,8 @@ IDENTITY_AUTHORITY=
ELASTIC_SEARCH_HOST=http://localhost:9200
ELASTIC_USER=
ELASTIC_PASSWORD=
ELASTIC_VERSION=6
ELASTIC_PREFIX=atlas

# DEBUG, ERROR, INFO
APOLLO_LOGGING_LEVEL=ERROR
@@ -13,3 +15,5 @@ BASE_API_URL=http://localhost:8080
IS_MULTITENANT=false

SCHEDULE_URL=http://localhost:3001
#DEBUG=jwks

2 changes: 2 additions & 0 deletions gql-bff/helm/atlas-gql-mesh/values.yaml
@@ -43,6 +43,8 @@ gql:
ELASTIC_SEARCH_API_VERSION: "[ELASTIC_SEARCH_API_VERSION]"
IDENTITY_AUTHORITY: "[IDENTITY_AUTHORITY_URL]"
APOLLO_LOGGING_LEVEL: "[APOLLO_LOGGING_LEVEL]"
ELASTIC_VERSION: 6
ELASTIC_PREFIX: "atlas"
API_URL: "[API_URL]"
BASE_API_URL: "[BASE_API_URL]"
PORT: "4000"
7 changes: 4 additions & 3 deletions gql-bff/package.json
@@ -1,11 +1,12 @@
{
  "private": true,
  "scripts": {
    "start": "cross-env NODE_ENV=development node src/index.js",
    "start": "cross-env NODE_ENV=development node src/index.js",
    "start:production": "cross-env NODE_ENV=production node src/index.js"
  },
  "dependencies": {
    "@elastic/elasticsearch": "^6.8.8",
    "es6": "npm:@elastic/elasticsearch@^6.8.8",
    "es8": "npm:@elastic/elasticsearch@^8.6.0",
    "@graphql-mesh/cli": "^0.29.12",
    "@graphql-mesh/openapi": "0.16.9",
    "@koa/cors": "^3.1.0",
@@ -22,7 +23,7 @@
"humps": "^2.0.1",
"i": "^0.3.6",
"jsonwebtoken": "^8.5.1",
"jwks-rsa": "^2.0.3",
"jwks-rsa": "^3.0.1",
"koa": "^2.13.1",
"koa-bodyparser": "^4.3.0",
"koa-graphql": "^0.8.0",
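The single @elastic/elasticsearch dependency is replaced by two npm package aliases, es6 and es8, so both major versions of the official client can be installed side by side. A minimal sketch of how such aliases are typically consumed (the fallback host and the export are illustrative placeholders, not code from this repository):

// Sketch: using the npm aliases declared above. require("es6") resolves to
// @elastic/elasticsearch@^6.8.8 and require("es8") to @elastic/elasticsearch@^8.6.0.
const { Client: Client6 } = require("es6");
const { Client: Client8 } = require("es8");

// Pick the client that matches the target cluster, as client.js does below.
const node = process.env.ELASTIC_SEARCH_HOST || "http://localhost:9200";
const client =
  +process.env.ELASTIC_VERSION === 6 ? new Client6({ node }) : new Client8({ node });

module.exports = { client };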
77 changes: 31 additions & 46 deletions gql-bff/src/elasticSearch/client.js
@@ -1,50 +1,35 @@
const { Client } = require("@elastic/elasticsearch");
const { snapshots_body, logs_body } = require("./config");
const { elasticLogs, workflowSnapshots } = require("./registeredIndexes");
const { ELASTIC_VERSION } = process.env;

const { ELASTIC_SEARCH_HOST, ELASTIC_USER, ELASTIC_PASSWORD } = process.env;
if (+ELASTIC_VERSION === 6) {
  const {
    esSchema,
    esClient,
    initElastic,
    search,
    seed,
  } = require("./clientes6");

const esSchema = {
  size: 1000,
  from: 0,
  track_total_hits: false,
  query: {
    match_all: {},
  },
};
  module.exports = {
    esSchema,
    esClient,
    initElastic,
    search,
    seed,
  };
} else {
  const {
    esSchema,
    esClient,
    initElastic,
    search,
    seed,
  } = require("./clientes8");

const esClient = new Client({
  node: ELASTIC_SEARCH_HOST,
  auth: {
    username: ELASTIC_USER,
    password: ELASTIC_PASSWORD,
  },
  pingTimeout: 60000,
});

async function initializeIndex(indexName, body) {
  const { body: exists } = await esClient.indices.exists({ index: indexName });
  if (exists) console.log(`index "${indexName}" already created`);
  else {
    console.log(`index "${indexName}" is missing. Creating...`);
    await esClient.indices.create({
      index: indexName,
      body,
    });
    console.log(`index "${indexName}" created`);
  }
  module.exports = {
    esSchema,
    esClient,
    initElastic,
    seed,
    search,
  };
}

esClient.ping(function (error) {
  if (error) throw new TypeError("ElasticSearch cluster is down!");
  else {
    initializeIndex(workflowSnapshots.index, snapshots_body);
    initializeIndex(elasticLogs.index, logs_body);
    console.log("ElasticSearch is ok");
  }
});

module.exports = {
  esSchema,
  esClient,
};
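Because the rendered diff interleaves removed and added lines, the resulting client.js is easier to follow in one piece. Reconstructed from the added lines above, the new module branches on ELASTIC_VERSION (set in gql-bff/.env and the helm values) and re-exports whichever version-specific client module applies, roughly:

const { ELASTIC_VERSION } = process.env;

if (+ELASTIC_VERSION === 6) {
  const {
    esSchema,
    esClient,
    initElastic,
    search,
    seed,
  } = require("./clientes6");

  module.exports = { esSchema, esClient, initElastic, search, seed };
} else {
  const {
    esSchema,
    esClient,
    initElastic,
    search,
    seed,
  } = require("./clientes8");

  module.exports = { esSchema, esClient, initElastic, seed, search };
}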