Fix/task 164 #586

Merged · 3 commits · Dec 7, 2023
Changes from all commits
54 changes: 53 additions & 1 deletion etl/ReadMe.md
@@ -37,18 +37,60 @@ jdbc:oracle:thin:@(description=(address=(protocol=tcps)(host=172.18.215.225.nip.

# Create sample data in PostgreSQL using init.sql

# OpenShift Build and Deploy process

## 1. Build custom PostGIS

```
oc process -f debezium-postgis.build.yaml | oc apply -f -
```
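
Creating the BuildConfig does not necessarily start a build on its own. A sketch of kicking one off and following it, assuming the `${API_NAME}-build` naming convention used by the build templates in this PR:

```
oc start-build debezium-postgis-build
oc logs -f bc/debezium-postgis-build
```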

## 2. Build the Kafka JDBC Connect component

```
oc process -f debezium-jdbc.build.yaml --param-file=.env | oc apply -f -
```
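
The `.env` file supplies values for `oc process --param-file`. Its exact keys depend on the parameters declared in `debezium-jdbc.build.yaml`, which is not shown in this diff; assuming parameters like those in the `debezium-kafka` build template later in this PR, a sketch might be (the context directory value here is a guess):

```
GIT_REPO_URL=https://github.com/bcgov/nr-epd-digital-services
GIT_REF=dev
SOURCE_CONTEXT_DIR=etl/debezium-jdbc
```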

## 3. Build the Kafka broker
```
oc process -f debezium-kafka.build.yaml | oc apply -f -
```

## 4. Deploy PostGIS (switch to the correct project for the target environment before this step)
```
oc process -f debezium-postgis.deploy.yaml | oc apply -f -
```
## 5. Deploy ZooKeeper
```
oc process -f debezium-zookeeper.deploy.yaml | oc apply -f -
```
## 6. Deploy Kafka
```
oc process -f debezium-kafka.deploy.yaml | oc apply -f -
```
## 7. Deploy Kafka JDBC Connect
```
oc process -f debezium-jdbc.deploy.yaml | oc apply -f -
```
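
Before registering connectors, it is worth confirming the rollouts completed; a sketch using standard `oc` checks, with DeploymentConfig names assumed from the `API_NAME` defaults in the deploy templates:

```
oc get pods
oc rollout status dc/debezium-kafka
oc rollout status dc/debezium-jdbc
```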

# Register the PostgreSQL source connector
```
curl -H "Content-Type: application/json" -d @register-postgres-source-connector.json http://localhost:8083/connectors/

curl -H "Content-Type: application/json" -d @register-postgres-source-connector.json https://debezium-jdbc-latest.apps.silver.devops.gov.bc.ca/connectors/
```
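
The payload file itself is not part of this diff. As a rough sketch, a Debezium 1.9 PostgreSQL source registration typically looks like the following; hostname, credentials, and database name are placeholders, while `dbserver1` matches the `topics.regex` used by the Oracle sink config in this PR:

```
{
  "name": "postgres-source-connector",
  "config": {
    "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
    "database.hostname": "debezium-postgis",
    "database.port": "5432",
    "database.user": "postgres",
    "database.password": "xxxxx",
    "database.dbname": "postgres",
    "database.server.name": "dbserver1",
    "schema.include.list": "public",
    "plugin.name": "pgoutput"
  }
}
```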

# Register the Oracle JDBC sink connector

```
curl -H "Content-Type: application/json" -d @register-oracle-jdbc-sink-connector.json http://localhost:8083/connectors/

curl -H "Content-Type: application/json" -d @register-oracle-jdbc-sink-connector.json https://debezium-jdbc-latest.apps.silver.devops.gov.bc.ca/connectors/
```

# Test with a Kafka console consumer
@@ -67,6 +109,16 @@ curl -X DELETE localhost:8083/connectors/<connector-name>

curl -X DELETE localhost:8083/connectors/oracle-jdbc-sink-connector
curl -X DELETE localhost:8083/connectors/postgres-source-connector

curl -X DELETE https://debezium-jdbc-latest.apps.silver.devops.gov.bc.ca/connectors/oracle-jdbc-sink-connector
curl -X DELETE https://debezium-jdbc-latest.apps.silver.devops.gov.bc.ca/connectors/postgres-source-connector
```

# List all registered connectors
```
curl localhost:8083/connectors/
curl https://debezium-jdbc-latest.apps.silver.devops.gov.bc.ca/connectors
```
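
The same REST API reports per-connector state, which is useful before deleting or re-registering (standard Kafka Connect status endpoint):

```
curl localhost:8083/connectors/postgres-source-connector/status
curl https://debezium-jdbc-latest.apps.silver.devops.gov.bc.ca/connectors/oracle-jdbc-sink-connector/status
```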


6 changes: 6 additions & 0 deletions etl/debezium-kafka/Dockerfile
@@ -0,0 +1,6 @@

# Extend the upstream Debezium Kafka image with custom log4j configuration
FROM debezium/kafka:1.9

# Override the image's default logging configs (rootLogger set to ERROR)
COPY connect-log4j.properties /kafka/config/
COPY log4j.properties /kafka/config/
67 changes: 67 additions & 0 deletions etl/debezium-kafka/connect-log4j.properties
@@ -0,0 +1,67 @@
kafka.logs.dir=logs

log4j.rootLogger=ERROR, stdout

# Disable excessive reflection warnings - KAFKA-5229
log4j.logger.org.reflections=ERROR

log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.threshold=ERROR
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n

log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log
log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.kafkaAppender.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n

log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log
log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.stateChangeAppender.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n

log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log
log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.requestAppender.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n

log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log
log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.cleanerAppender.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n

log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log
log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.controllerAppender.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n

# Turn on all our debugging info
#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender
#log4j.logger.kafka.client.ClientUtils=DEBUG, kafkaAppender
#log4j.logger.kafka.perf=DEBUG, kafkaAppender
#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender
#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG
log4j.logger.kafka=ERROR, kafkaAppender

log4j.logger.kafka.network.RequestChannel$=ERROR, requestAppender
log4j.additivity.kafka.network.RequestChannel$=false

#log4j.logger.kafka.network.Processor=TRACE, requestAppender
#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender
#log4j.additivity.kafka.server.KafkaApis=false
log4j.logger.kafka.request.logger=ERROR, requestAppender
log4j.additivity.kafka.request.logger=false

log4j.logger.kafka.controller=ERROR, controllerAppender
log4j.additivity.kafka.controller=false

log4j.logger.kafka.log.LogCleaner=ERROR, cleanerAppender
log4j.additivity.kafka.log.LogCleaner=false

log4j.logger.state.change.logger=ERROR, stateChangeAppender
log4j.additivity.state.change.logger=false
67 changes: 67 additions & 0 deletions etl/debezium-kafka/log4j.properties
@@ -0,0 +1,67 @@
kafka.logs.dir=logs

log4j.rootLogger=ERROR, stdout

# Disable excessive reflection warnings - KAFKA-5229
log4j.logger.org.reflections=ERROR

log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.threshold=ERROR
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n

log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log
log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.kafkaAppender.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n

log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log
log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.stateChangeAppender.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n

log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log
log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.requestAppender.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n

log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log
log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.cleanerAppender.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n

log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log
log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.controllerAppender.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n

# Turn on all our debugging info
#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender
#log4j.logger.kafka.client.ClientUtils=DEBUG, kafkaAppender
#log4j.logger.kafka.perf=DEBUG, kafkaAppender
#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender
#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG
log4j.logger.kafka=ERROR, kafkaAppender

log4j.logger.kafka.network.RequestChannel$=ERROR, requestAppender
log4j.additivity.kafka.network.RequestChannel$=false

#log4j.logger.kafka.network.Processor=TRACE, requestAppender
#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender
#log4j.additivity.kafka.server.KafkaApis=false
log4j.logger.kafka.request.logger=ERROR, requestAppender
log4j.additivity.kafka.request.logger=false

log4j.logger.kafka.controller=ERROR, controllerAppender
log4j.additivity.kafka.controller=false

log4j.logger.kafka.log.LogCleaner=ERROR, cleanerAppender
log4j.additivity.kafka.log.LogCleaner=false

log4j.logger.state.change.logger=ERROR, stateChangeAppender
log4j.additivity.state.change.logger=false
2 changes: 2 additions & 0 deletions etl/docker-compose.yaml
@@ -8,6 +8,8 @@ services:
      - 3888:3888
  kafka:
    image: debezium/kafka:${DEBEZIUM_VERSION}
    build:
      context: debezium-kafka
    ports:
      - 9092:9092
    links:
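
With the build context added above, the customized Kafka image can be rebuilt and restarted locally using standard Docker Compose commands (service name `kafka` as defined in the compose file):

```
docker-compose build kafka
docker-compose up -d kafka
```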
6 changes: 4 additions & 2 deletions etl/register-oracle-jdbc-sink-connector.json
@@ -6,7 +6,7 @@
"topics.regex": "dbserver1.public.(.*)",
"connection.url": "jdbc:oracle:thin:@(DESCRIPTION=(ADDRESS=(PROTOCOL=TCPS)(PORT=1543)(HOST=nrcdb01.bcgov))(CONNECT_DATA=(SERVICE_NAME=SD57387.NRS.BCGOV))(SECURITY=(ssl_server_cert_dn=\"CN=nrcdb01.bcgov\")))",
"connection.user": "JAITHOMA",
"connection.password": "jaithoma#1020",
"connection.password": "xxxxx",
"dialect.name": "OracleDatabaseDialect",
"security.protocol":"SSL",
"ssl.enabled.protocols": "TLSv1.2,TLSv1.1",
@@ -16,7 +16,9 @@
"table.name.format" : "SIS.${topic}",
"delete.enabled": "true",
"pk.mode": "record_key",
"insert.mode": "upsert",
"insert.mode": "upsert",
"max.retries": "30",
"retry.backoff.ms": "10000",
"quote.sql.identifiers": "never",
"transforms":"route,ConvertCreatedTimestamp,ConvertUpdatedTimestamp,topicCase",
"transforms.route.type": "org.apache.kafka.connect.transforms.RegexRouter",
59 changes: 59 additions & 0 deletions openshift/templates/etl/debezium-kafka.build.yaml
@@ -0,0 +1,59 @@
---
kind: Template
apiVersion: template.openshift.io/v1
metadata:
  name: "${API_NAME}-build-template"
  creationTimestamp:
objects:
  - kind: ImageStream
    apiVersion: v1
    metadata:
      name: "${API_NAME}"
  - kind: BuildConfig
    apiVersion: v1
    metadata:
      name: "${API_NAME}-build"
      labels:
        app: "${API_NAME}-build"
    spec:
      runPolicy: Serial
      source:
        type: Git
        git:
          uri: "${GIT_REPO_URL}"
          ref: "${GIT_REF}"
        contextDir: "${SOURCE_CONTEXT_DIR}"
      strategy:
        type: Docker
        dockerStrategy:
          dockerfilePath: "${DOCKER_FILE_PATH}"
      output:
        to:
          kind: ImageStreamTag
          name: "${API_NAME}:latest"
parameters:
  - name: API_NAME
    displayName: Name
    description: The name assigned to all of the resources defined in this template.
    required: true
    value: debezium-kafka
  - name: GIT_REPO_URL
    displayName: Git Repo URL
    description: The URL to your GIT repo.
    required: true
    value: https://github.com/bcgov/nr-epd-digital-services
  - name: GIT_REF
    displayName: Git Reference
    description: The git reference or branch.
    required: true
    value: dev
  - name: SOURCE_CONTEXT_DIR
    displayName: Source Context Directory
    description: The source context directory.
    required: false
    value: etl/debezium-kafka
  - name: DOCKER_FILE_PATH
    displayName: Docker File Path
    description: The path to the docker file defining the build.
    required: false
    value: "Dockerfile"
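
Any parameter declared above can also be overridden at process time rather than relying on the defaults; for example, building from a different branch (`main` here is an arbitrary illustration):

```
oc process -f debezium-kafka.build.yaml -p GIT_REF=main | oc apply -f -
```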
13 changes: 11 additions & 2 deletions openshift/templates/etl/debezium-kafka.deploy.yaml
@@ -26,6 +26,15 @@ objects:
          maxSurge: 25%
      triggers:
        - type: ConfigChange
        - type: ImageChange
          imageChangeParams:
            automatic: true
            containerNames:
              - "${API_NAME}"
            from:
              kind: ImageStreamTag
              namespace: "${IMAGE_NAMESPACE}"
              name: "${API_NAME}:${TAG_NAME}"
      replicas: 1
      test: false
      selector:
@@ -42,7 +51,7 @@
        spec:
          containers:
            - name: "${API_NAME}"
              image: debezium/kafka:1.9
              image: "${API_NAME}"
              imagePullPolicy: Always
              ports:
                - containerPort: 9092
@@ -108,4 +117,4 @@ parameters:
    displayName: Environment TAG name
    description: The TAG name for this environment, e.g., dev, test, prod
    required: true
    value: dev
    value: latest
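
Since the ImageChange trigger watches `${API_NAME}:${TAG_NAME}`, promoting a newly built image is a matter of retagging it. A sketch, with `<tools-namespace>` standing in for the project where the ImageStream is built:

```
oc tag <tools-namespace>/debezium-kafka:latest debezium-kafka:latest
```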