Commit e950f1c

Merge branch 'main' into fix/1386-seedlot-registration-form-fe-changes
craigyu authored Aug 16, 2024
2 parents 5841765 + 36334ee
Showing 25 changed files with 72 additions and 720 deletions.
9 changes: 4 additions & 5 deletions .github/workflows/.deploy.yml
@@ -79,6 +79,7 @@ jobs:
overwrite: true
parameters:
-p ZONE=${{ inputs.target }}
-p DB_PASSWORD='${{ secrets.DB_PASSWORD }}'
-p FORESTCLIENTAPI_KEY='${{ secrets.FORESTCLIENTAPI_KEY }}'
-p ORACLE_PASSWORD='${{ secrets.ORACLE_PASSWORD }}'
-p ORACLE_SERVICE='${{ vars.ORACLE_SERVICE }}'
@@ -96,12 +97,10 @@ jobs:
oc_namespace: ${{ vars.OC_NAMESPACE }}
oc_server: ${{ vars.OC_SERVER }}
oc_token: ${{ secrets.OC_TOKEN }}
file: database/openshift.deploy.yml
file: common/openshift.database.yml
overwrite: false
parameters:
-p TAG=${{ inputs.tag }}
-p ZONE=${{ inputs.target }}
-p DB_PASSWORD='${{ secrets.DB_PASSWORD }}'
${{ github.event_name == 'pull_request' && '-p DB_PVC_SIZE=192Mi' || '' }}
${{ github.event_name == 'pull_request' && '-p MEMORY_REQUEST=100Mi' || '' }}
${{ github.event_name == 'pull_request' && '-p MEMORY_LIMIT=200Mi' || '' }}
@@ -177,7 +176,7 @@ jobs:
parameters:
-p TAG=${{ inputs.tag }}
-p ZONE=${{ inputs.target }}
${{ github.event_name == 'pull_request' && '-p TEST_MODE=true' || '' }}
${{ inputs.target == 'test' && '-p TEST_MODE=false' || '-p TEST_MODE=true' }}

- name: Override OpenShift version
if: github.event_name == 'pull_request'
@@ -191,4 +190,4 @@ jobs:

- name: Run sync ETL
if: github.event_name == 'pull_request'
run: ./sync/oc_run.sh ${{ secrets.oc_token }}
run: ./sync/oc_run.sh ${{ inputs.tag }} ${{ secrets.oc_token }}
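The TEST_MODE line above also changes meaning: it previously enabled TEST_MODE only on pull requests, and now disables it only when the target zone is 'test'. GitHub Actions expressions have no ternary operator, so the workflow chains && and ||; a rough shell restatement of the new line (a sketch, with TARGET standing in for inputs.target):

  # Rough shell restatement of: ${{ inputs.target == 'test' && '-p TEST_MODE=false' || '-p TEST_MODE=true' }}
  if [ "$TARGET" = "test" ]; then
    TEST_MODE_PARAM="-p TEST_MODE=false"   # only the test zone turns TEST_MODE off
  else
    TEST_MODE_PARAM="-p TEST_MODE=true"    # every other zone keeps TEST_MODE on
  fi

The pull-request-only sizing overrides in the database hunk above rely on the same idiom, with an empty string as the fallback.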
24 changes: 2 additions & 22 deletions .github/workflows/job-sync.yml
@@ -23,26 +23,6 @@ jobs:
oc version
working-directory: /usr/local/bin/

- uses: actions/checkout@v4
- name: ETL Sync
run: |
# Run and verify job
# Login
oc login --token=${{ secrets.oc_token }} --server=${{ vars.oc_server }}
oc project ${{ vars.oc_namespace }} #Safeguard!
# Exit on errors or unset variables
set -eu
# Create job
CRONJOB=nr-spar-test-sync
RUN_JOB=${CRONJOB}--$(date +"%Y-%m-%d--%H-%M-%S")
oc create job ${RUN_JOB} --from=cronjob/${CRONJOB}
# Follow
oc wait --for=condition=ready pod --selector=job-name=${RUN_JOB} --timeout=1m
oc logs -l job-name=${RUN_JOB} --tail=50 --follow
# Verify successful completion
oc wait --for jsonpath='{.status.phase}'=Succeeded pod --selector=job-name=${RUN_JOB} --timeout=1m
echo "Job successful!"
run: ./sync/oc_run.sh test ${{ secrets.oc_token }}
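Both call sites now delegate to sync/oc_run.sh, with 'test' hard-coded here and inputs.tag passed in .deploy.yml above. The script itself is not part of this diff; the sketch below is only a reconstruction of what it presumably wraps, based on the inline steps it replaces, and the idea that the first argument selects the nr-spar-<zone>-sync cronjob is an assumption inferred from the two call sites.

  #!/bin/bash
  # Hypothetical reconstruction of sync/oc_run.sh -- not the actual script.
  # Usage: ./sync/oc_run.sh <zone> <token>     e.g. ./sync/oc_run.sh test "$OC_TOKEN"
  set -eu

  ZONE="$1"     # assumed: "test" or a PR number, used to pick the cronjob
  TOKEN="$2"

  # Login and switch project (server and namespace assumed to come from the environment)
  oc login --token="${TOKEN}" --server="${OC_SERVER}"
  oc project "${OC_NAMESPACE}"  # Safeguard!

  # Create a one-off job from the cronjob
  CRONJOB="nr-spar-${ZONE}-sync"
  RUN_JOB="${CRONJOB}--$(date +"%Y-%m-%d--%H-%M-%S")"
  oc create job "${RUN_JOB}" --from="cronjob/${CRONJOB}"

  # Follow the pod, then verify successful completion
  oc wait --for=condition=ready pod --selector="job-name=${RUN_JOB}" --timeout=1m
  oc logs -l "job-name=${RUN_JOB}" --tail=50 --follow
  oc wait --for jsonpath='{.status.phase}'=Succeeded pod --selector="job-name=${RUN_JOB}" --timeout=1m
  echo "Job successful!"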
2 changes: 1 addition & 1 deletion .github/workflows/merge.yml
@@ -45,7 +45,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
package: [backend, common, database, frontend, oracle-api, sync]
package: [backend, frontend, oracle-api, sync]
steps:
- uses: shrink/actions-docker-registry-tag@v4
with:
2 changes: 1 addition & 1 deletion .github/workflows/pr-close.yml
@@ -18,5 +18,5 @@ jobs:
oc_token: ${{ secrets.OC_TOKEN }}
with:
cleanup: label
packages: database backend frontend oracle-api sync common
packages: backend frontend oracle-api sync

2 changes: 1 addition & 1 deletion .github/workflows/pr-open.yml
@@ -18,7 +18,7 @@ jobs:
packages: write
strategy:
matrix:
package: [database, common, backend, frontend, oracle-api, sync]
package: [backend, frontend, oracle-api, sync]
steps:
- uses: bcgov-nr/[email protected]
id: build
4 changes: 2 additions & 2 deletions backend/openshift.deploy.yml
@@ -35,9 +35,9 @@ parameters:
- name: FORESTCLIENTAPI_ADDRESS
value: "https://nr-forest-client-api-prod.api.gov.bc.ca/api"
- name: CPU_REQUEST
value: 15m
value: 25m
- name: CPU_LIMIT
value: 60m
value: 100m
- name: MEMORY_REQUEST
value: 150Mi
- name: MEMORY_LIMIT
20 changes: 0 additions & 20 deletions common/Dockerfile

This file was deleted.

35 changes: 18 additions & 17 deletions database/init_db/init.sql → common/init_db/init.sql
@@ -2622,22 +2622,6 @@ CREATE TRIGGER trg_seedlot_audit_DIU
AFTER INSERT OR UPDATE OR DELETE ON spar.seedlot
FOR EACH ROW EXECUTE PROCEDURE spar.seedlot_if_modified_func();

create table spar.ETL_EXECUTION_LOG(
from_timestamp timestamp not null,
to_timestamp timestamp not null,
run_status varchar(100) not null,
updated_at timestamp default now() not null,
created_at timestamp default now() not null
);


comment on table spar.ETL_EXECUTION_LOG is 'ETL Tool monitoring table to store execution current instance of batch processing interfaces';
comment on column spar.ETL_EXECUTION_LOG.from_timestamp is 'From timestamp for the run (i.e. update_timestamp between from_timestamp and to_timetsamp)';
comment on column spar.ETL_EXECUTION_LOG.to_timestamp is 'To timestamp for the run (i.e. update_timestamp between from_timestamp and to_timetsamp)';
comment on column spar.ETL_EXECUTION_LOG.run_status is 'Status of ETL execution';
comment on column spar.ETL_EXECUTION_LOG.updated_at is 'Timestamp of the last time this record was updated';
comment on column spar.ETL_EXECUTION_LOG.created_at is 'Timestamp of the time this record was created';

alter table spar.seedlot
add column approved_timestamp timestamp,
add column approved_userid varchar(30);
@@ -4490,6 +4474,23 @@ comment on column spar.ETL_EXECUTION_MAP.retry_errors is 'If true,
comment on column spar.ETL_EXECUTION_MAP.updated_at is 'Timestamp of the last time this record was updated';
comment on column spar.ETL_EXECUTION_MAP.created_at is 'Timestamp of the time this record was created';

create table spar.ETL_EXECUTION_LOG(
from_timestamp timestamp not null,
to_timestamp timestamp not null,
run_status varchar(100) not null,
updated_at timestamp default now() not null,
created_at timestamp default now() not null
);


comment on table spar.ETL_EXECUTION_LOG is 'ETL Tool monitoring table to store execution current instance of batch processing interfaces';
comment on column spar.ETL_EXECUTION_LOG.from_timestamp is 'From timestamp for the run (i.e. update_timestamp between from_timestamp and to_timetsamp)';
comment on column spar.ETL_EXECUTION_LOG.to_timestamp is 'To timestamp for the run (i.e. update_timestamp between from_timestamp and to_timetsamp)';
comment on column spar.ETL_EXECUTION_LOG.run_status is 'Status of ETL execution';
comment on column spar.ETL_EXECUTION_LOG.updated_at is 'Timestamp of the last time this record was updated';
comment on column spar.ETL_EXECUTION_LOG.created_at is 'Timestamp of the time this record was created';


create table spar.ETL_EXECUTION_SCHEDULE(
interface_id varchar(100) not null,
execution_id integer not null,
@@ -4512,7 +4513,7 @@ comment on column spar.ETL_EXECUTION_SCHEDULE.created_at is 'Timestamp o

create table spar.etl_execution_log_hist
( entry_timestamp timestamp(6) not null default current_timestamp
, log_details jsonb not null)
, log_details jsonb not null);

comment on table spar.ETL_EXECUTION_LOG_HIST is 'ETL Tool monitoring table to store all executed instances of batch processing interfaces';
comment on column spar.ETL_EXECUTION_LOG_HIST.entry_timestamp is 'The timestamp when the record was inserted';
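The ETL_EXECUTION_LOG table itself is unchanged; it only moves later in init.sql, next to the other ETL monitoring tables. As a purely illustrative check (the connection string is a placeholder, not a value from this repository), the most recent sync windows recorded in it could be inspected with:

  # Hypothetical look at the most recent ETL runs; replace the connection string with real values.
  psql "postgresql://spar_user:spar_pass@localhost:5432/spar_db" -c "
    select from_timestamp, to_timestamp, run_status, created_at
      from spar.etl_execution_log
     order by created_at desc
     limit 10;"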
37 changes: 6 additions & 31 deletions database/openshift.deploy.yml → common/openshift.database.yml
@@ -12,18 +12,6 @@ parameters:
- name: ZONE
description: Deployment zone, e.g. pr-### or prod
required: true
- name: TAG
description: Image tag; e.g. PR number, latest or prod
required: true
- name: REGISTRY
description: Container registry to import from (internal is image-registry.openshift-image-registry.svc:5000)
value: ghcr.io
- name: ORG
description: Organization name
value: bcgov
- name: PVC_MOUNT_PATH
description: Where to mount the PVC, subpath (e.g. data/)
value: /var/lib/postgresql
- name: CPU_REQUEST
value: 25m
- name: CPU_LIMIT
@@ -33,24 +21,9 @@ parameters:
- name: MEMORY_LIMIT
value: 4Gi
- name: DB_PVC_SIZE
description: Volume space available for data, e.g. 512Mi, 2Gi.
displayName: Database Volume Capacity
value: 1Gi
- name: DB_PASSWORD
description: Password for the PostgreSQL connection user
required: true
description: Volume space available for data, e.g. 512Mi, 2Gi
value: 1.8Gi
objects:
- apiVersion: v1
kind: Secret
metadata:
name: ${NAME}-${ZONE}-${COMPONENT}
labels:
app: ${NAME}-${ZONE}
stringData:
database-name: ${NAME}
database-password: ${DB_PASSWORD}
database-port: "5432"
database-user: ${NAME}
- kind: PersistentVolumeClaim
apiVersion: v1
metadata:
@@ -93,7 +66,7 @@ objects:
claimName: ${NAME}-${ZONE}-${COMPONENT}
containers:
- name: ${NAME}-${ZONE}
image: ${REGISTRY}/${ORG}/${NAME}/${COMPONENT}:${TAG}
image: postgis/postgis:15-master
resources:
requests:
cpu: ${CPU_REQUEST}
@@ -111,6 +84,8 @@ objects:
- bash
- '-ce'
- exec pg_isready -U $POSTGRES_USER -d "dbname=$POSTGRES_DB" -h 127.0.0.1 -p 5432
periodSeconds: 30
timeoutSeconds: 10
livenessProbe:
exec:
command:
@@ -138,7 +113,7 @@ objects:
key: database-user
volumeMounts:
- name: ${NAME}-${ZONE}-${COMPONENT}
mountPath: ${PVC_MOUNT_PATH}
mountPath: /var/lib/postgresql
terminationMessagePath: "/dev/termination-log"
terminationMessagePolicy: File
imagePullPolicy: Always
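With the TAG, REGISTRY, ORG and PVC_MOUNT_PATH parameters and the embedded Secret removed, the relocated template needs only a zone plus optional sizing overrides, and it pulls postgis/postgis:15-master directly. Rendering it by hand would look roughly like the sketch below; the pr-123 zone and the reduced pull-request sizes are illustrative, and the matching *-database secret from common/openshift.init.yml is assumed to exist already.

  # Render and apply the database template for one zone (values are illustrative)
  oc process -f common/openshift.database.yml \
    -p ZONE=pr-123 \
    -p DB_PVC_SIZE=192Mi \
    -p MEMORY_REQUEST=100Mi \
    -p MEMORY_LIMIT=200Mi \
    | oc apply -f -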
14 changes: 14 additions & 0 deletions common/openshift.init.yml
@@ -7,6 +7,9 @@ parameters:
- name: ZONE
description: Deployment zone, e.g. pr-### or prod
required: true
- name: DB_PASSWORD
description: Password for the PostgreSQL connection user
required: true
- name: FORESTCLIENTAPI_KEY
required: true
- name: ORACLE_HOST
@@ -35,6 +38,17 @@ parameters:
description: Cognito user pools web client ID
required: true
objects:
- apiVersion: v1
kind: Secret
metadata:
name: ${NAME}-${ZONE}-database
labels:
app: ${NAME}-${ZONE}
stringData:
database-name: ${NAME}
database-password: ${DB_PASSWORD}
database-port: "5432"
database-user: ${NAME}
- apiVersion: v1
kind: Secret
metadata:
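The database credentials secret is now created here, from the DB_PASSWORD that .deploy.yml passes at init time (first hunk of this diff), rather than inside the database template. Once the init template has been applied, the rendered secret can be checked with standard oc commands; the name below assumes NAME defaults to nr-spar and a pr-123 zone, both illustrative:

  # List the keys (base64-encoded values) carried by the relocated secret
  oc get secret nr-spar-pr-123-database -o jsonpath='{.data}'
  # Decode a single key, e.g. the connection user
  oc extract secret/nr-spar-pr-123-database --keys=database-user --to=-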
10 changes: 0 additions & 10 deletions database/Dockerfile

This file was deleted.

4 changes: 2 additions & 2 deletions docker-compose.yml
@@ -24,12 +24,12 @@ x-frontend: &frontend
services:
database:
container_name: database
build: ./database
image: postgis/postgis:15-master
environment:
<<: *postgres-vars
volumes:
- "/pgdata"
- "./database/init_db:/init_db"
- "./common/init_db:/init_db"
ports: ["5432:5432"]
healthcheck:
test: psql -q -U $${POSTGRES_USER} -d $${POSTGRES_DB} -c 'SELECT 1'
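For local development the database service now runs the stock postgis image instead of an image built from ./database, and mounts the init scripts from their new common/ location. A quick way to confirm the relocated mount and to run the compose healthcheck by hand (a sketch; the service name and POSTGRES_* variables come from the compose file, nothing else is assumed):

  # Start only the database service and check the relocated init scripts are mounted
  docker compose up -d database
  docker compose exec database ls /init_db          # should list init.sql from ./common/init_db
  # Run the compose healthcheck by hand (POSTGRES_* come from the postgres-vars anchor)
  docker compose exec database bash -c 'psql -q -U "$POSTGRES_USER" -d "$POSTGRES_DB" -c "SELECT 1"'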
67 changes: 0 additions & 67 deletions oracle-api/.eslintrc.json

This file was deleted.

(Diffs for the remaining changed files were not loaded.)
