##################################################
# Constants
##################################################
APP_NAME := grants-api
# Colors for output; can be used as:
# echo -e "this text is the default color $(RED) this text is red $(NO_COLOR) everything here is the default color again"
RED := \033[0;31m
NO_COLOR := \033[0m
# Appending this to the end of a command that outputs JSON will convert
# it to a readable format with timestamps and color-coding.
#
# Note that you can also change the LOG_FORMAT env var to switch
# between JSON & human-readable format. This pipe is kept in place
# for cases where JSON is output by a process we don't configure logging for.
DECODE_LOG := 2>&1 | python3 -u src/logging/util/decodelog.py
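# A minimal sketch of the intended usage (`my-logs` is a hypothetical target;
# any JSON-emitting command can be piped the same way):
#   my-logs:
#       docker compose logs --no-color $(APP_NAME) $(DECODE_LOG)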
# Required for CI flags below to work properly
SHELL = /bin/bash -o pipefail
# The APP_DIR variable is the path from the root of the repository to this Makefile.
# It is used to annotate MyPy errors so they show up in the 'Files Changed'
# tab of a pull request. If it is set to the wrong value, the annotations
# won't be attached to the correct files in that tab.
APP_DIR := api
ifdef CI
DOCKER_EXEC_ARGS := -T -e CI -e PYTEST_ADDOPTS="--color=yes"
MYPY_FLAGS := --no-pretty
MYPY_POSTPROC := | perl -pe "s/^(.+):(\d+):(\d+): error: (.*)/::warning file=$(APP_DIR)\/\1,line=\2,col=\3::\4/"
endif
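# For example, with APP_DIR=api the post-processing above rewrites a mypy
# error such as (hypothetical file and message):
#   src/foo.py:10:5: error: Incompatible types [assignment]
# into a GitHub Actions annotation tied to the right file in the diff:
#   ::warning file=api/src/foo.py,line=10,col=5::Incompatible types [assignment]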
# By default, all python/poetry commands will run inside of the docker container.
# If you wish to run them natively, add PY_RUN_APPROACH=local to your environment
# variables. You can set this either by running `export PY_RUN_APPROACH=local` in
# your shell or by adding it to your ~/.zshrc file (and running `source ~/.zshrc`).
ifeq "$(PY_RUN_APPROACH)" "local"
PY_RUN_CMD := poetry run
else
PY_RUN_CMD := docker compose run $(DOCKER_EXEC_ARGS) --rm $(APP_NAME) poetry run
endif
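# For example, to run natively (assumes poetry and the project dependencies are
# installed locally, e.g. via `make setup-local`):
#   export PY_RUN_APPROACH=local
#   make test        # now runs `poetry run pytest ...` outside of Docker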
FLASK_CMD := $(PY_RUN_CMD) flask
# Docker user configuration
# This logic is to avoid issues with permissions when mounting local volumes,
# which should be owned by the same UID on Linux distros. Mac OS can use root,
# but it is best practice to run things with the least permission possible.
# Can be set by adding user=<username> and/or uid=<id> after the make command.
# If the variables are not set explicitly, values are looked up from the current
# environment, otherwise fixed defaults are used.
# uid= defaults to 0 if user= is set (which makes sense if user=root; otherwise
# you probably want to set uid as well).
ifeq ($(user),)
RUN_USER ?= $(or $(strip $(USER)),nodummy)
RUN_UID ?= $(or $(strip $(shell id -u)),4000)
else
RUN_USER = $(user)
RUN_UID = $(or $(strip $(uid)),0)
endif
export RUN_USER
export RUN_UID
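# Example invocations (the user/uid values here are hypothetical):
#   make release-build                            # uses $USER and `id -u`
#   make user=ci-runner uid=1001 release-build    # explicit user and uid
#   make user=root release-build                  # uid defaults to 0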
release-build:
docker buildx build \
--target release \
--platform=linux/amd64 \
--build-arg RUN_USER=$(RUN_USER) \
--build-arg RUN_UID=$(RUN_UID) \
$(OPTS) \
.
##################################################
# Local Development Environment Setup
##################################################
setup-local:
# Configure poetry to use virtualenvs in the project directory
poetry config virtualenvs.in-project true
# Install dependencies
poetry install --no-root --all-extras --with dev
##################################################
# API Build & Run
##################################################
build:
docker compose build
start: ## Start the API
docker compose up --detach
start-debug:
docker compose -f docker-compose.yml -f docker-compose.debug.yml up --detach
run-logs: start ## Start the API and follow the logs
docker compose logs --follow --no-color $(APP_NAME)
init: build init-db init-opensearch init-localstack
clean-volumes: ## Remove project docker volumes - this includes the DB and OpenSearch state
docker compose down --volumes
volume-recreate: clean-volumes init ## Destroy current volumes and set up new ones - this will remove all existing data
stop:
docker compose down
check: format-check lint db-check-migrations test
remake-backend: volume-recreate db-seed-local populate-search-opportunities ## Completely recreate API services, load data into the DB and search index
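# A typical first-time workflow, as a sketch using the targets above:
#   make init         # build images; initialize the DB, OpenSearch, and localstack
#   make run-logs     # start the API and follow its logs
#   make check        # format check, lint, migration check, and tests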
##################################################
# DB & migrations
##################################################
#########################
# DB running / setup
#########################
# Docker starts the image for the DB, but it's not quite
# ready to accept connections, so we add a brief wait script.
init-db: start-db setup-postgres-db db-migrate
start-db:
docker compose up --detach grants-db
./bin/wait-for-local-db.sh
#########################
# DB Migrations
#########################
alembic_config := ./src/db/migrations/alembic.ini
alembic_cmd := $(PY_RUN_CMD) alembic --config $(alembic_config)
db-migrate: ## Apply pending migrations to db
$(PY_RUN_CMD) db-migrate
db-migrate-down: ## Rollback last migration in db
$(PY_RUN_CMD) db-migrate-down
db-migrate-down-all: ## Rollback all migrations
$(PY_RUN_CMD) db-migrate-down-all
check-migrate-msg:
ifndef MIGRATE_MSG
$(error MIGRATE_MSG is undefined)
endif
db-migrate-create: check-migrate-msg ## Create database migration with description MIGRATE_MSG
$(alembic_cmd) revision --autogenerate -m "$(MIGRATE_MSG)"
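# For example (the message is hypothetical):
#   make db-migrate-create MIGRATE_MSG="add opportunity table"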
MIGRATE_MERGE_MSG := Merge multiple heads
db-migrate-merge-heads: ## Create a new migration that depends on all existing `head`s
$(alembic_cmd) merge heads -m "$(MIGRATE_MERGE_MSG)" $(args)
db-migrate-current: ## Show current revision for a database
$(alembic_cmd) current $(args)
db-migrate-history: ## Show migration history
$(alembic_cmd) history $(args)
db-migrate-heads: ## Show migrations marked as a head
$(alembic_cmd) heads $(args)
db-seed-local: ## Generate records in your local database
$(PY_RUN_CMD) db-seed-local $(args)
db-check-migrations: ## Verify the DB schema matches the DB migrations generated
$(alembic_cmd) check || (echo -e "\n$(RED)Migrations are not up-to-date, make sure you generate them by running 'make db-migrate-create MIGRATE_MSG=<msg>'$(NO_COLOR)"; exit 1)
create-erds: # Create ERD diagrams for our DB schema
$(PY_RUN_CMD) create-erds
mv bin/*.png ../documentation/api/database/erds
setup-postgres-db: ## Does any initial setup necessary for our local database to work
$(PY_RUN_CMD) setup-postgres-db
##################################################
# Opensearch
##################################################
init-opensearch: start-opensearch ## Start the OpenSearch service locally
start-opensearch:
docker compose up --detach opensearch-node
docker compose up --detach opensearch-dashboards
./bin/wait-for-local-opensearch.sh
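# Note: OpenSearch Dashboards listens on port 5601 by default; whether that
# port is reachable locally depends on the mapping in docker-compose.yml.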
##################################################
# Localstack
##################################################
init-localstack: start-localstack setup-localstack ## Start localstack (local S3) and set up buckets
start-localstack:
docker compose up --detach localstack
setup-localstack:
$(PY_RUN_CMD) setup-localstack
##################################################
# Testing
##################################################
test: ## Run all tests except for audit logging tests
$(PY_RUN_CMD) pytest -m "not audit" $(args)
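# Extra pytest arguments can be passed via $(args), e.g. (hypothetical path):
#   make test args="tests/api -k opportunity -x"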
test-audit: ## Run audit logging tests
$(PY_RUN_CMD) pytest -m "audit" $(args)
test-watch: ## Run tests continually and watch for changes
$(PY_RUN_CMD) pytest-watch --clear $(args)
test-coverage: ## Run tests and generate coverage report
$(PY_RUN_CMD) coverage run --branch --source=src -m pytest -m "not audit" $(args)
$(PY_RUN_CMD) coverage run --data-file=.coverage.audit --branch --source=src -m pytest -m "audit" $(args)
$(PY_RUN_CMD) coverage combine --data-file=.coverage --append
$(PY_RUN_CMD) coverage report
test-coverage-report: ## Open HTML test coverage report
$(PY_RUN_CMD) coverage html --directory .coverage_report
open .coverage_report/index.html
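# Note: `open` is macOS-specific; on Linux use `xdg-open .coverage_report/index.html`.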
##################################################
# Formatting and linting
##################################################
format: ## Format files
$(PY_RUN_CMD) isort --atomic src tests bin
$(PY_RUN_CMD) black src tests bin
format-check: ## Check file formatting
$(PY_RUN_CMD) isort --atomic --check-only src tests bin
$(PY_RUN_CMD) black --check src tests bin
lint: lint-py ## Lint
lint-py: lint-ruff lint-mypy
lint-ruff:
$(PY_RUN_CMD) ruff check src tests bin
lint-mypy:
$(PY_RUN_CMD) mypy --show-error-codes $(MYPY_FLAGS) src bin $(MYPY_POSTPROC)
lint-security: # https://bandit.readthedocs.io/en/latest/index.html
$(PY_RUN_CMD) bandit -c pyproject.toml -r . --number 3 --skip B101 -ll -x ./.venv
##################################################
# CLI Commands
##################################################
cmd: ## Run Flask app CLI command (run `make cmd args="--help"` to see the list of CLI commands)
$(FLASK_CMD) $(args)
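# For example, to inspect a specific command group (data-migration is the
# group used by the targets below):
#   make cmd args="data-migration --help"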
# Set init-db as a prerequisite since there seems to be a race condition
# where the DB can't yet receive connections if it's starting from a
# clean state (e.g. after make stop, make clean-volumes, make openapi-spec)
openapi-spec: init-db ## Generate OpenAPI spec
$(FLASK_CMD) spec --format yaml --output ./openapi.generated.yml
copy-oracle-data:
$(FLASK_CMD) data-migration copy-oracle-data
setup-foreign-tables:
$(FLASK_CMD) data-migration setup-foreign-tables
seed-local-legacy-tables:
$(PY_RUN_CMD) python3 -m tests.lib.seed_local_legacy_tables
populate-search-opportunities: ## Load opportunities from the DB into the search index; run "make db-seed-local" first to populate your database
$(FLASK_CMD) load-search-data load-opportunity-data $(args)
##################################################
# Miscellaneous Utilities
##################################################
login: start ## Start a shell in the running container
docker exec -it $(APP_NAME) bash
DB_URI := postgresql://$(DB_USER)@$(DB_HOST):$(DB_PORT)/$(DB_NAME)?options=-csearch_path%3dapi,legacy,staging
login-db: ## Start psql with project environment variables
PGPASSWORD=$$DB_PASSWORD psql $(DB_URI)
console: ## Start interactive Python console
$(PY_RUN_CMD) python3 -i -m tool.console.interactive
help: ## Prints the help documentation and info about each command
@grep -E '^[/a-zA-Z0-9_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
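# Each `target: ## description` pair above is printed as a colorized row, e.g.
# the `start` target renders as:
#   start                          Start the API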
##################################################
# Load testing
##################################################
load-test-local: # Load test the local environment at localhost:3000
artillery run artillery-load-test.yml
load-test-dev: # Load test the dev environment in AWS
$(eval API_AUTH_TOKEN := $(shell aws ssm get-parameter --name /api/dev/api-auth-token --query Parameter.Value --with-decryption --output text | cut -d',' -f1))
env API_AUTH_TOKEN=$(API_AUTH_TOKEN) artillery run -e dev artillery-load-test.yml
load-test-staging: # Load test the staging environment in AWS
$(eval API_AUTH_TOKEN := $(shell aws ssm get-parameter --name /api/staging/api-auth-token --query Parameter.Value --with-decryption --output text | cut -d',' -f1))
env API_AUTH_TOKEN=$(API_AUTH_TOKEN) artillery run -e staging artillery-load-test.yml
load-test-prod: # Load test the production environment in AWS. Please test responsibly
$(eval API_AUTH_TOKEN := $(shell aws ssm get-parameter --name /api/prod/api-auth-token --query Parameter.Value --with-decryption --output text | cut -d',' -f1))
env API_AUTH_TOKEN=$(API_AUTH_TOKEN) artillery run -e prod artillery-load-test.yml