
Commit

Merge pull request #336 from LACMTA/dev
v2.29 - "Optmization Update"
albertkun authored Nov 1, 2023
2 parents 090aa89 + 289df25 commit e46e2d0
Showing 12 changed files with 205 additions and 59 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/dev-aws.yml
@@ -81,7 +81,7 @@ jobs:
run: >
aws lightsail create-container-service-deployment
--service-name dev-metro-api-v2
--containers '{"fastapi":{"image":"lacmta/metro-api-v2:fastapi","ports":{"80":"HTTP"},"environment":{"FTP_PASS":"${{secrets.FTP_PASS }}","FTP_SERVER":"${{secrets.FTP_SERVER }}","FTP_USERNAME":"${{secrets.FTP_USERNAME }}","SWIFTLY_AUTH_KEY_BUS":"${{secrets.SWIFTLY_AUTH_KEY_BUS}}","SWIFTLY_AUTH_KEY_RAIL":"${{secrets.SWIFTLY_AUTH_KEY_RAIL}}","API_DB_URI":"${{secrets.API_DB_URI}}","HASH_KEY":"${{secrets.HASH_KEY}}","HASHING_ALGORITHM":"${{secrets.HASHING_ALGORITHM}}","LOGZIO_TOKEN":"${{secrets.LOGZIO_TOKEN}}","LOGZIO_URL":"https://listener.logz.io:8071","RUNNING_ENV":"dev","MAIL_USERNAME":"${{secrets.MAIL_USERNAME}}","MAIL_PASSWORD":"${{secrets.MAIL_PASSWORD}}","MAIL_SERVER":"${{secrets.MAIL_SERVER}}"}},"data-loading-service":{"image":"lacmta/metro-api-v2:data-loading-service","environment":{"FTP_PASS":"${{secrets.FTP_PASS }}","FTP_SERVER":"${{secrets.FTP_SERVER }}","FTP_USERNAME":"${{secrets.FTP_USERNAME }}","SWIFTLY_AUTH_KEY_BUS":"${{secrets.SWIFTLY_AUTH_KEY_BUS}}","SWIFTLY_AUTH_KEY_RAIL":"${{secrets.SWIFTLY_AUTH_KEY_RAIL}}","API_DB_URI":"${{secrets.API_DB_URI}}","HASH_KEY":"${{secrets.HASH_KEY}}","HASHING_ALGORITHM":"${{secrets.HASHING_ALGORITHM}}","LOGZIO_TOKEN":"${{secrets.LOGZIO_TOKEN}}","LOGZIO_URL":"https://listener.logz.io:8071","RUNNING_ENV":"dev","MAIL_USERNAME":"${{secrets.MAIL_USERNAME}}","MAIL_PASSWORD":"${{secrets.MAIL_PASSWORD}}","MAIL_SERVER":"${{secrets.MAIL_SERVER}}"}}}' --public-endpoint '{"containerName": "fastapi","containerPort": 80,"healthCheck":{ "healthyThreshold": 2,"unhealthyThreshold": 2,"timeoutSeconds": 2,"intervalSeconds": 5,"path": "/","successCodes": "200-499"}}'
--containers '{"redis": {"image": "redis:latest", "environment": {}, "ports": {"6379": "HTTP"}}, "prometheus": {"image": "prom/prometheus:latest", "environment": {}, "ports": {"9090": "HTTP"}}, "grafana": {"image": "grafana/grafana:latest", "environment": {}, "ports": {"3000": "HTTP"}}, "data-loading-service": {"image":"lacmta/metro-api-v2:data-loading-service", "environment": {"FTP_SERVER": "${{secrets.FTP_SERVER}}", "FTP_USERNAME": "${{secrets.FTP_USERNAME}}", "FTP_PASS": "${{secrets.FTP_PASS}}", "SWIFTLY_AUTH_KEY_BUS": "${{secrets.SWIFTLY_AUTH_KEY_BUS}}", "SWIFTLY_AUTH_KEY_RAIL": "${{secrets.SWIFTLY_AUTH_KEY_RAIL}}", "AWS_ACCESS_KEY_ID": "${{secrets.AWS_ACCESS_KEY_ID}}", "ACCESS_SECRET_KEY": "${{secrets.ACCESS_SECRET_KEY}}", "SWIFTLY_AUTH_KEY": "${{secrets.SWIFTLY_AUTH_KEY}}", "API_DB_URI": "${{secrets.API_DB_URI}}", "HASH_KEY": "${{secrets.HASH_KEY}}", "HASHING_ALGORITHM": "${{secrets.HASHING_ALGORITHM}}", "LOGZIO_TOKEN": "${{secrets.LOGZIO_TOKEN}}", "LOGZIO_URL": "${{secrets.LOGZIO_URL}}", "RUNNING_ENV": "dev"}}, "fastapi": {"image":"lacmta/metro-api-v2:fastapi", "environment": {"FTP_SERVER": "${{secrets.FTP_SERVER}}", "FTP_USERNAME": "${{secrets.FTP_USERNAME}}", "FTP_PASS": "${{secrets.FTP_PASS}}", "SWIFTLY_AUTH_KEY_BUS": "${{secrets.SWIFTLY_AUTH_KEY_BUS}}", "SWIFTLY_AUTH_KEY_RAIL": "${{secrets.SWIFTLY_AUTH_KEY_RAIL}}", "AWS_ACCESS_KEY_ID": "${{secrets.AWS_ACCESS_KEY_ID}}", "ACCESS_SECRET_KEY": "${{secrets.ACCESS_SECRET_KEY}}", "SWIFTLY_AUTH_KEY": "${{secrets.SWIFTLY_AUTH_KEY}}", "API_DB_URI": "${{secrets.API_DB_URI}}", "HASH_KEY": "${{secrets.HASH_KEY}}", "HASHING_ALGORITHM": "${{secrets.HASHING_ALGORITHM}}", "LOGZIO_TOKEN": "${{secrets.LOGZIO_TOKEN}}", "LOGZIO_URL": "https://listener.logz.io:8071", "RUNNING_ENV": "dev"}, "ports": {"80": "HTTP"}}}' --public-endpoint '{"containerName": "fastapi","containerPort": 80,"healthCheck":{ "healthyThreshold": 2,"unhealthyThreshold": 2,"timeoutSeconds": 2,"intervalSeconds": 5,"path": "/","successCodes": "200-499"}}'
deploy-documentation:
runs-on: ubuntu-latest
name: Deploy Documentation to GitHub Pages
7 changes: 3 additions & 4 deletions .github/workflows/prod-aws.yml
@@ -8,7 +8,7 @@

# Source Partly from: https://aws.amazon.com/getting-started/guides/deploy-webapp-lightsail/module-four/

name: Deploy Docker Image to Prod AWS Lightsail Container Service
name: Deploy Docker Image to AWS Lightsail Container Service

on:
push:
@@ -76,13 +76,12 @@ jobs:
context: ./fastapi/
build-args: |
BUILDKIT_CONTEXT_KEEP_GIT_DIR=true
- name: Push Updated Docker Image on Lightsail Prod
- name: Push Updated Docker Image on Lightsail
if: steps.changed-files.outputs.any_changed == 'true'
run: >
aws lightsail create-container-service-deployment
--service-name metro-api-v2
--containers '{"fastapi":{"image":"lacmta/metro-api-v2:fastapi","ports":{"80":"HTTP"},"environment":{"FTP_PASS":"${{secrets.FTP_PASS }}","FTP_SERVER":"${{secrets.FTP_SERVER }}","FTP_USERNAME":"${{secrets.FTP_USERNAME }}","SWIFTLY_AUTH_KEY_BUS":"${{secrets.SWIFTLY_AUTH_KEY_BUS}}","SWIFTLY_AUTH_KEY_RAIL":"${{secrets.SWIFTLY_AUTH_KEY_RAIL}}","API_DB_URI":"${{secrets.API_DB_URI}}","HASH_KEY":"${{secrets.HASH_KEY}}","HASHING_ALGORITHM":"${{secrets.HASHING_ALGORITHM}}","LOGZIO_TOKEN":"${{secrets.LOGZIO_TOKEN}}","LOGZIO_URL":"https://listener.logz.io:8071","RUNNING_ENV":"prod","MAIL_USERNAME":"${{secrets.MAIL_USERNAME}}","MAIL_PASSWORD":"${{secrets.MAIL_PASSWORD}}","MAIL_SERVER":"${{secrets.MAIL_SERVER}}"}},"data-loading-service":{"image":"lacmta/metro-api-v2:data-loading-service","environment":{"FTP_PASS":"${{secrets.FTP_PASS }}","FTP_SERVER":"${{secrets.FTP_SERVER }}","FTP_USERNAME":"${{secrets.FTP_USERNAME }}","SWIFTLY_AUTH_KEY_BUS":"${{secrets.SWIFTLY_AUTH_KEY_BUS}}","SWIFTLY_AUTH_KEY_RAIL":"${{secrets.SWIFTLY_AUTH_KEY_RAIL}}","API_DB_URI":"${{secrets.API_DB_URI}}","HASH_KEY":"${{secrets.HASH_KEY}}","HASHING_ALGORITHM":"${{secrets.HASHING_ALGORITHM}}","LOGZIO_TOKEN":"${{secrets.LOGZIO_TOKEN}}","LOGZIO_URL":"https://listener.logz.io:8071","RUNNING_ENV":"prod","MAIL_USERNAME":"${{secrets.MAIL_USERNAME}}","MAIL_PASSWORD":"${{secrets.MAIL_PASSWORD}}","MAIL_SERVER":"${{secrets.MAIL_SERVER}}"}}}' --public-endpoint '{"containerName": "fastapi","containerPort": 80,"healthCheck":{ "healthyThreshold": 2,"unhealthyThreshold": 2,"timeoutSeconds": 2,"intervalSeconds": 5,"path": "/","successCodes": "200-499"}}'
--containers '{"redis": {"image": "redis:latest", "environment": {}, "ports": {"6379": "HTTP"}}, "prometheus": {"image": "prom/prometheus:latest", "environment": {}, "ports": {"9090": "HTTP"}}, "grafana": {"image": "grafana/grafana:latest", "environment": {}, "ports": {"3000": "HTTP"}}, "data-loading-service": {"image":"lacmta/metro-api-v2:data-loading-service", "environment": {"FTP_SERVER": "${{secrets.FTP_SERVER}}", "FTP_USERNAME": "${{secrets.FTP_USERNAME}}", "FTP_PASS": "${{secrets.FTP_PASS}}", "SWIFTLY_AUTH_KEY_BUS": "${{secrets.SWIFTLY_AUTH_KEY_BUS}}", "SWIFTLY_AUTH_KEY_RAIL": "${{secrets.SWIFTLY_AUTH_KEY_RAIL}}", "AWS_ACCESS_KEY_ID": "${{secrets.AWS_ACCESS_KEY_ID}}", "ACCESS_SECRET_KEY": "${{secrets.ACCESS_SECRET_KEY}}", "SWIFTLY_AUTH_KEY": "${{secrets.SWIFTLY_AUTH_KEY}}", "API_DB_URI": "${{secrets.API_DB_URI}}", "HASH_KEY": "${{secrets.HASH_KEY}}", "HASHING_ALGORITHM": "${{secrets.HASHING_ALGORITHM}}", "LOGZIO_TOKEN": "${{secrets.LOGZIO_TOKEN}}", "LOGZIO_URL": "${{secrets.LOGZIO_URL}}", "RUNNING_ENV": "prod"}}, "fastapi": {"image":"lacmta/metro-api-v2:fastapi", "environment": {"FTP_SERVER": "${{secrets.FTP_SERVER}}", "FTP_USERNAME": "${{secrets.FTP_USERNAME}}", "FTP_PASS": "${{secrets.FTP_PASS}}", "SWIFTLY_AUTH_KEY_BUS": "${{secrets.SWIFTLY_AUTH_KEY_BUS}}", "SWIFTLY_AUTH_KEY_RAIL": "${{secrets.SWIFTLY_AUTH_KEY_RAIL}}", "AWS_ACCESS_KEY_ID": "${{secrets.AWS_ACCESS_KEY_ID}}", "ACCESS_SECRET_KEY": "${{secrets.ACCESS_SECRET_KEY}}", "SWIFTLY_AUTH_KEY": "${{secrets.SWIFTLY_AUTH_KEY}}", "API_DB_URI": "${{secrets.API_DB_URI}}", "HASH_KEY": "${{secrets.HASH_KEY}}", "HASHING_ALGORITHM": "${{secrets.HASHING_ALGORITHM}}", "LOGZIO_TOKEN": "${{secrets.LOGZIO_TOKEN}}", "LOGZIO_URL": "https://listener.logz.io:8071", "RUNNING_ENV": "prod"}, "ports": {"80": "HTTP"}}}' --public-endpoint '{"containerName": "fastapi","containerPort": 80,"healthCheck":{ "healthyThreshold": 2,"unhealthyThreshold": 2,"timeoutSeconds": 2,"intervalSeconds": 5,"path": "/","successCodes": "200-499"}}'
deploy-documentation:
runs-on: ubuntu-latest
name: Deploy Documentation to GitHub Pages
1 change: 1 addition & 0 deletions .gitignore
@@ -16,3 +16,4 @@ notebooks/**.csv
notebooks/**.zip
notebooks/input
"version.txt"
aws-lightsail-container.json
19 changes: 19 additions & 0 deletions convert-docker-compose-to-aws-lightsail-container.py
@@ -0,0 +1,19 @@
import yaml
import json

with open('docker-compose.yml', 'r') as file:
docker_compose = yaml.safe_load(file)

containers = {}

for service, config in docker_compose['services'].items():
containers[service] = {
'image': config['image'],
'environment': config.get('environment', {}),
}

if 'ports' in config:
containers[service]['ports'] = {config['ports'][0].split(':')[0]: 'HTTP'}

with open('aws-lightsail-container.json', 'w') as outfile:
json.dump(containers, outfile)
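
A minimal sketch of how the script's mapping behaves, using an in-memory compose definition instead of reading docker-compose.yml; the two services and their settings below are illustrative, not the repository's actual compose file.

# Hypothetical example: run the converter's logic on an in-memory compose
# definition and inspect the Lightsail container map it would produce.
# Service names, environment, and ports here are illustrative only.
import json

docker_compose = {
    'services': {
        'fastapi': {
            'image': 'lacmta/metro-api-v2:fastapi',
            'environment': {'RUNNING_ENV': 'dev'},
            'ports': ['80:80'],
        },
        'redis': {
            'image': 'redis:latest',
            # no 'ports' entry -> no "ports" key in the output
        },
    }
}

containers = {}
for service, config in docker_compose['services'].items():
    containers[service] = {
        'image': config['image'],
        'environment': config.get('environment', {}),
    }
    if 'ports' in config:
        # keep only the host port and assume HTTP, mirroring the script above
        containers[service]['ports'] = {config['ports'][0].split(':')[0]: 'HTTP'}

print(json.dumps(containers, indent=2))
# {"fastapi": {"image": "lacmta/metro-api-v2:fastapi",
#              "environment": {"RUNNING_ENV": "dev"},
#              "ports": {"80": "HTTP"}},
#  "redis": {"image": "redis:latest", "environment": {}}}

Since the generated aws-lightsail-container.json is now git-ignored (see the .gitignore change above), it presumably serves as a local convenience for assembling the --containers payload used by the Lightsail deployment steps in the workflows.
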
23 changes: 16 additions & 7 deletions data-loading-service/Dockerfile
@@ -1,24 +1,33 @@
# syntax=docker/dockerfile:1.3
FROM python:3.11

FROM python:3.9-slim-buster
# 2161.3s
#
# WORKDIR /code
WORKDIR /app

#
COPY ./requirements.txt requirements.txt
COPY requirements.txt .
# ARG FTP_SERVER
# COPY use-secret.sh .
# RUN --mount=type=secret,id=ftp_server ./use-secret.sh
# RUN --mount=type=secret,id=ftp_server ./use_secret.sh
# RUN pip install --upgrade cython

RUN apt-get update && apt-get install -y \
libgeos-dev
gcc \
git \
tk \
musl-dev \
linux-headers-amd64 \
postgresql \
python3-dev \
proj-bin \
libgeos-dev \
&& rm -rf /var/lib/apt/lists/* \
&& pip install --no-cache-dir -r requirements.txt

RUN pip install --no-cache-dir --upgrade -r requirements.txt
#

COPY ./app /app
COPY . .
# COPY ../appdata /appdata
# COPY .git /code/.git
#
2 changes: 1 addition & 1 deletion data-loading-service/requirements.txt
@@ -6,7 +6,7 @@ requests
python-dotenv
# pytz
# python-multipart
psycopg2
psycopg2-binary
schedule
geopandas
GeoAlchemy2
29 changes: 18 additions & 11 deletions fastapi/Dockerfile
@@ -1,26 +1,33 @@
# syntax=docker/dockerfile:1.3
FROM python:3.11
FROM python:3.9-slim-buster

WORKDIR /app

#
# WORKDIR /code
COPY requirements.txt .

#
COPY ./requirements.txt requirements.txt
# ARG FTP_SERVER
# COPY use-secret.sh .
# RUN --mount=type=secret,id=ftp_server ./use-secret.sh
# RUN --mount=type=secret,id=ftp_server ./use_secret.sh

RUN apt-get update && apt-get install -y \
libgeos-dev


RUN pip install --no-cache-dir --upgrade -r requirements.txt
#



COPY ./app /app
RUN apt-get update && apt-get install -y \
gcc \
git \
tk \
musl-dev \
linux-headers-amd64 \
postgresql \
python3-dev \
proj-bin \
libgeos-dev \
&& rm -rf /var/lib/apt/lists/* \
&& pip install --no-cache-dir -r requirements.txt

COPY . .
# COPY ../appdata ../appdata
# COPY .git /code/.git
# VOLUME /appdata
35 changes: 34 additions & 1 deletion fastapi/app/crud.py
@@ -19,7 +19,7 @@
from shapely.geometry import Point
from shapely import geometry as geo
# from shapely import to_geojson
from app import gtfs_models
# from app import gtfs_models

from . import models, schemas,gtfs_models
from .config import Config
@@ -31,6 +31,20 @@
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")

import aioredis

from sqlalchemy.orm import Session
from sqlalchemy import distinct

redis = aioredis.from_url("redis://localhost:6379", decode_responses=True, encoding='utf-8', socket_connect_timeout=5)

def get_all_data(db: Session, model, agency_id):
print(db.query(model).filter(model.agency_id == agency_id).all())
return db.query(model).filter(model.agency_id == agency_id).all()

def get_unique_keys(db: Session, model, key_column, agency_id):
return db.query(distinct(model.__dict__[key_column])).filter(model.agency_id == agency_id).all()

# stop_times utils
def get_stop_times_by_route_code(db, route_code: str,agency_id: str):
if route_code == 'list':
@@ -352,6 +366,25 @@ async def get_gtfs_rt_line_detail_updates_for_route_code(session,route_code: str
else:
yield result

async def get_gtfs_rt_vehicle_positions_trip_data_redis(db, vehicle_id: str):
# Create a unique key for this vehicle_id
key = f'vehicle:{vehicle_id}'

# Try to get data from Redis
data = await redis.get(key)

if data is None:
# If data is not in Redis, get it from the database
result = db.query(gtfs_models.VehiclePosition).filter(gtfs_models.VehiclePosition.vehicle_id == vehicle_id).all()

if not result:
return None

# Convert the result to JSON and store it in Redis
data = json.dumps([dict(row) for row in result])
await redis.set(key, data)

return data

def get_gtfs_rt_vehicle_positions_trip_data(db,vehicle_id: str,geojson:bool,agency_id: str):
result = []
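
The Redis additions in crud.py follow a cache-aside pattern: check Redis for a per-vehicle key, fall back to the database on a miss, and store the serialized rows for later reads. Below is a minimal usage sketch, assuming a FastAPI route and a get_db session dependency that are not part of this commit; the expiry mentioned in the closing comment is likewise only a suggestion, since the code above stores the value without a TTL.

# Hypothetical usage sketch (not part of this commit): a FastAPI route that
# serves vehicle positions through the Redis-backed helper in crud.py.
# The route path and the app.database.get_db dependency are assumptions.
import json

from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session

from app import crud
from app.database import get_db  # assumed location of the session dependency

router = APIRouter()

@router.get("/vehicle_positions/{vehicle_id}")
async def read_vehicle_positions(vehicle_id: str, db: Session = Depends(get_db)):
    # Cache-aside lookup: Redis first, database on a miss.
    data = await crud.get_gtfs_rt_vehicle_positions_trip_data_redis(db, vehicle_id)
    if data is None:
        raise HTTPException(status_code=404, detail="vehicle not found")
    return json.loads(data)

# If stale positions become a concern, the write in crud.py could pass an
# expiry, e.g. await redis.set(key, data, ex=30), so cached entries age out.
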
