From 9ebe806381a7f0d2cdd2f9d02d0c3e1c2ff01168 Mon Sep 17 00:00:00 2001
From: Aaron Gundel
Date: Wed, 23 Oct 2024 09:14:28 -0600
Subject: [PATCH] Adds test deployment

---
 .github/workflows/build-ci-container.yml     |  81 +++++++
 arches_lingo/settings.py                     | 147 ++++++++---
 docker/arches-lingo-supervisor.conf          |  24 ++
 docker/conf.d/arches-lingo-celerybeat.conf   |  22 ++
 docker/conf.d/arches-lingo-celeryd.conf      |  26 ++
 .../task-definition-reset-database.json      |  91 +++++++
 docker/deploy/task-definition.json           |  88 +++++++
 docker/entrypoint.sh                         | 229 ++++++++++++++++++
 docker/env_file.env                          |  24 ++
 docker/nginx/default.conf                    |  45 ++++
 docker/production/Dockerfile                 |  82 +++++++
 docker/production/entrypoint.sh              | 229 ++++++++++++++++++
 docker/production/env_file.env               |  31 +++
 docker/settings_docker.py                    |  64 +++++
 docker/settings_local.py                     |   3 +
 docker/sql_env.env                           |   2 +
 docker/webpack/Dockerfile                    |  26 ++
 docker/webpack/env_file.env                  |  33 +++
 18 files changed, 1207 insertions(+), 40 deletions(-)
 create mode 100644 .github/workflows/build-ci-container.yml
 create mode 100644 docker/arches-lingo-supervisor.conf
 create mode 100644 docker/conf.d/arches-lingo-celerybeat.conf
 create mode 100644 docker/conf.d/arches-lingo-celeryd.conf
 create mode 100644 docker/deploy/task-definition-reset-database.json
 create mode 100644 docker/deploy/task-definition.json
 create mode 100644 docker/entrypoint.sh
 create mode 100644 docker/env_file.env
 create mode 100644 docker/nginx/default.conf
 create mode 100644 docker/production/Dockerfile
 create mode 100644 docker/production/entrypoint.sh
 create mode 100644 docker/production/env_file.env
 create mode 100644 docker/settings_docker.py
 create mode 100644 docker/settings_local.py
 create mode 100644 docker/sql_env.env
 create mode 100644 docker/webpack/Dockerfile
 create mode 100644 docker/webpack/env_file.env

diff --git a/.github/workflows/build-ci-container.yml b/.github/workflows/build-ci-container.yml
new file mode 100644
index 00000000..8989ad66
--- /dev/null
+++ b/.github/workflows/build-ci-container.yml
@@ -0,0 +1,81 @@
+name: Build Container Image
+
+on:
+  push:
+    branches:
+      - "test/**"
+      - "deploy"
+  repository_dispatch:
+    types:
+      - deploy_project
+jobs:
+  build:
+    name: Build Docker Image and Push
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          path: arches-lingo
+
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: us-west-1
+
+      - name: Login to Amazon ECR
+        id: login-ecr
+        uses: aws-actions/amazon-ecr-login@v2
+
+      - name: Build, tag, and push image to Amazon ECR
+        id: build-image
+        env:
+          ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
+          ECR_REPOSITORY: lingo-repository
+          IMAGE_TAG: ${{ github.sha }}
+        run: |
+          # Build a docker container and
+          # push it to ECR so that it can
+          # be deployed to ECS.
+          docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG -f arches-lingo/docker/production/Dockerfile ./arches-lingo
+          docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG
+          echo "image=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> "$GITHUB_OUTPUT"
+
+      - name: Fill in the new image ID in the Amazon ECS task definition for service
+        id: task-def
+        uses: aws-actions/amazon-ecs-render-task-definition@v1
+        with:
+          task-definition: arches-lingo/docker/deploy/task-definition.json
+          container-name: arches
+          image: ${{ steps.build-image.outputs.image }}
+
+      - name: Fill in the new image ID in the Amazon ECS task definition to reset db
+        id: task-def-run-reset
+        uses: aws-actions/amazon-ecs-render-task-definition@v1
+        with:
+          task-definition: arches-lingo/docker/deploy/task-definition-reset-database.json
+          container-name: arches
+          image: ${{ steps.build-image.outputs.image }}
+
+      - name: Deploy Amazon ECS task definition for the db reset task
+        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
+        id: task-def-deploy-reset
+        with:
+          task-definition: ${{ steps.task-def-run-reset.outputs.task-definition }}
+          cluster: lingo-cluster
+
+      - name: Reset database
+        id: run-reset-task
+        run: |
+          # Run the rendered reset-database task definition
+          # as a one-off Fargate task against the cluster.
+          aws ecs run-task --cluster lingo-cluster --task-definition ${{ steps.task-def-deploy-reset.outputs.task-definition-arn }} --count 1 --launch-type FARGATE --network-configuration "awsvpcConfiguration={subnets=['subnet-0d66c5e9e3c174519','subnet-0ad21fa3dbc479860'],securityGroups=['sg-04ca486cc6239259b']}"
+
+      - name: Deploy Amazon ECS task definition to arches service
+        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
+        with:
+          task-definition: ${{ steps.task-def.outputs.task-definition }}
+          service: lingo-arches-service
+          cluster: lingo-cluster
diff --git a/arches_lingo/settings.py b/arches_lingo/settings.py
index efff3068..068cefce 100644
--- a/arches_lingo/settings.py
+++ b/arches_lingo/settings.py
@@ -9,6 +9,7 @@
 import inspect
 import semantic_version
 from datetime import datetime, timedelta
+from django.core.exceptions import ImproperlyConfigured
 from django.utils.translation import gettext_lazy as _
 
 try:
@@ -16,9 +17,94 @@
 except ImportError:
     pass
 
+
+def get_env_variable(var_name):
+    msg = "Set the %s environment variable"
+    try:
+        return os.environ[var_name]
+    except KeyError:
+        error_msg = msg % var_name
+        raise ImproperlyConfigured(error_msg)
+
+
+def get_optional_env_variable(var_name, default=None) -> str | None:
+    try:
+        return os.environ[var_name]
+    except KeyError:
+        return default
+
+
 APP_NAME = "arches_lingo"
-APP_VERSION = semantic_version.Version(major=0, minor=0, patch=0)
+SECRETS_MODE = get_optional_env_variable("ARCHES_SECRETS_MODE", "ENV")
+
 APP_ROOT = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
+DB_NAME = get_optional_env_variable("ARCHES_DB_NAME", APP_NAME)
+DB_USER = get_optional_env_variable("ARCHES_PGUSERNAME", "postgres")
+DB_PASSWORD = get_optional_env_variable("ARCHES_PGPASSWORD", "postgis")
+DB_HOST = get_optional_env_variable("ARCHES_PGHOST", "localhost")
+DB_PORT = get_optional_env_variable("ARCHES_PGPORT", "5432")
+ES_USER = get_optional_env_variable("ARCHES_ESUSER", "elastic")
+ES_PASSWORD = get_optional_env_variable("ARCHES_ESPASSWORD", "E1asticSearchforArche5")
+ES_HOST = get_optional_env_variable("ARCHES_ESHOST", "localhost")
+ES_PORT = int(get_optional_env_variable("ARCHES_ESPORT", "9200"))
+WEBPACK_DEVELOPMENT_SERVER_PORT = int(get_optional_env_variable("ARCHES_WEBPACKDEVELOPMENTSERVERPORT", "8022"))
+ES_PROTOCOL = get_optional_env_variable("ARCHES_ESPROTOCOL", "http")
+ES_VALIDATE_CERT = get_optional_env_variable("ARCHES_ESVALIDATE", "True") == "True"
+DEBUG = get_optional_env_variable("ARCHES_DJANGO_DEBUG", "False") == "True"
+KIBANA_URL = get_optional_env_variable("ARCHES_KIBANA_URL", "http://localhost:5601/")
+KIBANA_CONFIG_BASEPATH = get_optional_env_variable("ARCHES_KIBANACONFIGBASEPATH", "kibana")
+RESOURCE_IMPORT_LOG = get_optional_env_variable("ARCHES_RESOURCEIMPORTLOG", os.path.join(APP_ROOT, "logs", "resource_import.log"))
+ARCHES_LOG_PATH = get_optional_env_variable("ARCHES_LOGPATH", os.path.join(ROOT_DIR, "arches.log"))
+
+STORAGE_BACKEND = get_optional_env_variable("ARCHES_STORAGEBACKEND", "django.core.files.storage.FileSystemStorage")
+
+if STORAGE_BACKEND == "storages.backends.s3.S3Storage":
+    import psutil
+
+    STORAGE_OPTIONS = {
+        "bucket_name": get_env_variable("ARCHES_S3BUCKETNAME"),
+        "file_overwrite": get_optional_env_variable("ARCHES_S3FILEOVERWRITE", "True") == "True",
+        "signature_version": get_optional_env_variable("ARCHES_S3SIGNATUREVERSION", "s3v4"),
+        "region": get_optional_env_variable("ARCHES_S3REGION", "us-west-1"),
+        "max_memory_size": get_optional_env_variable("ARCHES_S3MAXMEMORY", str(psutil.virtual_memory().available * 0.5)),
+    }
+else:
+    STORAGE_OPTIONS = {}
+
+STORAGES = {
+    "default": {
+        "BACKEND": STORAGE_BACKEND,
+        "OPTIONS": STORAGE_OPTIONS,
+    },
+    "staticfiles": {
+        "BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
+    },
+}
+
+if SECRETS_MODE == "AWS":
+    try:
+        import boto3
+        import json
+
+        AWS_REGION = get_optional_env_variable("ARCHES_AWS_REGION", "us-west-1")
+        ES_SECRET_ID = get_env_variable("ARCHES_ES_SECRET_ID")
+        DB_SECRET_ID = get_env_variable("ARCHES_DB_SECRET_ID")
+        client = boto3.client("secretsmanager", region_name=AWS_REGION)
+        es_secret = json.loads(client.get_secret_value(SecretId=ES_SECRET_ID)["SecretString"])
+        db_secret = json.loads(client.get_secret_value(SecretId=DB_SECRET_ID)["SecretString"])
+        DB_NAME = APP_NAME
+        DB_USER = db_secret["username"]
+        DB_PASSWORD = db_secret["password"]
+        DB_HOST = db_secret["host"]
+        DB_PORT = db_secret["port"]
+        ES_USER = es_secret["user"]
+        ES_PASSWORD = es_secret["password"]
+        ES_HOST = es_secret["host"]
+    except (ModuleNotFoundError, ImportError):
+        pass
+
+
+APP_VERSION = semantic_version.Version(major=0, minor=0, patch=0)
 
 WEBPACK_LOADER = {
@@ -54,18 +140,19 @@
 UPLOADED_FILES_DIR = "uploadedfiles"
 
 # SECURITY WARNING: keep the secret key used in production secret!
-SECRET_KEY = "--+c7*txnosqv=flep00qp+=t-xhrj%f4==r8w*n_7pm@mi%)7"
+SECRET_KEY = get_optional_env_variable("ARCHES_SECRET_KEY", "--+c7*txnosqv=flep00qp+=t-xhrj%f4==r8w*n_7pm@mi%)7")
 
 # SECURITY WARNING: don't run with debug turned on in production!
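+# NOTE: the hard-coded DEBUG = True just below overrides the ARCHES_DJANGO_DEBUG
+# value computed above; remove or guard it when building production images.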
DEBUG = True ROOT_URLCONF = "arches_lingo.urls" +ELASTICSEARCH_HOSTS = [{"scheme": ES_PROTOCOL, "host": ES_HOST, "port": ES_PORT}] # Modify this line as needed for your project to connect to elasticsearch with a password that you generate ELASTICSEARCH_CONNECTION_OPTIONS = { - "request_timeout": 30, - "verify_certs": False, - "basic_auth": ("elastic", "E1asticSearchforArche5"), + "timeout": 30, + "verify_certs": ES_VALIDATE_CERT, + "basic_auth": (ES_USER, ES_PASSWORD), } # If you need to connect to Elasticsearch via an API key instead of username/password, use the syntax below: @@ -81,18 +168,9 @@ # Or Kibana: https://www.elastic.co/guide/en/kibana/current/api-keys.html # a prefix to append to all elasticsearch indexes, note: must be lower case -ELASTICSEARCH_PREFIX = "arches_lingo" +ELASTICSEARCH_PREFIX = get_optional_env_variable("ARCHES_ES_INDEX_PREFIX", APP_NAME) ELASTICSEARCH_CUSTOM_INDEXES = [] -# [{ -# 'module': 'arches_lingo.search_indexes.sample_index.SampleIndex', -# 'name': 'my_new_custom_index', <-- follow ES index naming rules -# 'should_update_asynchronously': False <-- denotes if asynchronously updating the index would affect custom functionality within the project. -# }] - -KIBANA_URL = "http://localhost:5601/" -KIBANA_CONFIG_BASEPATH = "kibana" # must match Kibana config.yml setting (server.basePath) but without the leading slash, -# also make sure to set server.rewriteBasePath: true LOAD_DEFAULT_ONTOLOGY = False LOAD_PACKAGE_ONTOLOGIES = True @@ -100,7 +178,8 @@ # This is the namespace to use for export of data (for RDF/XML for example) # It must point to the url where you host your site # Make sure to use a trailing slash -ARCHES_NAMESPACE_FOR_DATA_EXPORT = "http://localhost:8000/" +PUBLIC_SERVER_ADDRESS = get_optional_env_variable("ARCHES_PUBLIC_SERVER_ADDRESS", "http://localhost:8000/") +ARCHES_NAMESPACE_FOR_DATA_EXPORT = get_optional_env_variable("ARCHES_NAMESPACE_FOR_DATA_EXPORT", PUBLIC_SERVER_ADDRESS) DATABASES = { "default": { @@ -108,17 +187,17 @@ "AUTOCOMMIT": True, "CONN_MAX_AGE": 0, "ENGINE": "django.contrib.gis.db.backends.postgis", - "HOST": "localhost", - "NAME": "arches_lingo", "OPTIONS": { "options": "-c cursor_tuple_fraction=1", }, - "PASSWORD": "postgis", - "PORT": "5432", + "HOST": DB_HOST, + "NAME": DB_NAME, + "PASSWORD": DB_PASSWORD, + "PORT": DB_PORT, "POSTGIS_TEMPLATE": "template_postgis", "TEST": {"CHARSET": None, "COLLATION": None, "MIRROR": None, "NAME": None}, "TIME_ZONE": None, - "USER": "postgres", + "USER": DB_USER, } } @@ -171,13 +250,9 @@ # "silk.middleware.SilkyMiddleware", ] -MIDDLEWARE.insert( # this must resolve to first MIDDLEWARE entry - 0, "django_hosts.middleware.HostsRequestMiddleware" -) +MIDDLEWARE.insert(0, "django_hosts.middleware.HostsRequestMiddleware") # this must resolve to first MIDDLEWARE entry -MIDDLEWARE.append( # this must resolve last MIDDLEWARE entry - "django_hosts.middleware.HostsResponseMiddleware" -) +MIDDLEWARE.append("django_hosts.middleware.HostsResponseMiddleware") # this must resolve last MIDDLEWARE entry STATICFILES_DIRS = build_staticfiles_dirs(app_root=APP_ROOT) @@ -186,11 +261,9 @@ app_root=APP_ROOT, ) -ALLOWED_HOSTS = [] +ALLOWED_HOSTS = get_optional_env_variable("ARCHES_ALLOWED_HOSTS", "*").split(',') -SYSTEM_SETTINGS_LOCAL_PATH = os.path.join( - APP_ROOT, "system_settings", "System_Settings.json" -) +SYSTEM_SETTINGS_LOCAL_PATH = os.path.join(APP_ROOT, "system_settings", "System_Settings.json") WSGI_APPLICATION = "arches_lingo.wsgi.application" # URL that handles the media served from MEDIA_ROOT, used 
for managing stored files.
@@ -277,9 +350,7 @@
 BYPASS_UNIQUE_CONSTRAINT_TILE_VALIDATION = False
 BYPASS_REQUIRED_VALUE_TILE_VALIDATION = False
 
-DATE_IMPORT_EXPORT_FORMAT = (
-    "%Y-%m-%d"  # Custom date format for dates imported from and exported to csv
-)
+DATE_IMPORT_EXPORT_FORMAT = "%Y-%m-%d"  # Custom date format for dates imported from and exported to csv
 
 # This is used to indicate whether the data in the CSV and SHP exports should be
 # ordered as seen in the resource cards or not.
@@ -319,9 +390,7 @@
 CELERY_BROKER_URL = ""  # RabbitMQ --> "amqp://guest:guest@localhost", Redis --> "redis://localhost:6379/0"
 CELERY_ACCEPT_CONTENT = ["json"]
-CELERY_RESULT_BACKEND = (
-    "django-db"  # Use 'django-cache' if you want to use your cache as your backend
-)
+CELERY_RESULT_BACKEND = "django-db"  # Use 'django-cache' if you want to use your cache as your backend
 
 CELERY_TASK_SERIALIZER = "json"
 
@@ -387,9 +456,7 @@
 # Dictionary containing any additional context items for customising email templates
 EXTRA_EMAIL_CONTEXT = {
     "salutation": _("Hi"),
-    "expiration": (
-        datetime.now() + timedelta(seconds=CELERY_SEARCH_EXPORT_EXPIRES)
-    ).strftime("%A, %d %B %Y"),
+    "expiration": (datetime.now() + timedelta(seconds=CELERY_SEARCH_EXPORT_EXPIRES)).strftime("%A, %d %B %Y"),
 }
 
 # see https://docs.djangoproject.com/en/1.9/topics/i18n/translation/#how-django-discovers-language-preference
diff --git a/docker/arches-lingo-supervisor.conf b/docker/arches-lingo-supervisor.conf
new file mode 100644
index 00000000..a7eae1db
--- /dev/null
+++ b/docker/arches-lingo-supervisor.conf
@@ -0,0 +1,24 @@
+[unix_http_server]
+  file=/tmp/supervisor.sock ; path to your socket file
+  chmod=7770
+
+[supervisord]
+  logfile=/var/log/supervisor/supervisord.log ; supervisord log file
+  logfile_maxbytes=50MB ; maximum size of logfile before rotation
+  logfile_backups=10 ; number of backed up logfiles
+  loglevel=info ; info, debug, warn, trace
+  pidfile=/var/run/supervisord.pid ; pidfile location
+  nodaemon=false ; run supervisord as a daemon
+  minfds=1024 ; number of startup file descriptors
+  minprocs=200 ; number of process descriptors
+  user=root ; defaults to whichever user runs supervisord
+  childlogdir=/var/log/supervisor/ ; where child log files will live
+
+[rpcinterface:supervisor]
+  supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
+
+[supervisorctl]
+  serverurl=unix:///tmp/supervisor.sock ; use the unix:// scheme for unix sockets
+
+[include]
+  files=./conf.d/arches-lingo-celeryd.conf ./conf.d/arches-lingo-celerybeat.conf
diff --git a/docker/conf.d/arches-lingo-celerybeat.conf b/docker/conf.d/arches-lingo-celerybeat.conf
new file mode 100644
index 00000000..8b6839ca
--- /dev/null
+++ b/docker/conf.d/arches-lingo-celerybeat.conf
@@ -0,0 +1,22 @@
+; ================================
+; celery beat supervisor
+; ================================
+
+[program:celerybeat]
+  command=python3 -m celery -A arches_lingo.celery beat --loglevel=INFO
+  directory=/web_root/arches-lingo
+
+  user=root
+  numprocs=1
+  stdout_logfile=/var/log/celery/beat.log
+  stderr_logfile=/var/log/celery/beat.log
+  autostart=true
+  autorestart=true
+  startsecs=10
+
+  ; Causes supervisor to send the termination signal (SIGTERM) to the whole process group.
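+  ; Without it, only the immediate celery beat process would be signalled and any
+  ; children it forked could be left running after supervisord stops the program.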
+  stopasgroup=true
+
+  ; if rabbitmq is supervised, set its priority higher
+  ; so it starts first
+  priority=999
diff --git a/docker/conf.d/arches-lingo-celeryd.conf b/docker/conf.d/arches-lingo-celeryd.conf
new file mode 100644
index 00000000..5a66a3fa
--- /dev/null
+++ b/docker/conf.d/arches-lingo-celeryd.conf
@@ -0,0 +1,26 @@
+; ==================================
+; celery worker supervisor
+; ==================================
+
+[program:celery]
+  command=python3 -m celery -A arches_lingo.celery worker --loglevel=INFO
+  directory=/web_root/arches-lingo
+
+  user=root
+  numprocs=1
+  stdout_logfile=/var/log/celery/worker.log
+  stderr_logfile=/var/log/celery/worker.log
+  autostart=true
+  autorestart=true
+  startsecs=10
+
+  ; Need to wait for currently executing tasks to finish at shutdown.
+  ; Increase this if you have very long running tasks.
+  stopwaitsecs = 600
+
+  ; Causes supervisor to send the termination signal (SIGTERM) to the whole process group.
+  stopasgroup=true
+
+  ; Set Celery priority higher than default (999)
+  ; so, if rabbitmq is supervised, it will start first.
+  priority=1000
diff --git a/docker/deploy/task-definition-reset-database.json b/docker/deploy/task-definition-reset-database.json
new file mode 100644
index 00000000..70a208eb
--- /dev/null
+++ b/docker/deploy/task-definition-reset-database.json
@@ -0,0 +1,91 @@
+{
+    "requiresCompatibilities": [
+        "FARGATE"
+    ],
+    "inferenceAccelerators": [],
+    "containerDefinitions": [
+        {
+            "name": "arches",
+            "image": "ecs-devops-sandbox-repository:00000",
+            "resourceRequirements": null,
+            "essential": true,
+            "portMappings": [
+                {
+                    "containerPort": 443,
+                    "protocol": "tcp"
+                }
+            ],
+            "healthCheck": {
+                "command": [
+                    "CMD-SHELL",
+                    "test $(ps aux | grep -c manage.py) -gt 1 || exit 1"
+                ]
+            },
+            "command": [
+                "reset_database"
+            ],
+            "environment": [
+                {
+                    "name": "ARCHES_ES_SECRET_ID",
+                    "value": "prod/lingo/elastic"
+                },
+                {
+                    "name": "ARCHES_DB_SECRET_ID",
+                    "value": "archesDbSecretC996653C-9RvtCl4Ud1ud"
+                },
+                {
+                    "name": "ARCHES_PUBLIC_SERVER_ADDRESS",
+                    "value": "https://lingo.dev.fargeo.com"
+                },
+                {
+                    "name": "ARCHES_SECRETS_MODE",
+                    "value": "AWS"
+                },
+                {
+                    "name": "ARCHES_S3FILEOVERWRITE",
+                    "value": "False"
+                },
+                {
+                    "name": "ARCHES_ALLOWED_HOSTS",
+                    "value": "*"
+                },
+                {
+                    "name": "ARCHES_ESPROTOCOL",
+                    "value": "https"
+                },
+                {
+                    "name": "ARCHES_ESVALIDATE",
+                    "value": "False"
+                },
+                {
+                    "name": "ARCHES_PROJECT",
+                    "value": "arches-lingo"
+                },
+                {
+                    "name": "ARCHES_STORAGEBACKEND",
+                    "value": "storages.backends.s3.S3Storage"
+                },
+                {
+                    "name": "ARCHES_S3BUCKETNAME",
+                    "value": "fargeo-lingo-media-files"
+                }
+            ],
+            "logConfiguration": {
+                "logDriver": "awslogs",
+                "options": {
+                    "awslogs-stream-prefix": "arches",
+                    "awslogs-group": "lingo-base-infrastructure-lingocontainerloggroup79B923D7-o5bIiMZjbylI",
+                    "awslogs-region": "us-west-1"
+                }
+            }
+        }
+    ],
+    "volumes": [],
+    "networkMode": "awsvpc",
+    "memory": "8192",
+    "cpu": "4096",
+    "executionRoleArn": "arn:aws:iam::889276910795:role/arches-execution-role-lingo",
+    "family": "reset-database-container-lingo",
+    "taskRoleArn": "arn:aws:iam::889276910795:role/arches-task-role-lingo",
+    "placementConstraints": []
+}
\ No newline at end of file
diff --git a/docker/deploy/task-definition.json b/docker/deploy/task-definition.json
new file mode 100644
index 00000000..5a94ec76
--- /dev/null
+++ b/docker/deploy/task-definition.json
@@ -0,0 +1,88 @@
+{
+    "requiresCompatibilities": [
+        "FARGATE"
+    ],
+    "inferenceAccelerators": [],
+    "containerDefinitions": [
+        {
+            "name": "arches",
+            "image": "ecs-devops-sandbox-repository:00000",
+            "resourceRequirements": null,
+            "essential": true,
+            "portMappings": [
+                {
+                    "containerPort": 443,
+                    "protocol": "tcp"
+                }
+            ],
+            "environment": [
+                {
+                    "name": "ARCHES_ES_SECRET_ID",
+                    "value": "prod/lingo/elastic"
+                },
+                {
+                    "name": "ARCHES_DB_SECRET_ID",
+                    "value": "archesDbSecretC996653C-9RvtCl4Ud1ud"
+                },
+                {
+                    "name": "ARCHES_PUBLIC_SERVER_ADDRESS",
+                    "value": "https://lingo.dev.fargeo.com"
+                },
+                {
+                    "name": "ARCHES_SECRETS_MODE",
+                    "value": "AWS"
+                },
+                {
+                    "name": "ARCHES_ALLOWED_HOSTS",
+                    "value": "*"
+                },
+                {
+                    "name": "ARCHES_ESVALIDATE",
+                    "value": "False"
+                },
+                {
+                    "name": "ARCHES_PROJECT",
+                    "value": "arches-lingo"
+                },
+                {
+                    "name": "ARCHES_ESPROTOCOL",
+                    "value": "https"
+                },
+                {
+                    "name": "ARCHES_S3FILEOVERWRITE",
+                    "value": "False"
+                },
+                {
+                    "name": "ARCHES_STORAGEBACKEND",
+                    "value": "storages.backends.s3.S3Storage"
+                },
+                {
+                    "name": "ARCHES_S3BUCKETNAME",
+                    "value": "fargeo-1-lingo-media-files"
+                }
+            ],
+            "healthCheck": {
+                "command": [
+                    "CMD-SHELL",
+                    "curl --insecure -f https://localhost/ || exit 1"
+                ]
+            },
+            "logConfiguration": {
+                "logDriver": "awslogs",
+                "options": {
+                    "awslogs-stream-prefix": "arches",
+                    "awslogs-group": "lingo-base-infrastructure-lingocontainerloggroup79B923D7-o5bIiMZjbylI",
+                    "awslogs-region": "us-west-1"
+                }
+            }
+        }
+    ],
+    "volumes": [],
+    "networkMode": "awsvpc",
+    "memory": "8192",
+    "cpu": "4096",
+    "executionRoleArn": "arn:aws:iam::889276910795:role/arches-execution-role-lingo",
+    "family": "lingobaseinfrastructureesArchesTaskDefinition127C8007",
+    "taskRoleArn": "arn:aws:iam::889276910795:role/arches-task-role-lingo",
+    "placementConstraints": []
+}
\ No newline at end of file
diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh
new file mode 100644
index 00000000..34f10c3a
--- /dev/null
+++ b/docker/entrypoint.sh
@@ -0,0 +1,229 @@
+#!/bin/bash
+
+# APP and npm folder locations
+# ${WEB_ROOT} and ${ARCHES_ROOT} are defined in the Dockerfile, ${ARCHES_PROJECT} in env_file.env
+if [[ -z ${ARCHES_PROJECT} ]]; then
+    APP_FOLDER=${ARCHES_ROOT}
+    PACKAGE_JSON_FOLDER=${ARCHES_ROOT}
+else
+    APP_FOLDER=${WEB_ROOT}/${ARCHES_PROJECT_ROOT_DIRECTORY}
+    PACKAGE_JSON_FOLDER=${ARCHES_ROOT}
+fi
+
+# npm_MODULES_FOLDER=${PACKAGE_JSON_FOLDER}/$(awk \
+#     -F '--install.modules-folder' '{print $2}' ${PACKAGE_JSON_FOLDER}/.npmrc \
+#     | awk '{print $1}' \
+#     | tr -d $'\r' \
+#     | tr -d '"' \
+#     | sed -e "s/^\.\///g")
+
+# Environmental Variables
+export DJANGO_PORT=${DJANGO_PORT:-8000}
+
+# Utility functions that check db status
+wait_for_db() {
+    echo "Testing if database server is up..."
+    while [[ ! ${return_code} == 0 ]]
+    do
+        psql --host=${PGHOST} --port=${PGPORT} --user=${PGUSERNAME} --dbname=postgres -c "select 1" >&/dev/null
+        return_code=$?
+        sleep 1
+    done
+    echo "Database server is up"
+
+    echo "Testing if Elasticsearch is up..."
+    while [[ ! ${return_code} == 0 ]]
+    do
+        curl -s "http://${ESHOST}:${ESPORT}/_cluster/health?wait_for_status=green&timeout=60s" >&/dev/null
+        return_code=$?
+        sleep 1
+    done
+    echo "Elasticsearch is up"
+}
+
+db_exists() {
+    echo "Checking if database "${PGDBNAME}" exists..."
+    count=`psql --host=${PGHOST} --port=${PGPORT} --user=${PGUSERNAME} --dbname=postgres -Atc "SELECT COUNT(*) FROM pg_catalog.pg_database WHERE datname='${PGDBNAME}'"`
+
+    # Check if returned value is a number and not some error message
+    re='^[0-9]+$'
+    if ! [[ ${count} =~ $re ]] ; then
+        echo "Error: Something went wrong when checking if database "${PGDBNAME}" exists..." >&2;
+        echo "Exiting..."
+        exit 1
+    fi
+
+    # Return 0 (= true) if database exists
+    if [[ ${count} -gt 0 ]]; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+#### Install
+init_arches() {
+    echo "Checking if Arches project "${ARCHES_PROJECT}" exists..."
+    if [[ ! -d ${APP_FOLDER} ]] || [[ ! "$(ls ${APP_FOLDER})" ]]; then
+        echo ""
+        echo "----- Custom Arches project '${ARCHES_PROJECT}' does not exist. -----"
+        echo "----- Creating '${ARCHES_PROJECT}'... -----"
+        echo ""
+
+        cd ${WEB_ROOT}
+
+        arches-project create ${ARCHES_PROJECT}
+        run_setup_db
+
+        exit_code=$?
+        if [[ ${exit_code} != 0 ]]; then
+            echo "Something went wrong when creating your Arches project: ${ARCHES_PROJECT}."
+            echo "Exiting..."
+            exit ${exit_code}
+        fi
+    else
+        echo "Custom Arches project '${ARCHES_PROJECT}' exists."
+        wait_for_db
+        if db_exists; then
+            echo "Database ${PGDBNAME} already exists."
+            echo "Skipping Package Loading"
+        else
+            echo "Database ${PGDBNAME} does not exist yet."
+            run_load_package
+        fi
+    fi
+}
+
+# npm
+# install_npm_components() {
+#     if [[ ! -d ${npm_MODULES_FOLDER} ]] || [[ ! "$(ls ${npm_MODULES_FOLDER})" ]]; then
+#         echo "npm modules do not exist, installing..."
+#         cd ${PACKAGE_JSON_FOLDER}
+#         npm install
+#     fi
+# }
+
+#### Misc
+copy_settings_local() {
+    # The settings_local.py in ${ARCHES_ROOT}/arches/ gets ignored if running manage.py from a custom Arches project instead of Arches core app
+    echo "Copying ${APP_FOLDER}/docker/settings_docker.py to ${APP_FOLDER}/${ARCHES_PROJECT}/settings_docker.py..."
+    cp ${APP_FOLDER}/docker/settings_docker.py ${APP_FOLDER}/${ARCHES_PROJECT}/settings_docker.py
+
+    # Copy settings_local if it does not exist
+    cp -n ${APP_FOLDER}/docker/settings_local.py ${APP_FOLDER}/${ARCHES_PROJECT}/settings_local.py
+}
+
+#### Run commands
+
+start_celery_supervisor() {
+    cd ${APP_FOLDER}
+    supervisord -c docker/arches-lingo-supervisor.conf
+}
+
+run_migrations() {
+    echo ""
+    echo "----- RUNNING DATABASE MIGRATIONS -----"
+    echo ""
+    cd ${APP_FOLDER}
+    ../ENV/bin/python manage.py migrate
+}
+
+run_setup_db() {
+    echo ""
+    echo "----- RUNNING SETUP_DB -----"
+    echo ""
+    cd ${APP_FOLDER}
+    ../ENV/bin/python manage.py setup_db --force
+}
+
+run_load_package() {
+    echo ""
+    echo "----- *** LOADING PACKAGE: ${ARCHES_PROJECT} *** -----"
+    echo ""
+    cd ${APP_FOLDER}
+    ../ENV/bin/python manage.py packages -o load_package -s arches_lingo/pkg -db -dev -y
+}
+
+# "exec" replaces this shell with the server process so it receives signals (e.g. SIGTERM) directly
+run_django_server() {
+    echo ""
+    echo "----- *** RUNNING DJANGO DEVELOPMENT SERVER *** -----"
+    echo ""
+    cd ${APP_FOLDER}
+    echo "Running Django"
+    exec /bin/bash -c "source ../ENV/bin/activate && pip3 install debugpy -t /tmp && python -Wdefault /tmp/debugpy --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:${DJANGO_PORT}"
+}
+
+#### Main commands
+run_arches() {
+    init_arches
+    run_django_server
+}
+
+run_webpack() {
+    echo ""
+    echo "----- *** RUNNING WEBPACK DEVELOPMENT SERVER *** -----"
+    echo ""
+    cd ${APP_FOLDER}
+    # echo "Running Webpack"
+    eval `ssh-agent -s` && cat /run/secrets/ssh_passphrase | SSH_ASKPASS=/bin/cat setsid -w ssh-add 2>> /dev/null
+    exec /bin/bash -c "source ../ENV/bin/activate && cd /web_root/arches-lingo && npm i && wait-for-it arches-lingo:80 -t 1200 && npm start"
+}
+### Starting point ###
+
+# Use -gt 1 to consume two arguments per pass in the loop
+# (e.g. each argument has a corresponding value to go with it).
+# Use -gt 0 to consume one or more arguments per pass in the loop
+# (e.g. some arguments don't have a corresponding value to go with it, such as --help).
+
+# If no arguments are supplied, assume the server needs to be run
+if [[ $# -eq 0 ]]; then
+    start_celery_supervisor
+    wait_for_db
+    run_arches
+fi
+
+# Else, process arguments
+echo "Full command: $@"
+while [[ $# -gt 0 ]]
+do
+    key="$1"
+    echo "Command: ${key}"
+
+    case ${key} in
+        run_arches)
+            start_celery_supervisor
+            copy_settings_local
+            wait_for_db
+            run_arches
+            ;;
+        setup_arches)
+            start_celery_supervisor
+            copy_settings_local
+            wait_for_db
+            setup_arches
+            ;;
+        run_tests)
+            copy_settings_local
+            wait_for_db
+            run_tests
+            ;;
+        run_migrations)
+            copy_settings_local
+            wait_for_db
+            run_migrations
+            ;;
+        install_npm_components)
+            install_npm_components
+            ;;
+        help|-h)
+            display_help
+            ;;
+        *)
+            cd ${APP_FOLDER}
+            "$@"
+            exit 0
+            ;;
    esac
+    shift # next argument or value
+done
diff --git a/docker/env_file.env b/docker/env_file.env
new file mode 100644
index 00000000..68ccfbe1
--- /dev/null
+++ b/docker/env_file.env
@@ -0,0 +1,24 @@
+#arches
+ARCHES_PROJECT=arches-lingo
+ARCHES_PROJECT_ROOT_DIRECTORY=arches-lingo
+INSTALL_DEFAULT_GRAPHS=False
+INSTALL_DEFAULT_CONCEPTS=False
+PGUSERNAME=postgres
+PGPASSWORD=postgis
+PGDBNAME=$ARCHES_PROJECT
+PGHOST=postgres14-3_arches7-0
+PGPORT=5432
+ESHOST=elasticsearch8-3_arches7-0
+ESPORT=9200
+DJANGO_MODE=DEV
+DJANGO_DEBUG=True
+DOMAIN_NAMES=*
+DJANGO_PORT=80
+#DJANGO_REMOTE_DEBUG=False
+PYTHONUNBUFFERED=0
+TZ=PST8PDT
+ELASTICSEARCH_PREFIX=$ARCHES_PROJECT
+
+#rabbitmq
+RABBITMQ_USER=guest
+RABBITMQ_PASS=guest
\ No newline at end of file
diff --git a/docker/nginx/default.conf b/docker/nginx/default.conf
new file mode 100644
index 00000000..809398ee
--- /dev/null
+++ b/docker/nginx/default.conf
@@ -0,0 +1,45 @@
+server {
+    # use 'listen 80 deferred;' for Linux
+    # use 'listen 80 accept_filter=httpready;' for FreeBSD
+    listen 443 ssl;
+    client_max_body_size 4G;
+
+    ssl_certificate /etc/ssl/certs/nginx-selfsigned.crt;
+    ssl_certificate_key /etc/ssl/private/nginx-selfsigned.key;
+    ssl_session_cache shared:SSL:5m;
+    ssl_session_timeout 10m;
+    ssl_protocols TLSv1.2 TLSv1.3;
+    ssl_prefer_server_ciphers on;
+    #ssl_dhparam /etc/nginx/certs/dhparams.pem;
+    # use the line above if you generated a dhparams file
+    ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA';
+    ssl_buffer_size 8k;
+    keepalive_timeout 1;
+
+    # path for static files
+    location /static/ {
+        alias /var/www/media/;
+        autoindex off;
+    }
+
+    location / {
+        # checks for static file, if not found proxy to app
+        try_files $uri @proxy_to_app;
+    }
+
+    location @proxy_to_app {
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_set_header Host $host;
+        # we
don't want nginx trying to do something clever with + # redirects, we set the Host: header above already. + proxy_redirect off; + proxy_pass http://localhost:8000; + } + + error_page 500 502 503 504 /500.html; + location = /500.html { + root /path/to/app/current/public; + } +} \ No newline at end of file diff --git a/docker/production/Dockerfile b/docker/production/Dockerfile new file mode 100644 index 00000000..15b07bc2 --- /dev/null +++ b/docker/production/Dockerfile @@ -0,0 +1,82 @@ +FROM public.ecr.aws/l1p7h1f9/archesproject-fargeo:7.6.x-base-prod AS base +ENV APP_ROOT=${WEB_ROOT}/arches-lingo + +WORKDIR ${WEB_ROOT} +RUN apt-get install nginx -y && ENV/bin/pip install gunicorn botocore boto3 django-storages psutil + +COPY docker/nginx/default.conf /etc/nginx/sites-available/default + +RUN apt-get install wget -y && wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | gpg --dearmor -o /usr/share/keyrings/elasticsearch-keyring.gpg \ + && apt-get install apt-transport-https \ + && echo "deb [signed-by=/usr/share/keyrings/elasticsearch-keyring.gpg] https://artifacts.elastic.co/packages/8.x/apt stable main" | tee /etc/apt/sources.list.d/elastic-8.x.list \ + && apt-get update +RUN apt-get install sudo && echo 'Y' | adduser elasticsearch --disabled-password --ingroup "sudo" --gecos "First Last,RoomNumber,WorkPhone,HomePhone" && echo "elasticsearch ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers && su -c "sudo apt-get install elasticsearch" elasticsearch && exit +RUN apt-get install postgresql-contrib-14 postgresql-14-postgis-3 -y && sed -i '1s/^/local all all trust\n/' /etc/postgresql/14/main/pg_hba.conf && (service postgresql start) \ + && echo "ALTER USER postgres WITH PASSWORD 'postgis';CREATE DATABASE template_postgis;CREATE EXTENSION postgis;\\q" | psql -Upostgres + +RUN chown elasticsearch:elasticsearch /etc/elasticsearch -R && \ + chown elasticsearch:elasticsearch /usr/share/elasticsearch -R && \ + chown elasticsearch:elasticsearch /etc/default/elasticsearch + +RUN openssl ecparam -out /etc/ssl/private/nginx-selfsigned.key -name prime256v1 -genkey && openssl req -x509 -nodes -days 365 -newkey rsa:2048 -key /etc/ssl/private/nginx-selfsigned.key -out /etc/ssl/certs/nginx-selfsigned.crt -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=localhost" + +COPY . ${WEB_ROOT}/arches-lingo +RUN source ${WEB_ROOT}/ENV/bin/activate && cd ${APP_ROOT} && pip install . 
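+# From here to the end of this stage we stand up throwaway Postgres and Elasticsearch
+# services so that setup_db, the webpack build, and collectstatic can run at image build
+# time; the deploy stage below copies out only the resulting static assets.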
+ +WORKDIR ${APP_ROOT} + +RUN ESPASSWORD='$(echo "y" | sudo /usr/share/elasticsearch/bin/elasticsearch-reset-password -u elastic | grep "New value:" | cut -d " " -f 3)' && export ES_JAVA_OPTS="-Xmx1g" && \ + sed -i 's/xpack.security.enabled: true/xpack.security.enabled: false/g' /etc/elasticsearch/elasticsearch.yml && \ + sed -i 's/xpack.security.enrollment.enabled: true/xpack.security.enrollment.enabled: false/g' /etc/elasticsearch/elasticsearch.yml && \ + (su -c "/usr/share/elasticsearch/bin/elasticsearch" elasticsearch>/dev/null 2>&1&) && \ + (service postgresql start) && sleep 30 && source ../ENV/bin/activate && (python3 manage.py setup_db --force) + +RUN source ../ENV/bin/activate && service postgresql start && \ + (/etc/init.d/nginx start&) && (gunicorn arches_lingo.wsgi&) && npm i && \ + npm run build_development && python3 manage.py collectstatic + + +FROM public.ecr.aws/l1p7h1f9/archesproject-fargeo:7.6.x-base-prod AS deploy +USER root +ENV DEBIAN_FRONTEND=noninteractive +ENV WEB_ROOT=/web_root +ENV APP_ROOT=${WEB_ROOT}/arches-lingo +ENV BASE_WEB=/web_root +ENV BASE_APP=${BASE_WEB}/arches-lingo +COPY --from=base ${BASE_APP}/arches_lingo/staticfiles /var/www/media +COPY --from=base ${BASE_APP}/webpack/webpack-stats.json ${APP_ROOT}/webpack/webpack-stats.json +COPY --from=base /etc/ssl/private/nginx-selfsigned.key /etc/ssl/private/nginx-selfsigned.key +COPY --from=base /etc/ssl/certs/nginx-selfsigned.crt /etc/ssl/certs/nginx-selfsigned.crt + +RUN chgrp www-data /var/www/media -R +RUN apt-get update && apt-get install -y make software-properties-common && apt-get install -y ca-certificates curl gnupg nginx vim && apt-add-repository ppa:deadsnakes/ppa && apt-get update + +# Root project folder +ENV WHEELS=/wheels +ENV PYTHONUNBUFFERED=1 + +WORKDIR ${WEB_ROOT} + +# Install the Arches application +# FIXME: ADD from github repository instead? + +# From here, run commands from ARCHES_ROOT +RUN source ${WEB_ROOT}/ENV/bin/activate + +COPY docker/production/entrypoint.sh ${WEB_ROOT}/entrypoint.sh +RUN chmod -R 700 ${WEB_ROOT}/entrypoint.sh &&\ + dos2unix ${WEB_ROOT}/entrypoint.sh + +COPY . ${WEB_ROOT}/arches-lingo + +# Set default workdir +WORKDIR ${APP_ROOT} +COPY docker/nginx/default.conf /etc/nginx/sites-available/default +RUN source ${WEB_ROOT}/ENV/bin/activate && pip install . && pip install gunicorn boto boto3 django-storages psutil + +# # Set entrypoint +ENTRYPOINT ["../entrypoint.sh"] +CMD ["run_gunicorn"] + +# Expose port 8000 +EXPOSE 8000 diff --git a/docker/production/entrypoint.sh b/docker/production/entrypoint.sh new file mode 100644 index 00000000..810916c9 --- /dev/null +++ b/docker/production/entrypoint.sh @@ -0,0 +1,229 @@ +#!/bin/bash + +# APP and npm folder locations +# ${WEB_ROOT} and ${ARCHES_ROOT} is defined in the Dockerfile, ${ARCHES_PROJECT} in env_file.env +if [[ -z ${ARCHES_PROJECT} ]]; then + APP_FOLDER=${ARCHES_ROOT} + PACKAGE_JSON_FOLDER=${ARCHES_ROOT} +else + APP_FOLDER=${WEB_ROOT}/${ARCHES_PROJECT} + PACKAGE_JSON_FOLDER=${ARCHES_ROOT} +fi + +# Environmental Variables +export DJANGO_PORT=${DJANGO_PORT:-8000} + +#Utility functions that check db status +wait_for_db() { + echo "Testing if database server is up..." + while [[ ! ${return_code} == 0 ]] + do + psql --host=${PGHOST} --port=${PGPORT} --user=${PGUSERNAME} --dbname=postgres -c "select 1" >&/dev/null + return_code=$? + sleep 1 + done + echo "Database server is up" + + echo "Testing if Elasticsearch is up..." + while [[ ! 
${return_code} == 0 ]]
+    do
+        curl -s "http://${ESHOST}:${ESPORT}/_cluster/health?wait_for_status=green&timeout=60s" >&/dev/null
+        return_code=$?
+        sleep 1
+    done
+    echo "Elasticsearch is up"
+}
+
+db_exists() {
+    echo "Checking if database "${PGDBNAME}" exists..."
+    count=`psql --host=${PGHOST} --port=${PGPORT} --user=${PGUSERNAME} --dbname=postgres -Atc "SELECT COUNT(*) FROM pg_catalog.pg_database WHERE datname='${PGDBNAME}'"`
+
+    # Check if returned value is a number and not some error message
+    re='^[0-9]+$'
+    if ! [[ ${count} =~ $re ]] ; then
+        echo "Error: Something went wrong when checking if database "${PGDBNAME}" exists..." >&2;
+        echo "Exiting..."
+        exit 1
+    fi
+
+    # Return 0 (= true) if database exists
+    if [[ ${count} -gt 0 ]]; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+#### Install
+init_arches() {
+    echo "Checking if Arches project "${ARCHES_PROJECT}" exists..."
+    if [[ ! -d ${APP_FOLDER} ]] || [[ ! "$(ls ${APP_FOLDER})" ]]; then
+        echo ""
+        echo "----- Custom Arches project '${ARCHES_PROJECT}' does not exist. -----"
+        echo "----- Creating '${ARCHES_PROJECT}'... -----"
+        echo ""
+
+        cd ${WEB_ROOT}
+
+        arches-project create ${ARCHES_PROJECT}
+        run_setup_db
+
+        exit_code=$?
+        if [[ ${exit_code} != 0 ]]; then
+            echo "Something went wrong when creating your Arches project: ${ARCHES_PROJECT}."
+            echo "Exiting..."
+            exit ${exit_code}
+        fi
+    else
+        echo "Custom Arches project '${ARCHES_PROJECT}' exists."
+        wait_for_db
+        if db_exists; then
+            echo "Database ${PGDBNAME} already exists."
+            echo "Skipping Package Loading"
+        else
+            echo "Database ${PGDBNAME} does not exist yet."
+            run_load_package
+        fi
+    fi
+}
+
+#### Misc
+copy_settings_local() {
+    # The settings_local.py in ${ARCHES_ROOT}/arches/ gets ignored if running manage.py from a custom Arches project instead of Arches core app
+    echo "Copying ${APP_FOLDER}/docker/settings_docker.py to ${APP_FOLDER}/${ARCHES_PROJECT}/settings_docker.py..."
+    cp ${APP_FOLDER}/docker/settings_docker.py ${APP_FOLDER}/${ARCHES_PROJECT}/settings_docker.py
+
+    # Copy settings_local if it does not exist
+    cp -n ${APP_FOLDER}/docker/settings_local.py ${APP_FOLDER}/${ARCHES_PROJECT}/settings_local.py
+}
+
+#### Run commands
+
+start_celery_supervisor() {
+    cd ${APP_FOLDER}
+    supervisord -c docker/arches-lingo-supervisor.conf
+}
+
+run_migrations() {
+    echo ""
+    echo "----- RUNNING DATABASE MIGRATIONS -----"
+    echo ""
+    cd ${APP_FOLDER}
+    python3 manage.py migrate
+}
+
+run_setup_db() {
+    echo ""
+    echo "----- RUNNING SETUP_DB -----"
+    echo ""
+    cd ${APP_FOLDER}
+    python3 manage.py setup_db --force
+}
+
+run_load_package() {
+    echo ""
+    echo "----- *** LOADING PACKAGE: ${ARCHES_PROJECT} *** -----"
+    echo ""
+    cd ${APP_FOLDER}
+    python3 manage.py packages -o load_package -a arches_lingo -db -dev -y
+}
+
+# "exec" replaces this shell with the server process so it receives signals (e.g. SIGTERM) directly
+run_django_server() {
+    echo ""
+    echo "----- *** RUNNING DJANGO SERVER (gunicorn) *** -----"
+    echo ""
+    cd ${APP_FOLDER}
+    echo "Running Django"
+    exec /bin/bash -c "source ${WEB_ROOT}/ENV/bin/activate && gunicorn arches_lingo.wsgi"
+}
+
+# As above, "exec" hands off to the server process so signals are delivered to it directly
+run_gunicorn() { + echo "" + echo "----- *** RUNNING DJANGO PRODUCTION SERVER *** -----" + echo "" + cd ${APP_ROOT} + echo "Running Django" + exec /bin/bash -c "source ../ENV/bin/activate && (/etc/init.d/nginx start&) && gunicorn arches_lingo.wsgi" +} + + +reset_database() { + echo "" + echo "----- RESETTING DATABASE -----" + echo "" + cd ${APP_ROOT} + pwd && ../ENV/bin/python --version + (test $(echo "SELECT FROM pg_database WHERE datname = 'template_postgis'" | ../ENV/bin/python manage.py dbshell | grep -c "1 row") = 1 || \ + (echo "CREATE DATABASE template_postgis" | ../ENV/bin/python manage.py dbshell --database postgres && \ + echo "CREATE EXTENSION postgis" | ../ENV/bin/python manage.py dbshell --database postgres)) + ../ENV/bin/python manage.py setup_db --force + ../ENV/bin/python manage.py packages -o load_package -a arches_lingo -db -dev -y +} + +activate_virtualenv() { + . ${WEB_ROOT}/ENV/bin/activate +} + +#### Main commands +run_arches() { + run_django_server +} + +### Starting point ### + +# Use -gt 1 to consume two arguments per pass in the loop +# (e.g. each argument has a corresponding value to go with it). +# Use -gt 0 to consume one or more arguments per pass in the loop +# (e.g. some arguments don't have a corresponding value to go with it, such as --help ). + +# If no arguments are supplied, assume the server needs to be run +if [[ $# -eq 0 ]]; then + start_celery_supervisor + wait_for_db + run_arches +fi + +# Else, process arguments +echo "Full command: $@" +while [[ $# -gt 0 ]] +do + key="$1" + echo "Command: ${key}" + + case ${key} in + run_arches) + start_celery_supervisor + copy_settings_local + wait_for_db + run_arches + ;; + setup_arches) + start_celery_supervisor + copy_settings_local + wait_for_db + setup_arches + ;; + run_tests) + copy_settings_local + wait_for_db + run_tests + ;; + run_migrations) + copy_settings_local + wait_for_db + run_migrations + ;; + + help|-h) + display_help + ;; + *) + cd ${APP_FOLDER} + "$@" + exit 0 + ;; + esac + shift # next argument or value +done diff --git a/docker/production/env_file.env b/docker/production/env_file.env new file mode 100644 index 00000000..130d9c73 --- /dev/null +++ b/docker/production/env_file.env @@ -0,0 +1,31 @@ +#arches +ARCHES_PROJECT=arches-lingo +INSTALL_DEFAULT_GRAPHS=False +INSTALL_DEFAULT_CONCEPTS=False +PGUSERNAME=postgres +PGPASSWORD=postgis +PGDBNAME=disco +PGHOST=postgres14-3_arches7-0 +PGPORT=5432 +ESHOST=elasticsearch8-3_arches7-0 +ESPORT=9200 +DJANGO_MODE=DEV +DJANGO_DEBUG=True +DOMAIN_NAMES=* +DJANGO_PORT=80 +#DJANGO_REMOTE_DEBUG=False +PYTHONUNBUFFERED=0 +TZ=PST +ELASTICSEARCH_PREFIX=disco + +#rabbitmq +RABBITMQ_USER=guest +RABBITMQ_PASS=guest + +#cantaloupe +CANTALOUPE_ENDPOINT_ADMIN_ENABLED=true +CANTALOUPE_ENDPOINT_ADMIN_USERNAME=admin +CANTALOUPE_ENDPOINT_ADMIN_SECRET=admin +CANTALOUPE_HOST=cantaloupe_disco +CANTALOUPE_PORT=8182 +CANTALOUPE_FILESYSTEMSOURCE_BASICLOOKUPSTRATEGY_PATH_PREFIX=/imageroot/ diff --git a/docker/settings_docker.py b/docker/settings_docker.py new file mode 100644 index 00000000..495d3cbe --- /dev/null +++ b/docker/settings_docker.py @@ -0,0 +1,64 @@ +import os +from django.core.exceptions import ImproperlyConfigured +import ast + + +def get_env_variable(var_name): + msg = "Set the %s environment variable" + try: + return os.environ[var_name] + except KeyError: + error_msg = msg % var_name + raise ImproperlyConfigured(error_msg) + + +def get_optional_env_variable(var_name): + try: + return os.environ[var_name] + except KeyError: + return None + + +# options are 
either "PROD" or "DEV" (installing with Dev mode set gets you extra dependencies) +MODE = get_env_variable("DJANGO_MODE") + +DEBUG = ast.literal_eval(get_env_variable("DJANGO_DEBUG")) + +DATABASES = { + "default": { + "ENGINE": "django.contrib.gis.db.backends.postgis", + "NAME": get_env_variable("PGDBNAME"), + "USER": get_env_variable("PGUSERNAME"), + "PASSWORD": get_env_variable("PGPASSWORD"), + "HOST": get_env_variable("PGHOST"), + "PORT": get_env_variable("PGPORT"), + "POSTGIS_TEMPLATE": "template_postgis", + } +} + +PUBLIC_SERVER_ADDRESS = "http://arches-lingo/" +CELERY_BROKER_URL = "amqp://{}:{}@arches_rabbitmq".format( + get_env_variable("RABBITMQ_USER"), get_env_variable("RABBITMQ_PASS") +) # RabbitMQ --> "amqp://guest:guest@localhost", Redis --> "redis://localhost:6379/0" + +ELASTICSEARCH_HTTP_PORT = get_env_variable("ESPORT") +ELASTICSEARCH_HOSTS = [ + { + "scheme": "http", + "host": get_env_variable("ESHOST"), + "port": int(ELASTICSEARCH_HTTP_PORT), + } +] + +USER_ELASTICSEARCH_PREFIX = get_optional_env_variable("ELASTICSEARCH_PREFIX") +if USER_ELASTICSEARCH_PREFIX: + ELASTICSEARCH_PREFIX = USER_ELASTICSEARCH_PREFIX + +ALLOWED_HOSTS = get_env_variable("DOMAIN_NAMES").split() + +USER_SECRET_KEY = get_optional_env_variable("DJANGO_SECRET_KEY") +if USER_SECRET_KEY: + # Make this unique, and don't share it with anybody. + SECRET_KEY = USER_SECRET_KEY + +STATIC_ROOT = "/static_root" diff --git a/docker/settings_local.py b/docker/settings_local.py new file mode 100644 index 00000000..13e7caf6 --- /dev/null +++ b/docker/settings_local.py @@ -0,0 +1,3 @@ +DOCKER = True +PUBLIC_SERVER_ADDRESS = "http://arches-lingo/" +WEBPACK_DEVELOPMENT_SERVER_PORT = 8022 diff --git a/docker/sql_env.env b/docker/sql_env.env new file mode 100644 index 00000000..d433dbab --- /dev/null +++ b/docker/sql_env.env @@ -0,0 +1,2 @@ +ACCEPT_EULA=Y +MSSQL_SA_PASSWORD=Fargeo12# \ No newline at end of file diff --git a/docker/webpack/Dockerfile b/docker/webpack/Dockerfile new file mode 100644 index 00000000..0ac2ae42 --- /dev/null +++ b/docker/webpack/Dockerfile @@ -0,0 +1,26 @@ +FROM public.ecr.aws/l1p7h1f9/archesproject-fargeo:7.6.x-base-dev +ENV WEB_ROOT=/web_root +ENV ARCHES_ROOT=${WEB_ROOT}/arches +ENV APP_ROOT=${WEB_ROOT}/arches-lingo +ENV NODE_MAJOR=20 +ENV DEBIAN_FRONTEND=noninteractive + +COPY ./arches ${ARCHES_ROOT} +RUN apt update && apt install wait-for-it openssh-client -y + +COPY ../arches-lingo ${APP_ROOT} + +WORKDIR ${APP_ROOT} +RUN source ../ENV/bin/activate && pip install -e '.[dev]' && pip uninstall arches -y + +WORKDIR ${ARCHES_ROOT} +RUN source ../ENV/bin/activate && pip install -e . 
&& pip install -e '.[dev]' --no-binary :all: + +RUN mkdir /root/.ssh/ + +COPY /arches-lingo/docker/entrypoint.sh ${WEB_ROOT}/entrypoint.sh +RUN chmod -R 700 ${WEB_ROOT}/entrypoint.sh +WORKDIR ${WEB_ROOT} +ENTRYPOINT [ "./entrypoint.sh" ] +CMD ["run_webpack"] +EXPOSE 8021 diff --git a/docker/webpack/env_file.env b/docker/webpack/env_file.env new file mode 100644 index 00000000..ac6671e8 --- /dev/null +++ b/docker/webpack/env_file.env @@ -0,0 +1,33 @@ + +NODE_OPTIONS=--max-old-space-size=10000 +ARCHES_PROJECT=arches_lingo +ARCHES_PROJECT_ROOT_DIRECTORY=arches-lingo +#arches +INSTALL_DEFAULT_GRAPHS=False +INSTALL_DEFAULT_CONCEPTS=False +PGUSERNAME=postgres +PGPASSWORD=postgis +PGDBNAME=$ARCHES_PROJECT +PGHOST=postgres14-3_arches7-0 +PGPORT=5432 +ESHOST=elasticsearch8-3_arches7-0 +ESPORT=9200 +DJANGO_MODE=DEV +DJANGO_DEBUG=True +DOMAIN_NAMES=* +#DJANGO_REMOTE_DEBUG=False +PYTHONUNBUFFERED=0 +TZ=PST +ELASTICSEARCH_PREFIX=$ARCHES_PROJECT + +#rabbitmq +RABBITMQ_USER=guest +RABBITMQ_PASS=guest + +#cantaloupe +CANTALOUPE_ENDPOINT_ADMIN_ENABLED=true +CANTALOUPE_ENDPOINT_ADMIN_USERNAME=admin +CANTALOUPE_ENDPOINT_ADMIN_SECRET=admin +CANTALOUPE_HOST=cantaloupe_disco +CANTALOUPE_PORT=8182 +CANTALOUPE_FILESYSTEMSOURCE_BASICLOOKUPSTRATEGY_PATH_PREFIX=/imageroot/
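
-- 
Local smoke test for the production image (a minimal sketch; assumes Docker is available,
the public.ecr.aws base images are pullable, and the patch is applied at the repository
root -- the `arches-lingo:local` tag and host port 8443 below are arbitrary):

    # build the image the workflow pushes to ECR (same -f flag and build context as the CI job)
    docker build -t arches-lingo:local -f docker/production/Dockerfile .

    # start the production entrypoint against the sample environment file;
    # nginx listens on 443 inside the container
    docker run --rm --env-file docker/production/env_file.env -p 8443:443 arches-lingo:local run_gunicorn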