diff --git a/.env.docker-compose.prod b/.env.docker-compose.prod
index 12e928e7..e04d8964 100644
--- a/.env.docker-compose.prod
+++ b/.env.docker-compose.prod
@@ -6,4 +6,5 @@ PAYLOAD_PORT=3001
 NEXT_REVALIDATION_KEY="veryprivatekey"
 PUBLIC_FRONTEND_URL="http://localhost:3000"
-SERVER_URL="http://cms:3001"
+PUBLIC_SERVER_URL="http://cms:3001"
+PUBLIC_ILMOMASIINA_URL="https://ilmo.tietokilta.fi"
diff --git a/.env.example b/.env.example
index ae79abb2..0ccfafe4 100644
--- a/.env.example
+++ b/.env.example
@@ -2,7 +2,14 @@ PAYLOAD_MONGO_CONNECTION_STRING="mongodb://127.0.0.1/payload"
 PAYLOAD_SECRET="verysecretkey"
 PAYLOAD_REVALIDATION_KEY="veryprivatekey"
 PAYLOAD_PORT=3001
-# for LOCAL_DEVELOPMENT autologin setup, DO NOT USE IN PRODUCTION :)
+# default user setup, this user will be created if no users exist in the database
+# NOTE: this is only used for seeding data, not for autologin
+# if these are not set, the admin UI will ask for a user to be created on first login
+PAYLOAD_DEFAULT_USER_EMAIL=root@tietokilta.fi
+PAYLOAD_DEFAULT_USER_PASSWORD=root
+
+# for LOCAL_DEVELOPMENT autologin setup, DO NOT USE THESE IN PRODUCTION :)
+#these should be set to the same values as the default user above
 PAYLOAD_PUBLIC_DEVELOPMENT_AUTOLOGIN_EMAIL=root@tietokilta.fi
 PAYLOAD_PUBLIC_DEVELOPMENT_AUTOLOGIN_PASSWORD=root
 PAYLOAD_PUBLIC_LOCAL_DEVELOPMENT=true
@@ -10,8 +17,8 @@
 NEXT_REVALIDATION_KEY="veryprivatekey"
 PUBLIC_FRONTEND_URL="http://localhost:3000"
-SERVER_URL="http://localhost:3001"
-PUBLIC_ILMOMASIINA_URL="https://tik-ilmo-prod-app.azurewebsites.net"
+PUBLIC_SERVER_URL="http://localhost:3001"
+PUBLIC_ILMOMASIINA_URL="https://ilmo.tietokilta.fi"
 
 # variables required for Google OAuth 2.0, otherwise disabled
 #GOOGLE_OAUTH_CLIENT_ID=
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 906c877d..3bc8c7e4 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -3,6 +3,13 @@ on:
   push:
   release:
     types: [published]
+
+permissions:
+  id-token: write
+  contents: read
+env:
+  # Change this to upload the built image to your own organization.
+  docker_tag_prefix: ghcr.io/tietokilta
 jobs:
   build:
     name: Format, Lint, Check types & Build
@@ -32,7 +39,7 @@ jobs:
         run: pnpm codegen && git diff --quiet
       - name: Build all
         run: pnpm build
-  docker:
+  docker-build-and-push:
     if: (github.event_name == 'push' && github.ref == 'refs/heads/main') || (github.event_name == 'release' && github.event.action == 'published')
     needs: build
     strategy:
@@ -58,11 +65,11 @@ jobs:
         id: meta
         uses: docker/metadata-action@v4
         with:
-          images: ghcr.io/tietokilta/${{matrix.project}}
+          images: ${{env.docker_tag_prefix}}/${{matrix.project}}
           tags: |
             type=semver,pattern={{version}},enable=${{ github.ref_type == 'tag' }}
             type=semver,pattern={{major}}.{{minor}},enable=${{ github.ref_type == 'tag' }}
-            type=sha
+            type=raw,value=sha-${{github.sha}}
             type=raw,value=latest,enable={{is_default_branch}}
 
       - name: Push to GitHub Packages
@@ -74,3 +81,26 @@ jobs:
           labels: ${{ steps.meta.outputs.labels }}
           build-args: |
             PROJECT=${{matrix.project}}
+            GIT_COMMIT_SHA=${{github.sha}}
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+  deploy:
+    # only run on published releases, enable this when we release
+    #if: github.event_name == 'release' && github.event.action == 'published'
+    name: Deploy to Azure
+    runs-on: ubuntu-latest
+    needs: docker-build-and-push
+    strategy:
+      matrix:
+        project: [web, cms]
+    steps:
+      - name: Login via Azure CLI
+        uses: azure/login@v1
+        with:
+          client-id: ${{secrets.AZURE_CLIENT_ID}}
+          subscription-id: ${{secrets.AZURE_SUBSCRIPTION_ID}}
+          tenant-id: ${{secrets.AZURE_TENANT_ID}}
+      - uses: azure/webapps-deploy@v2
+        with:
+          app-name: tikweb-${{matrix.project}}-prod # TODO: if we ever setup more envs than prod, make this variable customizable
+          images: "${{ env.docker_tag_prefix }}/${{matrix.project}}:sha-${{ github.sha }}"
diff --git a/Dockerfile b/Dockerfile
index 38b90837..5b8aa7bf 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -17,7 +17,8 @@ RUN pnpm install --global turbo
 # Build argument for specifying the project
 # Introduce a build argument 'PROJECT' to specify which project in the monorepo to build.
 ARG PROJECT=web
-
+ARG GIT_COMMIT_SHA=development
+ENV GIT_COMMIT_SHA=$GIT_COMMIT_SHA
 # Install all dependencies in the monorepo
 # Start a new stage for handling dependencies. This stage uses the previously setup image with pnpm and turbo installed.
 FROM setup AS dependencies
@@ -26,7 +27,6 @@ WORKDIR /app
 COPY packages/ ./packages/
 COPY turbo.json ./
 COPY package.json turbo.json packages ./
-COPY apps/${PROJECT} ./apps/${PROJECT}
 COPY pnpm-lock.yaml pnpm-workspace.yaml ./
 # Install dependencies as per the lockfile to ensure consistent dependency resolution.
 RUN pnpm install --frozen-lockfile
@@ -34,6 +34,7 @@ RUN pnpm install --frozen-lockfile
 # Prune projects to focus on the specified project scope
 # Start a new stage to prune the monorepo, focusing only on the necessary parts for the specified project.
 FROM dependencies AS pruner
+COPY apps/${PROJECT} ./apps/${PROJECT}
 RUN turbo prune --scope=${PROJECT} --docker
 # Remove all empty node_modules folders. This is a cleanup step to remove unnecessary directories and reduce image size.
 RUN rm -rf /app/out/full/*/*/node_modules
@@ -71,6 +72,8 @@ RUN rm -rf ./**/*/src
 FROM base AS runner
 #this needs to be here for some reason again, otherwise the WORKDIR command doesn't pick it up
 ARG PROJECT=web
+ARG GIT_COMMIT_SHA=development
+ENV GIT_COMMIT_SHA=$GIT_COMMIT_SHA
 # Create a non-root user and group for better security.
 RUN addgroup --system --gid 1001 nodejs
 RUN adduser --system --uid 1001 nodejs
diff --git a/apps/cms/package.json b/apps/cms/package.json
index 27c9ba8c..2855b8bf 100644
--- a/apps/cms/package.json
+++ b/apps/cms/package.json
@@ -5,7 +5,7 @@
   "license": "MIT",
   "main": "dist/server.js",
   "scripts": {
-    "build": "pnpm copyfiles && pnpm build:payload && pnpm build:server",
+    "build": "NODE_ENV=production pnpm copyfiles && pnpm build:payload && pnpm build:server",
     "build:payload": "payload build",
     "build:server": "tsc",
     "clean": "rm -rf dist",
@@ -15,7 +15,7 @@
     "generate:graphQLSchema": "PAYLOAD_CONFIG_PATH=src/payload.config.ts payload generate:graphQLSchema",
     "generate:types": "PAYLOAD_CONFIG_PATH=src/payload.config.ts payload generate:types",
     "lint": "eslint \"./src/**/*.{js,ts}\"",
-    "start": "PAYLOAD_CONFIG_PATH=dist/payload.config.js node dist/server.js",
+    "start": "NODE_ENV=production PAYLOAD_CONFIG_PATH=dist/payload.config.js node dist/server.js",
     "typecheck": "tsc --noEmit"
   },
   "dependencies": {
diff --git a/apps/cms/src/hooks/revalidate-page.ts b/apps/cms/src/hooks/revalidate-page.ts
index 1872aa5b..a9aeb4c2 100644
--- a/apps/cms/src/hooks/revalidate-page.ts
+++ b/apps/cms/src/hooks/revalidate-page.ts
@@ -18,17 +18,26 @@ export const revalidatePage
       (!("_status" in doc) || doc._status === "published")
     ) {
       const revalidate = async (): Promise<void> => {
+        const revalidationKey = process.env.PAYLOAD_REVALIDATION_KEY;
+        if (!revalidationKey) {
+          req.payload.logger.error(
+            "PAYLOAD_REVALIDATION_KEY not set, cannot revalidate",
+          );
+          return;
+        }
         try {
           const fetchData = JSON.stringify(await getFetchData(doc, req));
-          const res = await fetch(
-            `${
-              process.env.PUBLIC_FRONTEND_URL
-            }/api/revalidate?${new URLSearchParams({
-              secret: process.env.PAYLOAD_REVALIDATION_KEY ?? "",
-              collection,
-              fetchData,
-            }).toString()}`,
+          const fetchUrl = `${
+            process.env.PUBLIC_FRONTEND_URL
+          }/next_api/revalidate?${new URLSearchParams({
+            secret: revalidationKey,
+            collection,
+            fetchData,
+          }).toString()}`;
+          req.payload.logger.info(
+            `sending revalidate request ${fetchUrl.replace(revalidationKey, "REDACTED")}`,
           );
+          const res = await fetch(fetchUrl);
           if (res.ok) {
             req.payload.logger.info(
               `Revalidated collection ${collection} with data ${fetchData}`,
diff --git a/apps/cms/src/payload.config.ts b/apps/cms/src/payload.config.ts
index 462b43f6..459e2d74 100644
--- a/apps/cms/src/payload.config.ts
+++ b/apps/cms/src/payload.config.ts
@@ -53,7 +53,7 @@ const {
 
 export default buildConfig({
   // TODO: should probably enable this for production but it breaks auth in development
-  // serverURL: process.env.SERVER_URL,
+  // serverURL: process.env.PUBLIC_SERVER_URL,
   admin: {
     // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment -- stupid eslint doesn't find the type
     bundler: webpackBundler(),
@@ -70,7 +70,7 @@ export default buildConfig({
   },
   upload: {
     limits: {
-      fileSize: 5000000, // 5MB, written in bytes
+      fileSize: 10000000, // 10MB, written in bytes
     },
   },
   collections: [Users, Pages, Media, Topics, BoardMembers, Boards],
@@ -99,8 +99,8 @@ export default buildConfig({
     connectOptions: {
       dbName: process.env.PAYLOAD_MONGO_DB_NAME,
     },
-    // @ts-expect-error DATABASE_URL is validated by payload on start
-    url: process.env.PAYLOAD_MONGO_CONNECTION_STRING,
+    // webpack build crashes if these are not set i.e. have to default empty
+    url: process.env.PAYLOAD_MONGO_CONNECTION_STRING ?? "",
   }),
   editor: lexicalEditor({
     features: [
diff --git a/apps/cms/src/preview.ts b/apps/cms/src/preview.ts
index a99fa68e..aeef1ddb 100644
--- a/apps/cms/src/preview.ts
+++ b/apps/cms/src/preview.ts
@@ -3,4 +3,4 @@ import type { GeneratePreviewURL } from "payload/config";
 export const generatePreviewUrl =
   <T>(getUrl: (doc: T) => string): GeneratePreviewURL =>
   (doc) =>
-    `${process.env.PUBLIC_FRONTEND_URL}/api/preview?url=${getUrl(doc as T)}`;
+    `${process.env.PUBLIC_FRONTEND_URL}/next_api/preview?url=${getUrl(doc as T)}`;
diff --git a/apps/cms/src/server.ts b/apps/cms/src/server.ts
index 8c37d306..6c47ea45 100644
--- a/apps/cms/src/server.ts
+++ b/apps/cms/src/server.ts
@@ -6,8 +6,13 @@ const secret = process.env.PAYLOAD_SECRET;
 if (!secret) {
   throw new Error("PAYLOAD_SECRET is not set");
 }
-const app = express();
+const gitSha = process.env.GIT_COMMIT_SHA ?? "dev";
 
+const app = express();
+app.use((_, res, next) => {
+  res.setHeader("X-Git-Commit-Sha", gitSha);
+  next();
+});
 // Redirect root to Admin panel
 app.get("/", (_, res) => {
   res.redirect("/admin");
@@ -27,13 +32,14 @@ const start = async (): Promise<void> => {
     if (useGoogleAuth()) {
       payloadInstance.logger.info("Using Google OAuth2");
     }
-    if (process.env.PAYLOAD_PUBLIC_LOCAL_DEVELOPMENT === "true") {
-      const email = process.env.PAYLOAD_PUBLIC_DEVELOPMENT_AUTOLOGIN_EMAIL;
-      const password =
-        process.env.PAYLOAD_PUBLIC_DEVELOPMENT_AUTOLOGIN_PASSWORD;
+    const { PAYLOAD_DEFAULT_USER_EMAIL, PAYLOAD_DEFAULT_USER_PASSWORD } =
+      process.env;
+    if (PAYLOAD_DEFAULT_USER_EMAIL && PAYLOAD_DEFAULT_USER_PASSWORD) {
+      const email = PAYLOAD_DEFAULT_USER_EMAIL;
+      const password = PAYLOAD_DEFAULT_USER_PASSWORD;
       if (!email || !password) {
-        throw new Error(
-          "PAYLOAD_PUBLIC_DEVELOPMENT_AUTOLOGIN_EMAIL and PAYLOAD_PUBLIC_DEVELOPMENT_AUTOLOGIN_PASSWORD must be set when PAYLOAD_PUBLIC_LOCAL_DEVELOPMENT is true",
+        payloadInstance.logger.warn(
+          `PAYLOAD_DEFAULT_USER_EMAIL and PAYLOAD_DEFAULT_USER_PASSWORD are not set, first user has to be created manually through the admin panel`,
         );
       }
       // check if the user exists, if not, create it
@@ -43,9 +49,11 @@ const start = async (): Promise<void> => {
       });
       if (user.totalDocs === 0) {
         payloadInstance.logger.warn(`user ${email} not found, creating...`);
-        payloadInstance.logger.warn(
-          "NOTE that it is recommended to use the seeding scripts (`pnpm db:reset`) to a get filled database for local development",
-        );
+        if (process.env.NODE_ENV !== "production") {
+          payloadInstance.logger.warn(
+            "NOTE that it is recommended to use the seeding scripts (`pnpm db:reset`) to a get filled database for local development",
+          );
+        }
         await payloadInstance.create({
           collection: "users",
           data: {
@@ -53,7 +61,6 @@ const start = async (): Promise<void> => {
             email,
             password,
           },
         });
-        payloadInstance.logger.warn("Payload autologin enabled!");
       }
     }
   },
diff --git a/apps/web/next.config.js b/apps/web/next.config.js
index af418237..ff798fc7 100644
--- a/apps/web/next.config.js
+++ b/apps/web/next.config.js
@@ -1,5 +1,7 @@
 /** @type {import("next").NextConfig} */
 const isProd = process.env.NODE_ENV === "production";
+const gitSha = process.env.GIT_COMMIT_SHA ?? "dev";
+const cdnUrl = "https://next-cdn-endpoint-prod.azureedge.net";
 module.exports = {
   reactStrictMode: true,
   images: {
@@ -15,5 +17,22 @@ module.exports = {
         ]
       : undefined,
   },
-  assetPrefix: isProd ? "https://cdn.alpha.tietokilta.fi" : undefined,
+  async headers() {
+    return [
+      {
+        source: "/:path*",
+        headers: [
+          {
+            key: "x-git-commit-sha",
+            value: gitSha,
+          },
+          // {
+          //   key: "Access-Control-Allow-Origin",
+          //   value: cdnUrl,
+          // },
+        ],
+      },
+    ];
+  },
+  // assetPrefix: isProd ? cdnUrl : undefined,
 };
diff --git a/apps/web/src/app/api/exit-preview/route.ts b/apps/web/src/app/next_api/exit-preview/route.ts
similarity index 100%
rename from apps/web/src/app/api/exit-preview/route.ts
rename to apps/web/src/app/next_api/exit-preview/route.ts
diff --git a/apps/web/src/app/next_api/health/route.ts b/apps/web/src/app/next_api/health/route.ts
new file mode 100644
index 00000000..2ed496f9
--- /dev/null
+++ b/apps/web/src/app/next_api/health/route.ts
@@ -0,0 +1,13 @@
+import type { NextRequest } from "next/server";
+import { NextResponse } from "next/server";
+// this is here for CDN probePath
+export function GET(_: NextRequest): NextResponse {
+  return NextResponse.json(
+    {
+      status: "ok",
+    },
+    {
+      status: 200,
+    },
+  );
+}
diff --git a/apps/web/src/app/api/preview/route.ts b/apps/web/src/app/next_api/preview/route.ts
similarity index 92%
rename from apps/web/src/app/api/preview/route.ts
rename to apps/web/src/app/next_api/preview/route.ts
index 9e67ecd9..5b065ec5 100644
--- a/apps/web/src/app/api/preview/route.ts
+++ b/apps/web/src/app/next_api/preview/route.ts
@@ -25,7 +25,7 @@ export async function GET(
   }
 
   // validate the Payload token
-  const userReq = await fetch(`${process.env.SERVER_URL}/api/users/me`, {
+  const userReq = await fetch(`${process.env.PUBLIC_SERVER_URL}/api/users/me`, {
     headers: {
       Authorization: `JWT ${payloadToken}`,
     },
diff --git a/apps/web/src/app/api/revalidate/route.ts b/apps/web/src/app/next_api/revalidate/route.ts
similarity index 100%
rename from apps/web/src/app/api/revalidate/route.ts
rename to apps/web/src/app/next_api/revalidate/route.ts
diff --git a/apps/web/src/components/admin-bar-client.tsx b/apps/web/src/components/admin-bar-client.tsx
index 89ffb9ea..c6fff064 100644
--- a/apps/web/src/components/admin-bar-client.tsx
+++ b/apps/web/src/components/admin-bar-client.tsx
@@ -19,7 +19,7 @@ export function AdminBarClient({
   return (
-        {events.map((event) => (
+        {events.data.map((event) => (
diff --git a/apps/web/src/lib/api/external/helpers.ts b/apps/web/src/lib/api/external/helpers.ts
new file mode 100644
--- /dev/null
+++ b/apps/web/src/lib/api/external/helpers.ts
+export interface OkResponse<T> {
+  ok: true;
+  error: null;
+  data: T;
+}
+
+export interface ErrorResponse {
+  ok: false;
+  error: ErrorType;
+  data: null;
+}
+
+export type ApiResponse<T> = OkResponse<T> | ErrorResponse;
+
+export const ok = <T>(data: T): OkResponse<T> => ({
+  ok: true,
+  error: null,
+  data,
+});
+
+export const err = (error: ErrorType): ErrorResponse => ({
+  ok: false,
+  error,
+  data: null,
+});
diff --git a/apps/web/src/lib/api/external/ilmomasiina.ts b/apps/web/src/lib/api/external/ilmomasiina.ts
index 7dc60e55..ab71c8ae 100644
--- a/apps/web/src/lib/api/external/ilmomasiina.ts
+++ b/apps/web/src/lib/api/external/ilmomasiina.ts
@@ -1,3 +1,6 @@
+import type { ApiResponse } from "./helpers";
+import { err, ok } from "./helpers";
+
 export type IlmomasiinaResponse = IlmomasiinaEvent[];
 
 export interface IlmomasiinaEvent {
@@ -39,9 +42,18 @@ export interface EventQuota {
 // eslint-disable-next-line @typescript-eslint/no-non-null-assertion -- ideally would throw during build, but let's at least throw here if it's missing
 const baseUrl = process.env.PUBLIC_ILMOMASIINA_URL!;
 
-export const fetchEvents = async (): Promise<IlmomasiinaResponse> => {
-  const response = await fetch(`${baseUrl}/api/events`);
-  const data = (await response.json()) as IlmomasiinaResponse;
+export const fetchEvents = async (): Promise<
+  ApiResponse<IlmomasiinaResponse>
+> => {
+  try {
+    const response = await fetch(`${baseUrl}/api/events`);
+    if (!response.ok) {
+      return err("ilmomasiina-fetch-fail");
+    }
+    const data = (await response.json()) as IlmomasiinaResponse;
 
-  return data;
+    return ok(data);
+  } catch (error) {
+    return err("ilmomasiina-fetch-fail");
+  }
 };
diff --git a/apps/web/src/lib/api/fetcher.ts b/apps/web/src/lib/api/fetcher.ts
index 0ceb98ad..6ce22ce5 100644
--- a/apps/web/src/lib/api/fetcher.ts
+++ b/apps/web/src/lib/api/fetcher.ts
@@ -56,7 +56,7 @@ export const getAll = <
     (req) => `get_${path}_${stringify(req)}`,
     async (req, draft, fetchOptions): Promise => {
       const result = await fetch(
-        `${process.env.SERVER_URL}${path}?${qsStringify({
+        `${process.env.PUBLIC_SERVER_URL}${path}?${qsStringify({
           ...req,
           ...(draft ? { draft: "true" } : {}),
         }).toString()}`,
@@ -81,7 +81,7 @@ export const getGlobal = (path: string, locale?: string) =>
     () => `getGlobal_${path}`,
     async (_, draft, fetchOptions): Promise => {
       const result = await fetch(
-        `${process.env.SERVER_URL}${path}?${qsStringify({
+        `${process.env.PUBLIC_SERVER_URL}${path}?${qsStringify({
           depth: 10, // TODO: remove this when we have a better way to handle depth for example with GraphQL
           // Needs to be bigger than 1 to get media / images
           ...(draft ? { draft: "true" } : {}),
diff --git a/apps/web/src/middleware.ts b/apps/web/src/middleware.ts
index d919f86f..9026bc4c 100644
--- a/apps/web/src/middleware.ts
+++ b/apps/web/src/middleware.ts
@@ -9,7 +9,7 @@ export function middleware(request: NextRequest): NextResponse {
     pathname.startsWith("/api") ||
     pathname.startsWith("/oauth2")
   ) {
-    const destination = new URL(process.env.SERVER_URL || "");
+    const destination = new URL(process.env.PUBLIC_SERVER_URL || "");
     const url = request.nextUrl.clone();
     url.host = destination.host;
     url.port = destination.port;
@@ -17,9 +17,9 @@ export function middleware(request: NextRequest): NextResponse {
     return NextResponse.rewrite(url);
   }
   return NextResponse.redirect(
-    new URL(`/fi/${request.nextUrl.pathname}`, request.url),
+    new URL(`/fi${request.nextUrl.pathname}`, request.url),
   );
 }
 export const config = {
-  matcher: ["/((?!_next|fi|en).*)"],
+  matcher: ["/((?!_next|fi|en|next_api).*)"],
 };
diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml
index 921b8160..72df35bd 100644
--- a/docker-compose.prod.yml
+++ b/docker-compose.prod.yml
@@ -17,6 +17,7 @@ services:
       context: .
       args:
         - PROJECT=cms
+        - GIT_COMMIT_SHA=test
     depends_on:
       - mongo
     env_file:
@@ -29,6 +30,7 @@ services:
       context: .
      args:
        - PROJECT=web
+        - GIT_COMMIT_SHA=test
     depends_on:
       - cms
     env_file:
diff --git a/package.json b/package.json
index 2be474a7..3705918b 100644
--- a/package.json
+++ b/package.json
@@ -6,13 +6,13 @@
     "codegen": "dotenv -- turbo run codegen",
     "db:clear": "docker compose down -v",
     "db:export": "dotenv -- ./scripts/generate_seeding_data.sh",
-    "db:populate": "dotenv -- ./scripts/import_seeding_data.sh",
+    "db:populate": "dotenv -- ./scripts/import_seeding_data.sh --all",
     "db:reset": "pnpm db:clear && pnpm db:start && pnpm db:populate",
     "db:start": "docker compose up -d",
     "db:stop": "docker compose down",
     "dev": "pnpm db:start && dotenv -- turbo run dev",
-    "docker:build:cms": "docker build . --tag cms --build-arg PROJECT=cms",
-    "docker:build:web": "docker build . --tag web --build-arg PROJECT=web",
+    "docker:build:cms": "docker build . --tag web-cms --build-arg PROJECT=cms",
+    "docker:build:web": "docker build . --tag web-web --build-arg PROJECT=web",
     "docker:compose:build": "docker compose -f docker-compose.prod.yml up --build",
     "docker:compose:start": "docker compose -f docker-compose.prod.yml up",
     "docker:start:cms": "docker run -p 3001:3001 cms",
diff --git a/scripts/import_seeding_data.sh b/scripts/import_seeding_data.sh
index 99a73474..2ec95c30 100755
--- a/scripts/import_seeding_data.sh
+++ b/scripts/import_seeding_data.sh
@@ -5,14 +5,67 @@ if [ -z "$PAYLOAD_MONGO_CONNECTION_STRING" ]; then
   echo "PAYLOAD_MONGO_CONNECTION_STRING is not set. Using default value."
   PAYLOAD_MONGO_CONNECTION_STRING="mongodb://127.0.0.1/payload"
 fi
-
-# Loop to import each JSON file into a MongoDB collection
-for file_path in data/gen/db/*.json; do
-  filename=$(basename "$file_path")
-  COLLECTION_NAME="${filename%.json}"
-  echo "Importing $COLLECTION_NAME collection..."
-  mongoimport --uri="$PAYLOAD_MONGO_CONNECTION_STRING" --collection="$COLLECTION_NAME" --file="$file_path" --jsonArray
+#if no args, print usage
+if [ $# -eq 0 ]; then
+  echo "Usage: ./scripts/import_seeding_data.sh [-u|--upsert] [collection_name]"
+  echo "  -u, --upsert  Use upsert mode when importing data"
+  echo "  -a, --all     Import all collections, "
+  exit 0
+fi
+# iterate through arguments and process them
+UPSERT_FLAG=false
+ALL_FLAG=false
+for arg in "$@"
+do
+  case $arg in
+    -u|--upsert)
+      UPSERT_FLAG=true
+      shift # Remove --upsert from processing
+      ;;
+  esac
+  case $arg in
+    -h|--help)
+      echo "Usage: ./scripts/import_seeding_data.sh [-u|--upsert] [collection_name]"
+      echo "  -u, --upsert  Use upsert mode when importing data"
+      exit 0
+      ;;
+  esac
+  case $arg in
+    -a|--all)
+      ALL_FLAG=true
+      shift # Remove --all from processing
+      ;;
+  esac
 done
+# add --upsert flag to mongoimport command if UPSERT_FLAG is true
+if [ "$UPSERT_FLAG" = true ] ; then
+  echo "Using upsert mode"
+  UPSERT_FLAG="--upsert"
+else
+  UPSERT_FLAG=""
+fi
+# if argument is passed, only import that collection
+if [ -n "$1" ]; then
+  if [ "$ALL_FLAG" = true ] ; then
+    echo "Cannot use --all flag with collection name"
+    exit 1
+  fi
+  COLLECTION_NAME="$1"
+  echo "Importing $COLLECTION_NAME collection..."
+  mongoimport --uri="$PAYLOAD_MONGO_CONNECTION_STRING" --collection="$COLLECTION_NAME" --file="data/gen/db/$COLLECTION_NAME.json" --jsonArray $UPSERT_FLAG
+  exit 0
+fi
+# Loop to import each JSON file into a MongoDB collection
+if [ "$ALL_FLAG" = true ] ; then
+  echo "Importing all collections..."
+  for file_path in data/gen/db/*.json; do
+    filename=$(basename "$file_path")
+    COLLECTION_NAME="${filename%.json}"
+    echo "Importing $COLLECTION_NAME collection..."
+    mongoimport --uri="$PAYLOAD_MONGO_CONNECTION_STRING" --collection="$COLLECTION_NAME" --file="$file_path" --jsonArray $UPSERT_FLAG
+  done
+  exit 0
+fi
 
 # Copy images from images to ../uploads folder
 # TODO: change this implementation when using cloud storage plugin
diff --git a/turbo.json b/turbo.json
index ee186d4c..cd0ef51f 100644
--- a/turbo.json
+++ b/turbo.json
@@ -7,12 +7,14 @@
     "PAYLOAD_SECRET",
     "PAYLOAD_REVALIDATION_KEY",
     "PAYLOAD_PORT",
+    "PAYLOAD_DEFAULT_USER_EMAIL",
+    "PAYLOAD_DEFAULT_USER_PASSWORD",
     "PAYLOAD_PUBLIC_DEVELOPMENT_AUTOLOGIN_EMAIL",
     "PAYLOAD_PUBLIC_DEVELOPMENT_AUTOLOGIN_PASSWORD",
     "PAYLOAD_PUBLIC_LOCAL_DEVELOPMENT",
     "NEXT_REVALIDATION_KEY",
     "PUBLIC_FRONTEND_URL",
-    "SERVER_URL",
+    "PUBLIC_SERVER_URL",
     "PUBLIC_ILMOMASIINA_URL"
   ],
   "pipeline": {
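
A note on the `events.data.map` change and the new `ok`/`err` helpers: `fetchEvents` now resolves to the `ApiResponse` union instead of returning bare data, so callers narrow on `ok` before touching `data`. A minimal consumer sketch, assuming the event objects expose a `title` field; the function name and import path below are illustrative, not part of this diff:

```ts
import { fetchEvents } from "../lib/api/external/ilmomasiina";

// Hypothetical caller; the point is the discriminated-union narrowing.
export async function listUpcomingEventTitles(): Promise<string[]> {
  const events = await fetchEvents();
  if (!events.ok) {
    // events.error is "ilmomasiina-fetch-fail" here; render a fallback instead of crashing the page.
    return [];
  }
  // In this branch the type has narrowed to OkResponse<IlmomasiinaResponse>.
  return events.data.map((event) => event.title);
}
```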
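
The `GIT_COMMIT_SHA` build argument, the Express middleware in `apps/cms/src/server.ts`, and the `headers()` entry in `next.config.js` all serve one goal: every response reports the commit it was built from, and `/next_api/health` gives the CDN probe (and humans) a cheap target. A quick check could look like this sketch, with a placeholder host:

```ts
// Prints the health payload and the deployed commit for the web app;
// the cms app reports the same value via its X-Git-Commit-Sha header.
async function checkDeployment(baseUrl: string): Promise<void> {
  const res = await fetch(`${baseUrl}/next_api/health`);
  console.log(res.status, await res.json()); // expected: 200 { status: "ok" }
  console.log(res.headers.get("x-git-commit-sha")); // "dev" locally, the CI-provided sha in production
}

void checkDeployment("https://example.tietokilta.fi"); // placeholder host
```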