diff --git a/.github/actions/setup-env/action.yml b/.github/actions/setup-env/action.yml
new file mode 100644
index 0000000..677e671
--- /dev/null
+++ b/.github/actions/setup-env/action.yml
@@ -0,0 +1,56 @@
+name: Setup Environment
+description: Check out the repo, if necessary, and set up node and pnpm using the versions defined in `package.json::engines`
+inputs:
+  npm-token:
+    description: Your npm token for installing private packages from npm (optional if you don't use private packages)
+runs:
+  using: composite
+  steps:
+    - name: Pre-Setup
+      shell: bash
+      run: |
+        CHECKED_OUT="$([ -e ./scripts/.internal/getEngineVersion.js ] && echo true || echo false)"
+        echo "CHECKED_OUT=$CHECKED_OUT"
+        echo "CHECKED_OUT=$CHECKED_OUT" >> $GITHUB_ENV
+
+    - name: Checkout
+      if: env.CHECKED_OUT == 'false'
+      uses: actions/checkout@v4
+
+    - name: Get Versions
+      shell: bash
+      run: |
+        NODE_VERSION="$(./scripts/.internal/getEngineVersion.js node)"
+        echo "NODE_VERSION=$NODE_VERSION"
+        echo "NODE_VERSION=$NODE_VERSION" >> $GITHUB_ENV
+
+        PNPM_VERSION="$(./scripts/.internal/getEngineVersion.js pnpm)"
+        echo "PNPM_VERSION=$PNPM_VERSION"
+        echo "PNPM_VERSION=$PNPM_VERSION" >> $GITHUB_ENV
+
+    - name: Install pnpm
+      shell: bash
+      run: |
+        npm i -g pnpm@${{ env.PNPM_VERSION }}
+        echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
+
+    - name: Install node
+      uses: actions/setup-node@v4
+      with:
+        node-version: ${{ env.NODE_VERSION }}
+
+    - name: Set up caching for pnpm
+      uses: actions/cache@v4
+      with:
+        path: ${{ env.STORE_PATH }}
+        key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
+        restore-keys: |
+          ${{ runner.os }}-pnpm-store-
+
+    - name: Install deps
+      shell: bash
+      run: |
+        if [ -n "${{ inputs.npm-token }}" ]; then
+          echo "//registry.npmjs.org/:_authToken=${{ inputs.npm-token }}" >> ~/.npmrc
+        fi
+        pnpm --frozen-lockfile install
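The action above leans on `scripts/.internal/getEngineVersion.js` (added later in this diff) to read tool versions out of `package.json::engines`. As a rough illustration only (not part of this change), running the helper by hand from the repo root should behave like this, with the printed values coming from the `engines` block added to `package.json` below:

```bash
# Illustrative manual check of the helper the action calls; run from the repo root.
./scripts/.internal/getEngineVersion.js node   # prints the node range, e.g. 20
./scripts/.internal/getEngineVersion.js pnpm   # prints the pnpm range, e.g. ^8
./scripts/.internal/getEngineVersion.js        # no engine given: prints usage and exits 1
```
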
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..2a17c53
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,306 @@
+name: CI
+
+on:
+  # We use a `vX.Y` branch naming convention for our "main" branches, so we'll target pushes against those (including
+  # merged PRs)
+  push:
+    branches:
+      - v*
+  # Additionally, we'll target PR updates so that CI runs against open PRs
+  pull_request:
+    types:
+      - opened
+      - synchronize
+      - reopened
+  # Finally, we'll allow the workflow to be triggered manually
+  workflow_dispatch:
+
+# Only one run at a time per ref
+concurrency:
+  group: ci-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+
+  ##
+  ## PREPARE
+  ##
+
+  get-deploy-env:
+    runs-on: ubuntu-latest
+    # Necessary for nrwl/nx-set-shas
+    permissions:
+      contents: 'read'
+      actions: 'read'
+    outputs:
+      node-version: ${{ steps.initial-vars.outputs.NODE_VERSION }}
+      pnpm-version: ${{ steps.initial-vars.outputs.PNPM_VERSION }}
+      affected-shas-base: ${{ steps.get-affected-sha-spread.outputs.base }}
+      affected-shas-head: ${{ steps.get-affected-sha-spread.outputs.head }}
+      pnpmFilter: ${{ steps.get-pnpm-params.outputs.pnpmFilter }}
+      pnpmIgnorePattern: ${{ steps.get-pnpm-params.outputs.pnpmIgnorePattern }}
+      allAppsJson: ${{ steps.get-all-pkgs.outputs.allAppsJson }}
+      allLibsJson: ${{ steps.get-all-pkgs.outputs.allLibsJson }}
+      affectedAppsJson: ${{ steps.get-affected-pkgs.outputs.affectedAppsJson }}
+      affectedLibsJson: ${{ steps.get-affected-pkgs.outputs.affectedLibsJson }}
+      deployable: ${{ steps.is-deployable.outputs.deployable }}
+    steps:
+      - name: Check out repo
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      # Output some initial variables that we'll need later
+      - name: Initial Vars
+        id: initial-vars
+        run: |
+          # Replace this with 'v1.x' or another branch to simulate a CI run on that branch
+          REFNAME=$GITHUB_REF_NAME
+          # REFNAME=v1.x
+          echo "REFNAME=$REFNAME"
+          echo "REFNAME=$REFNAME" >> $GITHUB_ENV
+
+          BASEBRANCH="$(echo "$REFNAME" | grep -Eq '^(develop|release-|main-)' && echo $REFNAME || echo develop)"
+          echo "BASEBRANCH=$BASEBRANCH"
+          echo "BASEBRANCH=$BASEBRANCH" >> $GITHUB_ENV
+
+          NODE_VERSION="$(./scripts/.internal/getEngineVersion.js node)"
+          echo "NODE_VERSION=$NODE_VERSION"
+          echo "NODE_VERSION=$NODE_VERSION" >> $GITHUB_OUTPUT
+
+          PNPM_VERSION="$(./scripts/.internal/getEngineVersion.js pnpm)"
+          echo "PNPM_VERSION=$PNPM_VERSION"
+          echo "PNPM_VERSION=$PNPM_VERSION" >> $GITHUB_OUTPUT
+
+      # Output some info for debugging purposes
+      - name: Output Versions for Debugging
+        run: |
+          echo "effective branch: $REFNAME"
+          echo "git: $(git --version)"
+          echo "jq: $(jq --version)"
+          echo "docker: $(docker --version 2>/dev/null)"
+
+      # Install pnpm (we don't need the whole node setup and all the deps, so we're just doing a simple pnpm install here)
+      - name: Install pnpm
+        run: npm i -g pnpm@${{ steps.initial-vars.outputs.PNPM_VERSION }}
+
+      # This is what allows us not to have to run our linting and testing against all of our code
+      - name: Determine the spread of affected commits
+        id: get-affected-sha-spread
+        uses: nrwl/nx-set-shas@v4
+        with:
+          main-branch-name: ${{ env.BASEBRANCH }}
+
+      # Use our current info to get a pnpm filter and ignore pattern to use in future commands, as well as a list of
+      # affected apps and libs
+      - name: Get pnpm params
+        id: get-pnpm-params
+        run: ./.github/workflows/get-pnpm-params.sh
+      - name: Get list of all packages from the repo
+        id: get-all-pkgs
+        run: ./.github/workflows/get-all-pkgs.sh
+      - name: Get lists of affected libs and apps
+        id: get-affected-pkgs
+        run: ./.github/workflows/get-affected-pkgs.sh
+        env:
+          hasTopLevelChanges: ${{ steps.get-pnpm-params.outputs.hasTopLevelChanges }}
+          pnpmFilter: ${{ steps.get-pnpm-params.outputs.pnpmFilter }}
+          pnpmIgnorePattern: ${{ steps.get-pnpm-params.outputs.pnpmIgnorePattern }}
+          allAppsJson: ${{ steps.get-all-pkgs.outputs.allAppsJson }}
+          allLibsJson: ${{ steps.get-all-pkgs.outputs.allLibsJson }}
+
+      # Finally, determine whether the current branch is deployable
+      - name: Determine whether the current branch is deployable
+        id: is-deployable
+        run: |
+          deployable="$(echo "$REFNAME" | grep -Eq '^v[0-9]+\..+$' && [ '${{ steps.get-affected-pkgs.outputs.affectedAppsJson }}' != '[]' ] && echo "true" || echo "false")"
+          echo "deployable=$deployable"
+          echo "deployable=$deployable" >> $GITHUB_OUTPUT
+
+  ##
+  ## Run linting/typechecks/tests
+  ##
+
+  test:
+    needs: [get-deploy-env]
+    runs-on: ubuntu-latest
+    steps:
+      - name: Set up node and pnpm
+        uses: ./.github/actions/setup-env
+
+      - name: ESLint Cache
+        uses: actions/cache@v3
+        with:
+          key: eslint-cache-${{ hashFiles('libs/**/*', 'apps/**/*', 'pnpm-lock.yaml') }}
+          restore-keys: |
+            eslint-cache-
+          path: |
+            ./apps/*/.eslintcache
+            ./libs/*/.eslintcache
+
+      - name: Jest Cache
+        uses: actions/cache@v3
+        with:
+          key: jest-cache-${{ github.ref_name }}
+          restore-keys: |
+            jest-cache-
+          path: |
+            /tmp/jest_*
+
+      - name: Typecheck and build
+        run: pnpm build:libs
+
+      - name: Prepare app envs
+        run: |
+          for d in ./apps/*; do
+            # TODO:TEMPORARY: Remove this once we've finished https://neatcapital.atlassian.net/browse/LOANS-8353
+            if ! 
[ -d "$d/.env" ] && [ -f "$d/.env.sample" ]; then + cp "$d/.env.sample" "$d/.env" + fi + done + + - name: Lint + run: | + pnpm \ + --parallel \ + --filter="${{ needs.get-deploy-env.outputs.pnpmFilter }}" \ + --changed-files-ignore-pattern="${{ needs.get-deploy-env.outputs.pnpmIgnorePattern }}" \ + lint + + # Don't need to typecheck because our build step above does that for us + # pnpm \ + # --parallel \ + # --filter="${{ needs.get-deploy-env.outputs.pnpmFilter }}" \ + # --changed-files-ignore-pattern="${{ needs.get-deploy-env.outputs.pnpmIgnorePattern }}" \ + # typecheck + + - name: Run Tests + run: pnpm test:raw --changedSince=${{ needs.get-deploy-env.outputs.affected-shas-base }} + env: + DATABASE_URL: postgres://postgres:postgres@localhost:${{ job.services.postgres.ports[5432] }}/testdb + CLOUDAMQP_URL: amqp://localhost:${{ job.services.rabbitmq.ports[5672] }} + + ## + ## Prime our docker layer cache + ## + + # TODO: We need to figure out how to do docker layer caching on our own (without build-push-action) so that our + # internal `deploy-to-heroku` action works properly. + + # prime-docker-cache: + # name: Prime Docker Cache + # # needs: [test, get-deploy-env] + # # needs: [get-deploy-env] + # # if: "!cancelled() && !failure() && needs.get-deploy-env.outputs.deployable == 'true'" + # runs-on: ubuntu-latest + # steps: + # - uses: actions/checkout@v4 + + # - name: PNPM Cache for Docker + # uses: actions/cache@v3 + # with: + # path: .pnpm-store + # key: ${{ runner.os }}-pnpm-store-${{ hashFiles('./pnpm-lock.yaml') }} + # restore-keys: | + # ${{ runner.os }}-pnpm-store- + + # - uses: docker/setup-buildx-action@v3 + + # - name: Inject pnpm cache into Docker + # uses: reproducible-containers/buildkit-cache-dance@v2.1.2 + # with: + # cache-source: .pnpm-store + # cache-target: /monorepo/.pnpm-store + + # # WARNING: File mutations prior to this step will be ignored in the docker build. See documentation at + # # https://github.com/docker/build-push-action + # - uses: docker/build-push-action@v5 + # with: + # context: . 
+ # cache-from: type=gha + # cache-to: type=gha,mode=max + # file: ./deploy/Dockerfile + # target: libs-builder + # build-args: | + # NODE_VERSION=18 + # PM_VERSION=^8 + # # build-args: | + # # NODE_VERSION=${{ needs.get-deploy-env.outputs.node-version }} + # # PM_VERSION=${{ needs.get-deploy-env.outputs.pm-version }} + # secrets: | + # npmrc=//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }} + + ## + ## Build and deploy services + ## + + build-and-deploy: + name: Build and Deploy ${{ matrix.service && format('{0}-{1}', matrix.service, 'service') || '' }} + needs: [get-deploy-env, test] #, prime-docker-cache] + if: "!cancelled() && !failure() && needs.get-deploy-env.outputs.deployable == 'true'" + runs-on: ubuntu-latest + strategy: + matrix: + service: ${{ fromJson(needs.get-deploy-env.outputs.affectedAppsJson) }} + steps: + - uses: actions/checkout@v4 + + # - name: PNPM Cache for Docker + # uses: actions/cache@v3 + # with: + # path: .pnpm-store + # key: ${{ runner.os }}-pnpm-store-${{ hashFiles('./pnpm-lock.yaml') }} + # restore-keys: | + # ${{ runner.os }}-pnpm-store- + + # - name: Inject pnpm cache into Docker + # uses: reproducible-containers/buildkit-cache-dance@v2.1.2 + # with: + # cache-source: .pnpm-store + # cache-target: /monorepo/.pnpm-store + + - name: Fix app name + id: fix-app-name + run: | + herokuAppName="$(echo "${{ needs.get-deploy-env.outputs.heroku-app-name }}" | sed 's#SVC#${{ matrix.service }}#g')" + + # Hack to work around qa-pricing already existing error in heroku + if echo "$herokuAppName" | grep -q pricing-qa; then + herokuAppName="pricing-service-qa" + fi + + echo "heroku-app-name=$herokuAppName" + echo "heroku-app-name=$herokuAppName" >> $GITHUB_OUTPUT + + displayName="$( + echo " + v = '${{ matrix.service }}'; + console.log(v[0].toUpperCase() + v.slice(1).toLowerCase()); + " | node + )" + echo "display-name=$displayName" + echo "display-name=$displayName" >> $GITHUB_OUTPUT + + - name: Get deploy targets + id: get-deploy-targets + run: | + deployTargets="$(head -n1 ./apps/${{ matrix.service }}-service/deploy/targets)" + echo "deploy-targets=$deployTargets" + echo "deploy-targets=$deployTargets" >> $GITHUB_OUTPUT + + - uses: NeatCapitalOrg/devops/actions/deploy-to-heroku@main + with: + type: docker + docker-secrets: | + npmrc="//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" + docker-build-args: | + SERVICE_NAME=${{ matrix.service }}-service + dockerfiles: './deploy/Dockerfile ./apps/${{ matrix.service }}-service/deploy/Dockerfile' + tenant: ${{ needs.get-deploy-env.outputs.tenant }} + tenant-abrv-uc: ${{ needs.get-deploy-env.outputs.tenant-abrv-uc }} + env-abrv-uc: ${{ needs.get-deploy-env.outputs.env-abrv-uc }} + heroku-app-name: ${{ steps.fix-app-name.outputs.heroku-app-name }} + heroku-api-key: ${{ secrets.HEROKU_API_KEY }} + heroku-targets: ${{ steps.get-deploy-targets.outputs.deploy-targets }} + gchat-message-webhook: ${{ secrets.GOOGLE_CHAT_BUILDS_WEBHOOK }} + gchat-message-display-name: ${{ steps.fix-app-name.outputs.display-name }} Service \ No newline at end of file diff --git a/.github/workflows/get-affected-pkgs.sh b/.github/workflows/get-affected-pkgs.sh new file mode 100755 index 0000000..3a2e477 --- /dev/null +++ b/.github/workflows/get-affected-pkgs.sh @@ -0,0 +1,59 @@ +#!/bin/bash + +set -e + +# Expects: +# +# * to be run from repo root +# * for repo to be checked out +# * to have pnpm installed +# * to have env: +# * hasTopLevelChanges +# * allAppsJson +# * allLibsJson +# * pnpmFilter +# * pnpmIgnorePattern +# +# Outputs: +# +# 
* affectedAppsJson +# * affectedLibsJson + +if [ -n "$hasTopLevelChanges" ]; then + + affectedAppsJson="$allAppsJson" + affectedLibsJson="$allLibsJson" + +else + + affected="$(pnpm --filter="$pnpmFilter" --changed-files-ignore-pattern="$pnpmIgnorePattern" exec pwd | grep -v 'No projects matched' | tr '\n' ' ')" + + + for d in $affected; do + pkg="$(basename "$d")" + if echo "$d" | grep -q /apps/; then + if [ -z "$affectedAppsJson" ]; then + affectedAppsJson='"'$pkg'"' + else + affectedAppsJson="${affectedAppsJson},"'"'$pkg'"' + fi + + else + if [ -z "$affectedLibsJson" ]; then + affectedLibsJson='"'$pkg'"' + else + affectedLibsJson="${affectedLibsJson},"'"'$pkg'"' + fi + fi + + done + + affectedAppsJson="[$affectedAppsJson]" + affectedLibsJson="[$affectedLibsJson]" + +fi + +echo "affectedAppsJson=$affectedAppsJson" +echo "affectedAppsJson=$affectedAppsJson" >> $GITHUB_OUTPUT +echo "affectedLibsJson=$affectedLibsJson" +echo "affectedLibsJson=$affectedLibsJson" >> $GITHUB_OUTPUT diff --git a/.github/workflows/get-all-pkgs.sh b/.github/workflows/get-all-pkgs.sh new file mode 100755 index 0000000..c5e2ada --- /dev/null +++ b/.github/workflows/get-all-pkgs.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +set -e + +# Expects: +# +# * to be run from repo root +# * for repo to be checked out +# +# Outputs: +# +# * allAppsJson +# * allLibsJson + +allAppsJson= +allLibsJson= + +for d in libs/* apps/*; do + + pkg="$(basename "$d")" + + if echo "$d" | grep -q apps/; then + if [ -z "$allAppsJson" ]; then + allAppsJson='"'$pkg'"' + else + allAppsJson="${allAppsJson},"'"'$pkg'"' + fi + + else + if [ -z "$allLibsJson" ]; then + allLibsJson='"'$pkg'"' + else + allLibsJson="${allLibsJson},"'"'$pkg'"' + fi + fi + +done + +echo "allAppsJson=[$allAppsJson]" +echo "allAppsJson=[$allAppsJson]" >> $GITHUB_OUTPUT +echo "allLibsJson=[$allLibsJson]" +echo "allLibsJson=[$allLibsJson]" >> $GITHUB_OUTPUT diff --git a/.github/workflows/get-pnpm-params.sh b/.github/workflows/get-pnpm-params.sh new file mode 100755 index 0000000..3d61f28 --- /dev/null +++ b/.github/workflows/get-pnpm-params.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +set -e + +# Expects: +# +# * to be run from repo root +# * for repo to be checked out +# * to have env: +# * NX_BASE +# +# Outputs: +# +# * hasTopLevelChanges +# * pnpmFilter +# * pnpmIgnorePattern + + +# Determine if any important top-level files have changed that would require a full re-deploy of everything +hasTopLevelChanges="$( + git diff --name-only "$NX_BASE" | \ + grep -qE '^(\.github|\.dockerignore|deploy/.*\.dockerfile|pnpm-lock|pnpm-workspace|tsconfig)' && \ + echo 1 || \ + echo "" +)" + +pnpmFilter="$([ -n "$hasTopLevelChanges" ] && echo "*" || echo "...[$NX_BASE]")" +pnpmIgnorePattern="**/tests,**/*.md" + +echo "hasTopLevelChanges=$hasTopLevelChanges" +echo "hasTopLevelChanges=$hasTopLevelChanges" >> $GITHUB_OUTPUT +echo "pnpmFilter=$pnpmFilter" +echo "pnpmFilter=$pnpmFilter" >> $GITHUB_OUTPUT +echo "pnpmIgnorePattern=$pnpmIgnorePattern" +echo "pnpmIgnorePattern=$pnpmIgnorePattern" >> $GITHUB_OUTPUT diff --git a/README.md b/README.md index 8e527d6..ec6b6ef 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,9 @@ Generic Typescript Monorepo Philosophy ================================================================================================================= -_**NOTE: THIS IS AN ONGOING EXPERIMENT.** This repository is a reference implementation of the monorepo philosophy -outlined in this readme. It may work for you, but it may not. 
Use at your own risk, and be ready to take what's valuable -from it while leaving behind what's not._ +_**NOTE: THIS IS A WORK IN PROGRESS.** This repository is a reference implementation of the monorepo philosophy outlined +in this readme. It may work for you, but it may not. Use at your own risk, and be ready to take what's valuable from it +while leaving behind what's not._ _Furthermore, all views expressed in this readme are intended to represent **my personal ideas,** not any particular assertion of "the truth"._ @@ -81,10 +81,10 @@ The primary things `pnpm` provides that make monorepo management possible/easier my packages, I can run `pnpm -r typecheck` to easily run that npm script in all packages for which it is defined. What's more, `pnpm` will run the scripts against the packages in dependency order, meaning if package A depends on B and I run `pnpm -r tsc`, it will run `tsc` in package B first, then package A. -* **Advanced filtering.** It also includes a very advanced filtering mechanism that includes selecting packages that are - dependent on a given package or that are dependencies of a given package, as well as selecting packages according to - code changes between two git commits. This allows you to run testing and linting against only things that have - actually changed since the last relevant commit. +* **Advanced filtering.** It also includes a very advanced [filtering mechanism](https://pnpm.io/filtering) that + includes selecting packages that are dependent on a given package or that are dependencies of a given package, as well + as selecting packages according to code changes between two git commits. This allows you to run testing and linting + against only things that have actually changed since the last relevant commit. * **Advanced version management for publishing.** For packages within the monorepo that are dependent on other packages within the monorepo (such as `libs/shared-be` in this repo),`pnpm` allows you to publish the dependent packages with concrete dependency version specs while transparently linking the dependency to the live version in the monorepo for @@ -229,6 +229,46 @@ this is to concatenate a base file from the monorepo root with a service-specifi pipe that into `docker build` via stdin. See `./deploy` and `./apps/*/deploy` along with the `./scripts/docker-build.sh` script to see how it all comes together. +In general, I consider the docker infrastructure in this monorepo to be both funky and also reasonably functional and +stable. This is the setup I implemented at my last company, and it's a pattern I'll likely use again in the future. + +Some key points: + +* `./deploy/dockerfile.base` is the base dockerfile for all builds +* GOTCHA: There's a dynamic section in `dockerfile.base` to mount all the libs and apps when installing deps. This + allows us to not have to risk having an out-of-date dockerfile when we add new libs and apps, but it also makes the + dockerfile unusable in raw form. Trade-offs. +* The front-end dockerfile (`./deploy/dockerfile.react-ext`) has not been battle tested and is considered a starting + point. +* At this time, there are no service-specific extensions, but the implemented dockerfile system allows you to provide + a file at `apps/*/deploy/dockerfile.service` if you wish to make additional changes to the final built service. 
If + you need to change the build target as a result, you can change the given package's `docker:build` npm script to + contain the `DOCKER_TARGET` env var before calling the script, e.g., + `"docker:build": "DOCKER_TARGET=my-targ ../../scripts/docker-build.sh"` +* The dev docker image is simply the monorepo. And since the `/monorepo` directory is actually replaced with your live + monorepo, the image really just serves as a fixed runtime environment. +* You can build all containers using `pnpm docker:build` +* You can bring the system up using `pnpm docker:compose up -d` (dev). This brings the system up in dev mode with your + local monorepo linked in. If you want to run the actual built containers statically, try `pnpm docker:compose prod up -d`. + You can bring the system back down by running `pnpm docker:compose [ENV] down`, and you can additionally pass any + arguments you'd like to docker compose, e.g., `pnpm docker:compose [ENV] logs -f`. + + +### ESLint + +I respect that the folks who created eslint did a good enough job to achieve massive world-wide adoption, but wow is it +ever a mess. + +I've tried a number of times to finally overcome my shortcomings in eslint and the more I study it the more confused I +get. The people who built it are surely brilliant, but the product itself is awful.... + +With that caveat, here's what to know about my eslint setup: + +* I used the beta "flat-file" config format for forward compatibility. This caused even more problems than just standard + eslint, but it's supposedly been feature complete for a while now so I figured I'd go for it. +* I'm not by any means an expert in eslint config, so I may have messed this up. Take what I've done as inspiration, but + be ready to forge your own path here. + ### Boilerplate diff --git a/apps/my-microservice/src/main.ts b/apps/my-microservice/src/main.ts index 6dc8247..0506608 100644 --- a/apps/my-microservice/src/main.ts +++ b/apps/my-microservice/src/main.ts @@ -15,7 +15,7 @@ app.use(loggingMiddleware(`my-microservice`)); app.use((req, res, next) => { res.setHeader('Access-Control-Allow-Origin', '*'); next(); -}) +}); app.get('/', (req, res) => { const thing: MyThing = myThing; @@ -35,7 +35,7 @@ app.get('/proxy', async (req, res, next) => { } }); -app.use(((err, req, res, next) => { +app.use(((err, req, res) => { if (!err) { res.status(404).json({ status: 'error', error: 'Not found' }); } else { diff --git a/apps/my-react-app/src/App.tsx b/apps/my-react-app/src/App.tsx index 68dde24..7c5da3b 100644 --- a/apps/my-react-app/src/App.tsx +++ b/apps/my-react-app/src/App.tsx @@ -10,7 +10,6 @@ import { ApiDemo } from './containers/ApiDemo'; const thing: MyThing = myThing; function App(p: { config: Config }) { - const deps = assembleDeps(p.config); if (!deps) { diff --git a/apps/my-react-app/src/apiClient.ts b/apps/my-react-app/src/apiClient.ts index 7fa54e7..62dd2cb 100644 --- a/apps/my-react-app/src/apiClient.ts +++ b/apps/my-react-app/src/apiClient.ts @@ -1,10 +1,10 @@ -export const ApiClient = (config: { api: { baseUrl: string; } }) => ({ +export const ApiClient = (config: { api: { baseUrl: string } }) => ({ get: async (path: string) => { const res = await fetch(`${config.api.baseUrl}${path}`); if (!res.ok) { const body = await res.text(); - throw new Error(`Failed to fetch ${path}: ${res.status}: ${res.text()}`); + throw new Error(`Failed to fetch ${path}: ${res.status}: ${body}`); } return (await res.json()) as T; - } -}); \ No newline at end of file + }, +}); diff --git 
a/apps/my-react-app/src/containers/ApiDemo.tsx b/apps/my-react-app/src/containers/ApiDemo.tsx index 7c15d62..7c6dc2e 100644 --- a/apps/my-react-app/src/containers/ApiDemo.tsx +++ b/apps/my-react-app/src/containers/ApiDemo.tsx @@ -14,11 +14,14 @@ export const ApiDemo = () => { const [data, setData] = React.useState(null); const [dataSrc, setDataSrc] = React.useState(DATA_OPTS.MY_MICROSERVICE); - const changeDataSrc = React.useCallback((newValue: Opts | null) => { - if (newValue && newValue.value !== dataSrc) { - setDataSrc(newValue.value); - } - }, [dataSrc, setDataSrc]); + const changeDataSrc = React.useCallback( + (newValue: Opts | null) => { + if (newValue && newValue.value !== dataSrc) { + setDataSrc(newValue.value); + } + }, + [dataSrc, setDataSrc], + ); const refreshData = React.useCallback(async () => { try { @@ -33,13 +36,15 @@ export const ApiDemo = () => { const selectedOpt = opts.find((o) => o.value === dataSrc) || null; return ( -
+

API Demo

-

+

{data ? JSON.stringify(data, null, 2) : '(Awaiting fetch)'}
- ) -} + ); +}; diff --git a/apps/my-react-app/src/containers/Counter.tsx b/apps/my-react-app/src/containers/Counter.tsx index b441d37..44c62b2 100644 --- a/apps/my-react-app/src/containers/Counter.tsx +++ b/apps/my-react-app/src/containers/Counter.tsx @@ -10,4 +10,4 @@ export const Counter = () => {

); -} \ No newline at end of file +}; diff --git a/apps/my-react-app/src/containers/Logos.tsx b/apps/my-react-app/src/containers/Logos.tsx index 2e47efd..88140ff 100644 --- a/apps/my-react-app/src/containers/Logos.tsx +++ b/apps/my-react-app/src/containers/Logos.tsx @@ -13,4 +13,4 @@ export const Logos = () => { ); -} +}; diff --git a/apps/my-react-app/src/deps.ts b/apps/my-react-app/src/deps.ts index 739f2c3..1124a4d 100644 --- a/apps/my-react-app/src/deps.ts +++ b/apps/my-react-app/src/deps.ts @@ -6,15 +6,16 @@ export const assembleDeps = (config: Config) => { const deps = { config, apiClient: ApiClient(config), - } + }; return deps; -} +}; export type Deps = ReturnType; // I'm normally quite meticulous about types, but in cases like this where the app will basically always be started with // the correct deps and you can depend on a catastrophic fast-fail if not, we can just any-cast for the default null // value. +// eslint-disable-next-line @typescript-eslint/no-explicit-any export const DepsContext = React.createContext(null as any); export const useDeps = () => React.useContext(DepsContext); diff --git a/apps/my-react-app/src/main.tsx b/apps/my-react-app/src/main.tsx index 972cf14..a1846ba 100644 --- a/apps/my-react-app/src/main.tsx +++ b/apps/my-react-app/src/main.tsx @@ -9,8 +9,8 @@ import './index.css'; const config: Config = { api: { baseUrl: import.meta.env.VITE_APP_API_URL || 'http://localhost:3000', - } -} + }, +}; ReactDOM.createRoot(document.getElementById('root')!).render( diff --git a/apps/my-react-app/src/types.ts b/apps/my-react-app/src/types.ts index 24f1938..590546c 100644 --- a/apps/my-react-app/src/types.ts +++ b/apps/my-react-app/src/types.ts @@ -1,5 +1,5 @@ export type Config = { api: { baseUrl: string; - } -} + }; +}; diff --git a/apps/other-microservice/src/main.ts b/apps/other-microservice/src/main.ts index 2823557..3583968 100644 --- a/apps/other-microservice/src/main.ts +++ b/apps/other-microservice/src/main.ts @@ -14,7 +14,7 @@ app.get('/', (req, res) => { res.json({ status: 'ok', url }); }); -app.use(((err, req, res, next) => { +app.use(((err, req, res) => { if (!err) { res.status(404).json({ status: 'error', error: 'Not found' }); } else { diff --git a/eslint.config.js b/eslint.config.js index 3c9ef3a..ab15b5e 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -1,17 +1,33 @@ +// NOTE: @typescript-eslint hasn't caught up to the new eslint config format yet, so we have to use compatibility tooling +const { FlatCompat } = require("@eslint/eslintrc"); +const ESLintJS = require("@eslint/js"); +const ESLintTSParser = require("@typescript-eslint/parser"); +const reactRefresh = require("eslint-plugin-react-refresh"); +const prettier = require("eslint-config-prettier"); + +const compat = new FlatCompat({ resolvePluginsRelativeTo: __dirname }); + module.exports = [ // For all code + ESLintJS.configs.recommended, + ...compat.extends("plugin:@typescript-eslint/recommended"), + ...compat.extends('plugin:react-hooks/recommended'), { - "extends": [ - "eslint:recommended", - "plugin:@typescript-eslint/recommended", - ], - "parser": "@typescript-eslint/parser", + "linterOptions": { + "reportUnusedDisableDirectives": "error" + } + }, + { + "files": ["**/*.{ts,tsx}"], + "languageOptions": { + "parser": ESLintTSParser, + }, }, // For all back-end code { - "files": ["apps/*/{src,tests}/**/*","libs/*/{src,tests}/**/*"], - "ignores": ["apps/my-react-app/**/*", "libs/shared-fe/**"], + "files": ["apps/*/{src,tests}/**","libs/*/{src,tests}/**"], + "ignores": 
["apps/my-react-app/**", "libs/shared-fe/**"], "env": { "browser": false, "node": true, @@ -31,7 +47,9 @@ module.exports = [ "files": ["apps/my-react-app/{src,tests}/**", "libs/shared-fe/{src,tests}/**"], "extends": [ 'plugin:react-hooks/recommended' ], "env": { "browser": true, "es2020": true }, - "plugins": ["react-refresh"], + "plugins": { + "react-refresh": reactRefresh, + }, "rules": { "react-refresh/only-export-components": [ "warn", @@ -41,9 +59,5 @@ module.exports = [ }, // All code will be prettified, so make sure this is at the end - { - "extends": [ - "prettier", - ], - }, + prettier, ] diff --git a/libs/shared-fe/src/MyComponent.tsx b/libs/shared-fe/src/MyComponent.tsx index 8dd5497..593a93f 100644 --- a/libs/shared-fe/src/MyComponent.tsx +++ b/libs/shared-fe/src/MyComponent.tsx @@ -9,10 +9,8 @@ export const MyComponent = (p: { thing?: MyThing }) => { return (

- My Component - {' '} - + My Component -{' '} +

{showThing &&

Thing: {thing}

}
diff --git a/package.json b/package.json index b177c1a..168ffc6 100644 --- a/package.json +++ b/package.json @@ -3,6 +3,10 @@ "version": "1.0.0", "description": "A proof of concept for a monorepo using pnpm and typescript", "private": true, + "engines": { + "pnpm": "^8", + "node": "20" + }, "scripts": { "build": "pnpm build:libs && pnpm build:apps", "build:libs": "pnpm --filter './libs/*' build", @@ -18,6 +22,8 @@ "author": "Kael Shipman", "license": "ISC", "dependencies": { + "@eslint/eslintrc": "^3.0.0", + "@eslint/js": "^8.56.0", "@types/express": "^4.17.21", "@types/node": "^20.10.6", "@types/react": "^18.2.46", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e60ce10..ef7a73b 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -18,6 +18,12 @@ importers: .: dependencies: + '@eslint/eslintrc': + specifier: ^3.0.0 + version: 3.0.0 + '@eslint/js': + specifier: ^8.56.0 + version: 8.56.0 '@types/express': specifier: ^4.17.21 version: 4.17.21 @@ -818,6 +824,23 @@ packages: - supports-color dev: false + /@eslint/eslintrc@3.0.0: + resolution: {integrity: sha512-R8p3jN1kdWvFRiRfgpUxZ4PMgfJJFt6NuLGDnnqLb7RKmsd5Xa0KqRMjmaqRO7e38ZbG/9zKPgDjeJeqsDofSA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + dependencies: + ajv: 6.12.6 + debug: 4.3.4(supports-color@5.5.0) + espree: 9.6.1 + globals: 13.24.0 + ignore: 5.3.0 + import-fresh: 3.3.0 + js-yaml: 4.1.0 + minimatch: 3.1.2 + strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + dev: false + /@eslint/js@8.56.0: resolution: {integrity: sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} diff --git a/scripts/.internal/getEngineVersion.js b/scripts/.internal/getEngineVersion.js new file mode 100755 index 0000000..0f96968 --- /dev/null +++ b/scripts/.internal/getEngineVersion.js @@ -0,0 +1,45 @@ +#!/bin/env node +const { existsSync, readFileSync } = require('fs'); + +let root = process.cwd(); +for (let attempts = 1; !existsSync(`${root}/pnpm-workspace.yaml`); attempts++) { + if (attempts >= 3 || !existsSync(`${root}/..`)) { + root = null; + break; + } + root = `${root}/..`; +} +if (!root) { + console.error(`E: Couldn't find repo root. Current working directory is '${process.cwd()}'`); + process.exit(1); +} + +const engines = JSON.parse(readFileSync(`${root}/package.json`, 'utf8')).engines; +const available = Object.keys(engines).sort(); + +const echoHelp = (out) => { + if (!out) { + out = console.log + } + out(`Usage: ${process.argv[1].split(/\//g).pop()} [ENGINE]`); + out(); + out(`Available Engines: '${available.join(`', '`)}'`); + out(); +} + +let selected = null; +for (let i = 2; i < process.argv.length; i++) { + if (selected || !available.includes(process.argv[i])) { + echoHelp(console.error); + console.error(`E: Unknown argument '${process.argv[i]}'`); + process.exit(1); + } + selected = process.argv[i]; +} +if (!selected) { + echoHelp(console.error); + console.error(`E: No engine selected. Please pass one of the available engines as argument.`); + process.exit(1); +} + +process.stdout.write(engines[selected]); diff --git a/scripts/lint.sh b/scripts/lint.sh index 5ed5ece..d95c910 100755 --- a/scripts/lint.sh +++ b/scripts/lint.sh @@ -1,6 +1,11 @@ #!/bin/bash set -e -SCRIPTS="$(dirname "$0")" +ROOT="$(dirname "$0")/.." 
-ESLINT_USE_FLAT_CONFIG=1 eslint -c "$SCRIPTS/../eslint.config.js" --cache --cache-location ./node_modules/.cache/ $@
+FILES=('./src/**/*.{ts,tsx,js,jsx}')
+if [ -d ./tests ]; then
+  FILES+=('./tests/**/*.{ts,tsx,js,jsx}')
+fi
+
+ESLINT_USE_FLAT_CONFIG=1 eslint -c "$ROOT/eslint.config.js" --cache --cache-location ./node_modules/.cache/eslint-cache "$@" "${FILES[@]}"
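
The affected-package machinery introduced above (nrwl/nx-set-shas plus `get-pnpm-params.sh` and `get-affected-pkgs.sh`) can be approximated locally when debugging CI behavior. The sketch below is illustrative only and not part of this change; it assumes you are at the repo root with pnpm installed, and `origin/v1.x` is a stand-in for whatever base ref nx-set-shas would have selected.

```bash
#!/bin/bash
# Rough local dry-run of the CI "affected packages" logic (illustrative; not part of this diff).
set -e

# In CI, NX_BASE is provided by nrwl/nx-set-shas; pick a base ref by hand here.
NX_BASE="${NX_BASE:-origin/v1.x}"   # assumption: substitute your own base branch

# Same top-level-change check used by get-pnpm-params.sh
if git diff --name-only "$NX_BASE" | grep -qE '^(\.github|\.dockerignore|deploy/.*\.dockerfile|pnpm-lock|pnpm-workspace|tsconfig)'; then
  echo "Top-level changes detected; all packages are considered affected"
else
  # Same filter and ignore pattern the CI jobs pass to pnpm; prints one affected package dir per line
  pnpm --filter="...[${NX_BASE}]" --changed-files-ignore-pattern="**/tests,**/*.md" exec pwd
fi
```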