diff --git a/.eslintignore b/.eslintignore index c13a17faa..be4acda78 100644 --- a/.eslintignore +++ b/.eslintignore @@ -8,3 +8,4 @@ examples **/playground integration-tests/tests/prisma/*/client integration-tests/tests/prisma/*/drizzle +drizzle-kit/* diff --git a/.github/workflows/release-feature-branch.yaml b/.github/workflows/release-feature-branch.yaml index cce886c3a..5c2d76fb7 100644 --- a/.github/workflows/release-feature-branch.yaml +++ b/.github/workflows/release-feature-branch.yaml @@ -14,6 +14,7 @@ jobs: matrix: package: - drizzle-orm + - drizzle-kit - drizzle-zod - drizzle-typebox - drizzle-valibot @@ -135,12 +136,6 @@ jobs: cd drizzle-orm pnpm prisma generate --schema src/prisma/schema.prisma ) - ( - cd integration-tests - pnpm prisma generate --schema tests/prisma/pg/schema.prisma - pnpm prisma generate --schema tests/prisma/mysql/schema.prisma - pnpm prisma generate --schema tests/prisma/sqlite/schema.prisma - ) pnpm build - name: Run tests diff --git a/.github/workflows/release-latest.yaml b/.github/workflows/release-latest.yaml index b801c6824..d81a0bcab 100644 --- a/.github/workflows/release-latest.yaml +++ b/.github/workflows/release-latest.yaml @@ -10,6 +10,7 @@ jobs: matrix: package: - drizzle-orm + - drizzle-kit - drizzle-zod - drizzle-typebox - drizzle-valibot @@ -138,12 +139,6 @@ jobs: cd drizzle-orm pnpm prisma generate --schema src/prisma/schema.prisma ) - ( - cd integration-tests - pnpm prisma generate --schema tests/prisma/pg/schema.prisma - pnpm prisma generate --schema tests/prisma/mysql/schema.prisma - pnpm prisma generate --schema tests/prisma/sqlite/schema.prisma - ) pnpm build - name: Run tests @@ -232,3 +227,35 @@ jobs: } catch (e) { core.setFailed(e.message); } + + - name: Create GitHub release for KIT package + uses: actions/github-script@v6 + if: matrix.package == 'drizzle-kit' && steps.checks.outputs.has_new_release == 'true' + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + try { + const fs = require("fs"); + 
const path = require("path"); + + const version = "${{ steps.checks.outputs.version }}"; + const changelog = fs.readFileSync("${{ steps.checks.outputs.changelog_path }}", "utf8"); + + const release = await github.rest.repos.createRelease({ + owner: context.repo.owner, + repo: context.repo.repo, + tag_name: `drizzle-kit@${version}`, + name: `drizzle-kit@${version}`, + body: changelog, + }); + + await github.rest.repos.uploadReleaseAsset({ + owner: context.repo.owner, + repo: context.repo.repo, + release_id: release.data.id, + name: `${{ matrix.package }}-${version}-dist.tgz`, + data: fs.readFileSync(path.resolve("${{ matrix.package }}", "package.tgz")), + }); + } catch (e) { + core.setFailed(e.message); + } \ No newline at end of file diff --git a/.github/workflows/unpublish-release-feature-branch.yaml b/.github/workflows/unpublish-release-feature-branch.yaml index 1f0d30624..44542c24e 100644 --- a/.github/workflows/unpublish-release-feature-branch.yaml +++ b/.github/workflows/unpublish-release-feature-branch.yaml @@ -9,6 +9,7 @@ jobs: matrix: package: - drizzle-orm + - drizzle-kit - drizzle-zod - drizzle-typebox - drizzle-valibot diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8b289dc2e..a5f91755e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -39,25 +39,27 @@ There are several ways how you can provide a feedback --- -- [Contributing](#contributing) - - [Submitting bug report](#-submitting-bug-report) - - [Submitting feature request](#-submitting-feature-request) - - [Providing feedback](#-providing-feedback) - - [Contribution guidelines](#-contribution-guidelines) - - [General setup](#-general-setup) - - [Installing node](#-installing-node) - - [Install pnpm](#-install-pnpm) - - [Install docker](#-install-docker) - - [Local project setup](#-local-project-setup) - - [Clone project](#-clone-project) - - [Building project](#-building-project) - - [Build project](#-build-project) - - [Run tests](#-run-tests) - - [Commits and PRs](#-commits-and-prs) - - 
[Commit guideline](#-commit-guideline) +- [Pre-Contribution setup](#pre-contribution) + - [Installing node](#-installing-node) + - [Install pnpm](#-install-pnpm) + - [Install docker](#-install-docker) + - [Clone project](#-clone-project) + - [Repository Structure](#repo-structure) + - [Build project](#-build-project) +- [Contributing to `drizzle-orm`](#contributing-orm) + - [Project structure](#project-structure-orm) + - [Run tests](#run-tests-orm) + - [Commits and PRs](#commits-and-prs-orm) + - [Commit guideline](#commit-guideline-orm) + - [PR guideline](#pr-guideline-orm) +- [Contributing to `drizzle-kit`](#contributing-kit) + - [Project structure](#project-structure-kit) + - [Run tests](#run-tests-kit) + - [Commits and PRs](#commits-and-prs-kit) + - [Commit guideline](#commit-guideline-kit) - [PR guideline](#-pr-guideline) -## General setup +## Pre-Contribution setup ### Installing node @@ -99,14 +101,22 @@ git clone https://github.com/drizzle-team/drizzle-orm.git cd drizzle-orm ``` -## Building project +### Repository Structure ``` -Project sctructure +📂 drizzle-orm/ - orm core package with all main logic for each dialect -📂 drizzle-orm/ - core package with all main logic for each dialect +📂 drizzle-kit/ - kit core package with all main logic and tests for each dialect -📂 changelogs/ - all changelogs for drizzle-orm module +📂 drizzle-typebox/ - all the code related to drizzle+typebox extension + +📂 drizzle-valibot/ - all the code related to drizzle+valibot extension + +📂 drizzle-zod/ - all the code related to drizzle+zod extension + +📂 eslint-plugin-drizzle/ - all the code related to drizzle eslint plugin + +📂 changelogs/ - all changelogs for drizzle-orm, drizzle-kit, drizzle-typebox, drizzle-zod, drizzle-valibot modules 📂 examples/ - package with Drizzle ORM usage examples @@ -119,7 +129,21 @@ Project sctructure - `"pnpm i && pnpm build"` -> if you run this script from root folder - it will build whole monorepo. 
Running this script from specific package folder will only build current package -### Run tests +## Contributing to `drizzle-orm` + +### Project structure + +``` +Project structure + +📂 pg-core, mysql-core, sqlite-core - core packages for each dialect with all the main logic for relation and query builder + +📂 sql/ - package containing all expressions and SQL template implementation + +All other folders are for specific drivers that Drizzle ORM supports. +``` + +### Run tests --- All tests for Drizzle ORM are integration tests, that are simulating real database and different queries and responses from database. Each file in `integration-tests` has a list of different scenarios for different dialect+driver. Each file is creating a docker instance with needed database and running test cases there. Right after all tests were run - docker container with database will be deleted @@ -130,9 +154,9 @@ If you have added additional logic to core package - make sure that all tests we - `"cd integration-tests && pnpm test"` -> will run all tests in integration test folder -## Commits and PRs +## Commits and PRs -### Commit guideline +### Commit guideline --- @@ -160,7 +184,7 @@ In specific case, groupBy was responding with unreadable error > **Warning**: > All commits should be signed, before submitting PR. 
Please check detailed info on [how to sign commits](https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification) -### PR guideline +### PR guideline --- @@ -183,3 +207,91 @@ Example - Tests on bugs, that was fixed; To understand how test should be created and run - please check [Run tests](#-run-tests) section + + +## Contributing to `drizzle-kit` + +### Project structure + +``` +📂 cli/ + | + | -> 📄 schema.ts - all the commands defined using brocli + | + | -> 📂 commands - all the business logic for drizzle-kit commands + +📂 extensions/ - all the extension helpers for databases + +📂 serialaizer/ - all the necessary logic to read from the Drizzle ORM schema and convert it to a common JSON format, as well as the logic to introspect all tables, types, and other database elements and convert them to a common JSON format + +📄 introspect-pg.ts, introspect-mysql.ts, introspect-sqlite.ts - these files are responsible for mapping JSON snapshots to TypeScript files during introspect commands + +📄 snapshotsDiffer.ts - this file handles the mapping from JSON snapshot format to JSON statement objects. + +📄 jsonStatements.ts - this file defines JSON statement types, interfaces, and helper functions. + +📄 sqlgenerator.ts - this file converts JSON statements to SQL strings. +``` + +### Run tests + +--- +All tests for Drizzle Kit are integration tests, that are simulating real database and different queries and responses from database. Each file in `drizzle-kit/tests` has a list of different scenarios for different commands. Each MySQL file is creating a docker instance with needed database and running test cases there. Right after all tests were run - docker container with database will be deleted. For PostgreSQL we are using PgLite and for SQLite we are using SQLite files. 
+ +If you are in the root of repo: + +- `"cd drizzle-kit && pnpm test"` -> will run all tests + +## Commits and PRs + +### Commit guideline + +--- + +We have specific rules on how commit messages should be structured. + +It's important to make sure your commit messages are clear, concise, and informative to make it easier for others to understand the changes you are making + +Commit message pattern + +``` + + + +``` + +Example + +``` +Add groupBy error message + +In specific case, groupBy was responding with unreadable error +... +``` + +> **Warning**: +> All commits should be signed, before submitting PR. Please check detailed info on [how to sign commits](https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification) + +### PR guideline + +--- + +1. PR should be created with specific name pattern + +``` +[-kit]: +``` + +Example + +``` +[Pg-kit] Add PostGIS extension support +``` + +2. PR should contain detailed description with everything, that was changed + +3. Each PR should contain + - Tests on feature, that was created; + - Tests on bugs, that was fixed; + +To understand how test should be created and run - please check [Run tests](#run-tests) section diff --git a/changelogs/drizzle-kit/0.23.2.md b/changelogs/drizzle-kit/0.23.2.md new file mode 100644 index 000000000..c69643b8a --- /dev/null +++ b/changelogs/drizzle-kit/0.23.2.md @@ -0,0 +1,2 @@ +- Fixed a bug in PostgreSQL with push and introspect where the `schemaFilter` object was passed. It was detecting enums even in schemas that were not defined in the schemaFilter. +- Fixed the `drizzle-kit up` command to work as expected, starting from the sequences release. 
diff --git a/changelogs/drizzle-kit/0.24.0.md b/changelogs/drizzle-kit/0.24.0.md new file mode 100644 index 000000000..b766e018b --- /dev/null +++ b/changelogs/drizzle-kit/0.24.0.md @@ -0,0 +1,24 @@ +## Breaking changes (for SQLite users) + +#### Fixed [Composite primary key order is not consistent](https://github.com/drizzle-team/drizzle-kit-mirror/issues/342) by removing `sort` in SQLite and to be consistant with the same logic in PostgreSQL and MySQL + +The issue that may arise for SQLite users with any driver using composite primary keys is that the order in the database may differ from the Drizzle schema. + +- If you are using `push`, you **MAY** be prompted to update your table with a new order of columns in the composite primary key. You will need to either change it manually in the database or push the changes, but this may lead to data loss, etc. + +- If you are using `generate`, you **MAY** also be prompted to update your table with a new order of columns in the composite primary key. You can either keep that migration or skip it by emptying the SQL migration file. + +If nothing works for you and you are blocked, please reach out to me @AndriiSherman. I will try to help you! 
+ + +## Bug fixes + +- [[BUG] When using double type columns, import is not inserted](https://github.com/drizzle-team/drizzle-kit-mirror/issues/403) - thanks @Karibash +- [[BUG] A number value is specified as the default for a column of type char](https://github.com/drizzle-team/drizzle-kit-mirror/issues/404) - thanks @Karibash +- [[BUG]: Array default in migrations are wrong](https://github.com/drizzle-team/drizzle-orm/issues/2621) - thanks @L-Mario564 +- [[FEATURE]: Simpler default array fields](https://github.com/drizzle-team/drizzle-orm/issues/2709) - thanks @L-Mario564 +- [[BUG]: drizzle-kit generate succeeds but generates invalid SQL for default([]) - Postgres](https://github.com/drizzle-team/drizzle-orm/issues/2432) - thanks @L-Mario564 +- [[BUG]: Incorrect type for array column default value](https://github.com/drizzle-team/drizzle-orm/issues/2334) - thanks @L-Mario564 +- [[BUG]: error: column is of type integer[] but default expression is of type integer](https://github.com/drizzle-team/drizzle-orm/issues/2224) - thanks @L-Mario564 +- [[BUG]: Default value in array generating wrong migration file](https://github.com/drizzle-team/drizzle-orm/issues/1003) - thanks @L-Mario564 +- [[BUG]: enum as array, not possible?](https://github.com/drizzle-team/drizzle-orm/issues/1564) - thanks @L-Mario564 \ No newline at end of file diff --git a/changelogs/drizzle-kit/0.24.1.md b/changelogs/drizzle-kit/0.24.1.md new file mode 100644 index 000000000..d70f6ebbe --- /dev/null +++ b/changelogs/drizzle-kit/0.24.1.md @@ -0,0 +1,32 @@ +## Bug fixes + +> Big thanks to @L-Mario564 for his [PR](https://github.com/drizzle-team/drizzle-orm/pull/2804). It conflicted in most cases with a PR that was merged, but we incorporated some of his logic. Merging it would have caused more problems and taken more time to resolve, so we just took a few things from his PR, like removing "::" mappings in introspect and some array type default handlers + +### What was fixed + +1. 
The Drizzle Kit CLI was not working properly for the `introspect` command. +2. Added the ability to use column names with special characters for all dialects. +3. Included PostgreSQL sequences in the introspection process. +4. Reworked array type introspection and added all test cases. +5. Fixed all (we hope) default issues in PostgreSQL, where `::` was included in the introspected output. +6. `preserve` casing option was broken + +### Tickets that were closed + +- [[BUG]: invalid schema generation with drizzle-kit introspect:pg](https://github.com/drizzle-team/drizzle-orm/issues/1210) +- [[BUG][mysql introspection]: TS error when introspect column including colon](https://github.com/drizzle-team/drizzle-orm/issues/1928) +- [[BUG]: Unhandled defaults when introspecting postgres db](https://github.com/drizzle-team/drizzle-orm/issues/1625) +- [[BUG]: PostgreSQL Enum Naming and Schema Typing Issue](https://github.com/drizzle-team/drizzle-orm/issues/2315) +- [[BUG]: drizzle-kit instrospect command generates syntax error on varchar column types](https://github.com/drizzle-team/drizzle-orm/issues/2714) +- [[BUG]: Introspecting varchar[] type produces syntactically invalid schema.ts](https://github.com/drizzle-team/drizzle-orm/issues/1633) +- [[BUG]: introspect:pg column not using generated enum name](https://github.com/drizzle-team/drizzle-orm/issues/1648) +- [[BUG]: drizzle-kit introspect casing "preserve" config not working](https://github.com/drizzle-team/drizzle-orm/issues/2773) +- [[BUG]: drizzle-kit introspect fails on required param that is defined](https://github.com/drizzle-team/drizzle-orm/issues/2719) +- [[BUG]: Error when running npx drizzle-kit introspect: "Expected object, received string"](https://github.com/drizzle-team/drizzle-orm/issues/2657) +- [[BUG]: Missing index names when running introspect command [MYSQL]](https://github.com/drizzle-team/drizzle-orm/issues/2525) +- [[BUG]: drizzle-kit introspect TypeError: Cannot read properties of undefined 
(reading 'toLowerCase')](https://github.com/drizzle-team/drizzle-orm/issues/2338) +- [[BUG]: Wrong column name when using PgEnum.array()](https://github.com/drizzle-team/drizzle-orm/issues/2100) +- [[BUG]: Incorrect Schema Generated when introspecting extisting pg database](https://github.com/drizzle-team/drizzle-orm/issues/1985) +- [[⚠️🐞BUG]: index() missing argument after introspection, causes tsc error that fails the build](https://github.com/drizzle-team/drizzle-orm/issues/1870) +- [[BUG]: drizzle-kit introspect small errors](https://github.com/drizzle-team/drizzle-orm/issues/1738) +- [[BUG]: Missing bigint import in drizzle-kit introspect](https://github.com/drizzle-team/drizzle-orm/issues/1020) \ No newline at end of file diff --git a/changelogs/drizzle-kit/0.24.2.md b/changelogs/drizzle-kit/0.24.2.md new file mode 100644 index 000000000..962a29acc --- /dev/null +++ b/changelogs/drizzle-kit/0.24.2.md @@ -0,0 +1,24 @@ +## New Features + +### 🎉 Support for `pglite` driver + +You can now use pglite with all drizzle-kit commands, including Drizzle Studio! + +```ts +import { defineConfig } from "drizzle-kit"; + +export default defineConfig({ + dialect: "postgresql", + driver: "pglite", + schema: "./schema.ts", + dbCredentials: { + url: "local-pg.db", + }, + verbose: true, + strict: true, +}); +``` + +## Bug fixes + +- mysql-kit: fix GENERATED ALWAYS AS ... 
NOT NULL - [#2824](https://github.com/drizzle-team/drizzle-orm/pull/2824) \ No newline at end of file diff --git a/changelogs/drizzle-orm/0.32.2.md b/changelogs/drizzle-orm/0.32.2.md new file mode 100644 index 000000000..9ce68473c --- /dev/null +++ b/changelogs/drizzle-orm/0.32.2.md @@ -0,0 +1,4 @@ +- Fix AWS Data API type hints bugs in RQB +- Fix set transactions in MySQL bug - thanks @roguesherlock +- Add forwaring dependencies within useLiveQuery, fixes [#2651](https://github.com/drizzle-team/drizzle-orm/issues/2651) - thanks @anstapol +- Export additional types from SQLite package, like `AnySQLiteUpdate` - thanks @veloii \ No newline at end of file diff --git a/changelogs/drizzle-orm/0.33.0.md b/changelogs/drizzle-orm/0.33.0.md new file mode 100644 index 000000000..0093c9d05 --- /dev/null +++ b/changelogs/drizzle-orm/0.33.0.md @@ -0,0 +1,64 @@ +## Breaking changes (for some of postgres.js users) + +#### Bugs fixed for this breaking change + +- [Open +[BUG]: jsonb always inserted as a json string when using postgres-js](https://github.com/drizzle-team/drizzle-orm/issues/724) +- [[BUG]: jsonb type on postgres implement incorrectly](https://github.com/drizzle-team/drizzle-orm/issues/1511) + +> As we are doing with other drivers, we've changed the behavior of PostgreSQL-JS to pass raw JSON values, the same as you see them in the database. So if you are using the PostgreSQL-JS driver and passing data to Drizzle elsewhere, please check the new behavior of the client after it is passed to Drizzle. + +> We will update it to ensure it does not override driver behaviors, but this will be done as a complex task for everything in Drizzle in other releases + +If you were using `postgres-js` with `jsonb` fields, you might have seen stringified objects in your database, while drizzle insert and select operations were working as expected. + +You need to convert those fields from strings to actual JSON objects. 
To do this, you can use the following query to update your database: + +**if you are using jsonb:** +```sql +update table_name +set jsonb_column = (jsonb_column #>> '{}')::jsonb; +``` + +**if you are using json:** +```sql +update table_name +set json_column = (json_column #>> '{}')::json; +``` + +We've tested it in several cases, and it worked well, but only if all stringified objects are arrays or objects. If you have primitives like strings, numbers, booleans, etc., you can use this query to update all the fields + +**if you are using jsonb:** +```sql +UPDATE table_name +SET jsonb_column = CASE + -- Convert to JSONB if it is a valid JSON object or array + WHEN jsonb_column #>> '{}' LIKE '{%' OR jsonb_column #>> '{}' LIKE '[%' THEN + (jsonb_column #>> '{}')::jsonb + ELSE + jsonb_column +END +WHERE + jsonb_column IS NOT NULL; +``` + +**if you are using json:** +```sql +UPDATE table_name +SET json_column = CASE + -- Convert to JSON if it is a valid JSON object or array + WHEN json_column #>> '{}' LIKE '{%' OR json_column #>> '{}' LIKE '[%' THEN + (json_column #>> '{}')::json + ELSE + json_column +END +WHERE json_column IS NOT NULL; +``` + +If nothing works for you and you are blocked, please reach out to me @AndriiSherman. I will try to help you! 
+ +## Bug Fixes + +- [[BUG]: boolean mode not working with prepared statements (bettersqlite)](https://github.com/drizzle-team/drizzle-orm/issues/2568) - thanks @veloii +- [[BUG]: isTable helper function is not working](https://github.com/drizzle-team/drizzle-orm/issues/2672) - thanks @hajek-raven +- [[BUG]: Documentation is outdated on inArray and notInArray Methods](https://github.com/drizzle-team/drizzle-orm/issues/2690) - thanks @RemiPeruto \ No newline at end of file diff --git a/drizzle-kit/.gitignore b/drizzle-kit/.gitignore new file mode 100644 index 000000000..8d2cf5a81 --- /dev/null +++ b/drizzle-kit/.gitignore @@ -0,0 +1,23 @@ +/* +**/.DS_Store + +!src +!tests +!vitest.config.ts +!README.md +!CONTRIBUTING.md +!schema.ts + +!.eslint +!.gitignore +!package.json +!tsconfig.json +!tsconfig.cli-types.json +!pnpm-lock.yaml +!.github +!build.ts +!build.dev.ts + +tests/test.ts + +!patches diff --git a/drizzle-kit/README.md b/drizzle-kit/README.md new file mode 100644 index 000000000..d2a4191b7 --- /dev/null +++ b/drizzle-kit/README.md @@ -0,0 +1,79 @@ +## Drizzle Kit + +DrizzleKit - is a CLI migrator tool for DrizzleORM. It is probably one and only tool that lets you completely automatically generate SQL migrations and covers ~95% of the common cases like deletions and renames by prompting user input. + - is a mirror repository for issues. + +## Documentation + +Check the full documenation on [the website](https://orm.drizzle.team/kit-docs/overview) + +### How it works + +`drizzle-kit` will traverse `schema folder` or `schema file`, generate schema snapshot and compare it to the previous version, if there's one. + Based on the difference it will generate all needed SQL migrations and if there are any `automatically unresolvable` cases like `renames` it will prompt user for input. 
+ +For schema file: + +```typescript +// ./src/db/schema.ts + +import { integer, pgTable, serial, text, varchar } from "drizzle-orm/pg-core"; + +const users = pgTable("users", { + id: serial("id").primaryKey(), + fullName: varchar("full_name", { length: 256 }), + }, (table) => ({ + nameIdx: index("name_idx", table.fullName), + }) +); + +export const authOtp = pgTable("auth_otp", { + id: serial("id").primaryKey(), + phone: varchar("phone", { length: 256 }), + userId: integer("user_id").references(() => users.id), +}); +``` + +It will generate: + +```SQL +CREATE TABLE IF NOT EXISTS auth_otp ( + "id" SERIAL PRIMARY KEY, + "phone" character varying(256), + "user_id" INT +); + +CREATE TABLE IF NOT EXISTS users ( + "id" SERIAL PRIMARY KEY, + "full_name" character varying(256) +); + +DO $$ BEGIN + ALTER TABLE auth_otp ADD CONSTRAINT auth_otp_user_id_fkey FOREIGN KEY ("user_id") REFERENCES users(id); +EXCEPTION + WHEN duplicate_object THEN null; +END $$; + +CREATE INDEX IF NOT EXISTS users_full_name_index ON users (full_name); +``` + +### Installation & configuration + +```shell +npm install -D drizzle-kit +``` + +Running with CLI options + +```jsonc +// package.json +{ + "scripts": { + "generate": "drizzle-kit generate --out migrations-folder --schema src/db/schema.ts" + } +} +``` + +```shell +npm run generate +``` diff --git a/drizzle-kit/build.dev.ts b/drizzle-kit/build.dev.ts new file mode 100644 index 000000000..58879d9c1 --- /dev/null +++ b/drizzle-kit/build.dev.ts @@ -0,0 +1,51 @@ +import * as esbuild from 'esbuild'; +import { cpSync } from 'node:fs'; + +const driversPackages = [ + // postgres drivers + 'pg', + 'postgres', + '@vercel/postgres', + '@neondatabase/serverless', + // mysql drivers + 'mysql2', + '@planetscale/database', + // sqlite drivers + '@libsql/client', + 'better-sqlite3', +]; + +esbuild.buildSync({ + entryPoints: ['./src/utils.ts'], + bundle: true, + outfile: 'dist/utils.js', + format: 'cjs', + target: 'node16', + platform: 'node', + external: 
['drizzle-orm', 'esbuild', ...driversPackages], + banner: { + js: `#!/usr/bin/env -S node --loader @esbuild-kit/esm-loader --no-warnings`, + }, +}); + +esbuild.buildSync({ + entryPoints: ['./src/cli/index.ts'], + bundle: true, + outfile: 'dist/index.cjs', + format: 'cjs', + target: 'node16', + platform: 'node', + external: [ + 'commander', + 'json-diff', + 'glob', + 'esbuild', + 'drizzle-orm', + ...driversPackages, + ], + banner: { + js: `#!/usr/bin/env -S node --loader ./dist/loader.mjs --no-warnings`, + }, +}); + +cpSync('./src/loader.mjs', 'dist/loader.mjs'); diff --git a/drizzle-kit/build.ts b/drizzle-kit/build.ts new file mode 100644 index 000000000..701e9c84c --- /dev/null +++ b/drizzle-kit/build.ts @@ -0,0 +1,109 @@ +import * as esbuild from 'esbuild'; +import { readFileSync, writeFileSync } from 'node:fs'; +import * as tsup from 'tsup'; +import pkg from './package.json'; + +const driversPackages = [ + // postgres drivers + 'pg', + 'postgres', + '@vercel/postgres', + '@neondatabase/serverless', + '@electric-sql/pglite', + // mysql drivers + 'mysql2', + '@planetscale/database', + // sqlite drivers + '@libsql/client', + 'better-sqlite3', +]; + +esbuild.buildSync({ + entryPoints: ['./src/utils.ts'], + bundle: true, + outfile: 'dist/utils.js', + format: 'cjs', + target: 'node16', + platform: 'node', + external: [ + 'commander', + 'json-diff', + 'glob', + 'esbuild', + 'drizzle-orm', + ...driversPackages, + ], + banner: { + js: `#!/usr/bin/env node`, + }, +}); + +esbuild.buildSync({ + entryPoints: ['./src/utils.ts'], + bundle: true, + outfile: 'dist/utils.mjs', + format: 'esm', + target: 'node16', + platform: 'node', + external: [ + 'commander', + 'json-diff', + 'glob', + 'esbuild', + 'drizzle-orm', + ...driversPackages, + ], + banner: { + js: `#!/usr/bin/env node`, + }, +}); + +esbuild.buildSync({ + entryPoints: ['./src/cli/index.ts'], + bundle: true, + outfile: 'dist/bin.cjs', + format: 'cjs', + target: 'node16', + platform: 'node', + define: { + 
'process.env.DRIZZLE_KIT_VERSION': `"${pkg.version}"`, + }, + external: [ + 'esbuild', + 'drizzle-orm', + ...driversPackages, + ], + banner: { + js: `#!/usr/bin/env node`, + }, +}); + +const main = async () => { + await tsup.build({ + entryPoints: ['./src/index.ts', './src/api.ts'], + outDir: './dist', + splitting: false, + dts: true, + format: ['cjs', 'esm'], + outExtension: (ctx) => { + if (ctx.format === 'cjs') { + return { + dts: '.d.ts', + js: '.js', + }; + } + return { + dts: '.d.mts', + js: '.mjs', + }; + }, + }); + + const apiCjs = readFileSync('./dist/api.js', 'utf8').replace(/await import\(/g, 'require('); + writeFileSync('./dist/api.js', apiCjs); +}; + +main().catch((e) => { + console.error(e); + process.exit(1); +}); diff --git a/drizzle-kit/package.json b/drizzle-kit/package.json new file mode 100644 index 000000000..9d9e1d227 --- /dev/null +++ b/drizzle-kit/package.json @@ -0,0 +1,140 @@ +{ + "name": "drizzle-kit", + "version": "0.24.2", + "homepage": "https://orm.drizzle.team", + "keywords": [ + "drizzle", + "orm", + "pg", + "mysql", + "postgresql", + "postgres", + "sqlite", + "database", + "sql", + "typescript", + "ts", + "drizzle-kit", + "migrations", + "schema" + ], + "publishConfig": { + "provenance": true + }, + "repository": { + "type": "git", + "url": "git+https://github.com/drizzle-team/drizzle-orm.git" + }, + "author": "Drizzle Team", + "license": "MIT", + "bin": { + "drizzle-kit": "./bin.cjs" + }, + "scripts": { + "api": "tsx ./dev/api.ts", + "migrate:old": "drizzle-kit generate:mysql", + "cli": "tsx ./src/cli/index.ts", + "test": "TEST_CONFIG_PATH_PREFIX=./tests/cli/ vitest", + "build": "rm -rf ./dist && tsx build.ts && cp package.json dist/ && attw --pack dist", + "build:dev": "rm -rf ./dist && tsx build.dev.ts && tsc -p tsconfig.cli-types.json && chmod +x ./dist/index.cjs", + "pack": "cp package.json README.md dist/ && (cd dist && npm pack --pack-destination ..) 
&& rm -f package.tgz && mv *.tgz package.tgz", + "tsc": "tsc -p tsconfig.build.json", + "publish": "npm publish package.tgz" + }, + "dependencies": { + "@drizzle-team/brocli": "^0.10.1", + "@esbuild-kit/esm-loader": "^2.5.5", + "esbuild": "^0.19.7", + "esbuild-register": "^3.5.0" + }, + "devDependencies": { + "@arethetypeswrong/cli": "^0.15.3", + "@aws-sdk/client-rds-data": "^3.556.0", + "@cloudflare/workers-types": "^4.20230518.0", + "@electric-sql/pglite": "^0.1.5", + "@hono/node-server": "^1.9.0", + "@hono/zod-validator": "^0.2.1", + "@libsql/client": "^0.4.2", + "@neondatabase/serverless": "^0.9.1", + "@originjs/vite-plugin-commonjs": "^1.0.3", + "@planetscale/database": "^1.16.0", + "@types/better-sqlite3": "^7.6.4", + "@types/dockerode": "^3.3.28", + "@types/glob": "^8.1.0", + "@types/json-diff": "^1.0.3", + "@types/minimatch": "^5.1.2", + "@types/node": "^18.11.15", + "@types/pg": "^8.10.7", + "@types/pluralize": "^0.0.33", + "@types/semver": "^7.5.5", + "@types/uuid": "^9.0.8", + "@types/ws": "^8.5.10", + "@typescript-eslint/eslint-plugin": "^7.2.0", + "@typescript-eslint/parser": "^7.2.0", + "@vercel/postgres": "^0.8.0", + "ava": "^5.1.0", + "better-sqlite3": "^9.4.3", + "camelcase": "^7.0.1", + "chalk": "^5.2.0", + "commander": "^12.1.0", + "dockerode": "^3.3.4", + "dotenv": "^16.0.3", + "drizzle-kit": "0.21.2", + "drizzle-orm": "workspace:./drizzle-orm/dist", + "env-paths": "^3.0.0", + "esbuild-node-externals": "^1.9.0", + "eslint": "^8.57.0", + "eslint-config-prettier": "^9.1.0", + "eslint-plugin-prettier": "^5.1.3", + "get-port": "^6.1.2", + "glob": "^8.1.0", + "hanji": "^0.0.5", + "hono": "^4.1.5", + "json-diff": "1.0.6", + "minimatch": "^7.4.3", + "mysql2": "3.3.3", + "node-fetch": "^3.3.2", + "pg": "^8.11.5", + "pluralize": "^8.0.0", + "postgres": "^3.4.4", + "prettier": "^2.8.1", + "semver": "^7.5.4", + "superjson": "^2.2.1", + "tsup": "^8.0.2", + "tsx": "^3.12.1", + "typescript": "^5.4.3", + "uuid": "^9.0.1", + "vite-tsconfig-paths": "^4.3.2", + 
"vitest": "^1.4.0", + "wrangler": "^3.22.1", + "ws": "^8.16.0", + "zod": "^3.20.2", + "zx": "^7.2.2" + }, + "exports": { + ".": { + "import": { + "types": "./index.d.mts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + }, + "types": "./index.d.mts", + "default": "./index.mjs" + }, + "./api": { + "import": { + "types": "./api.d.mts", + "default": "./api.mjs" + }, + "require": { + "types": "./api.d.ts", + "default": "./api.js" + }, + "types": "./api.d.mts", + "default": "./api.mjs" + } + } +} diff --git a/drizzle-kit/patches/difflib@0.2.4.patch b/drizzle-kit/patches/difflib@0.2.4.patch new file mode 100644 index 000000000..8cd9a76b2 --- /dev/null +++ b/drizzle-kit/patches/difflib@0.2.4.patch @@ -0,0 +1,31 @@ +diff --git a/lib/difflib.js b/lib/difflib.js +index 80d250e7e18bdc972df3621ee5c05ffff0e3659f..94916f33dbae0d3eea6f74e2c619c4c6f52cc125 100644 +--- a/lib/difflib.js ++++ b/lib/difflib.js +@@ -17,7 +17,7 @@ Function restore(delta, which): + + Function unifiedDiff(a, b): + For two lists of strings, return a delta in unified diff format. +- ++. + Class SequenceMatcher: + A flexible class for comparing pairs of sequences of any type. 
+ +@@ -75,7 +75,7 @@ Class Differ: + + SequenceMatcher = (function() { + +- SequenceMatcher.name = 'SequenceMatcher'; ++ // SequenceMatcher.name = 'SequenceMatcher'; + + /* + SequenceMatcher is a flexible class for comparing pairs of sequences of +@@ -737,7 +737,7 @@ Class Differ: + + Differ = (function() { + +- Differ.name = 'Differ'; ++ // Differ.name = 'Differ'; + + /* + Differ is a class for comparing sequences of lines of text, and diff --git a/drizzle-kit/pnpm-lock.yaml b/drizzle-kit/pnpm-lock.yaml new file mode 100644 index 000000000..8f4d58f55 --- /dev/null +++ b/drizzle-kit/pnpm-lock.yaml @@ -0,0 +1,7603 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +patchedDependencies: + difflib@0.2.4: + hash: jq4t3ysdpnbunjeje4v7nrqn2q + path: patches/difflib@0.2.4.patch + +importers: + + .: + dependencies: + '@esbuild-kit/esm-loader': + specifier: ^2.5.5 + version: 2.6.5 + esbuild: + specifier: ^0.19.7 + version: 0.19.12 + esbuild-register: + specifier: ^3.5.0 + version: 3.5.0(esbuild@0.19.12) + devDependencies: + '@arethetypeswrong/cli': + specifier: ^0.15.3 + version: 0.15.3 + '@aws-sdk/client-rds-data': + specifier: ^3.556.0 + version: 3.577.0 + '@cloudflare/workers-types': + specifier: ^4.20230518.0 + version: 4.20240512.0 + '@electric-sql/pglite': + specifier: ^0.1.5 + version: 0.1.5 + '@hono/node-server': + specifier: ^1.9.0 + version: 1.11.1 + '@hono/zod-validator': + specifier: ^0.2.1 + version: 0.2.1(hono@4.3.9)(zod@3.23.8) + '@libsql/client': + specifier: ^0.4.2 + version: 0.4.3(bufferutil@4.0.8)(utf-8-validate@6.0.3) + '@neondatabase/serverless': + specifier: ^0.9.1 + version: 0.9.3 + '@originjs/vite-plugin-commonjs': + specifier: ^1.0.3 + version: 1.0.3 + '@planetscale/database': + specifier: ^1.16.0 + version: 1.18.0 + '@types/better-sqlite3': + specifier: ^7.6.4 + version: 7.6.10 + '@types/dockerode': + specifier: ^3.3.28 + version: 3.3.29 + '@types/glob': + specifier: ^8.1.0 + version: 8.1.0 + 
'@types/json-diff': + specifier: ^1.0.3 + version: 1.0.3 + '@types/minimatch': + specifier: ^5.1.2 + version: 5.1.2 + '@types/node': + specifier: ^18.11.15 + version: 18.19.33 + '@types/pg': + specifier: ^8.10.7 + version: 8.11.6 + '@types/pluralize': + specifier: ^0.0.33 + version: 0.0.33 + '@types/semver': + specifier: ^7.5.5 + version: 7.5.8 + '@types/uuid': + specifier: ^9.0.8 + version: 9.0.8 + '@types/ws': + specifier: ^8.5.10 + version: 8.5.10 + '@typescript-eslint/eslint-plugin': + specifier: ^7.2.0 + version: 7.10.0(@typescript-eslint/parser@7.10.0(eslint@8.57.0)(typescript@5.4.5))(eslint@8.57.0)(typescript@5.4.5) + '@typescript-eslint/parser': + specifier: ^7.2.0 + version: 7.10.0(eslint@8.57.0)(typescript@5.4.5) + '@vercel/postgres': + specifier: ^0.8.0 + version: 0.8.0 + ava: + specifier: ^5.1.0 + version: 5.3.1 + better-sqlite3: + specifier: ^9.4.3 + version: 9.6.0 + camelcase: + specifier: ^7.0.1 + version: 7.0.1 + chalk: + specifier: ^5.2.0 + version: 5.3.0 + commander: + specifier: ^12.1.0 + version: 12.1.0 + dockerode: + specifier: ^3.3.4 + version: 3.3.5 + dotenv: + specifier: ^16.0.3 + version: 16.4.5 + drizzle-kit: + specifier: 0.21.2 + version: 0.21.2 + drizzle-orm: + specifier: 0.32.0-85c8008 + version: 0.32.0-85c8008(@aws-sdk/client-rds-data@3.577.0)(@cloudflare/workers-types@4.20240512.0)(@electric-sql/pglite@0.1.5)(@libsql/client@0.4.3(bufferutil@4.0.8)(utf-8-validate@6.0.3))(@neondatabase/serverless@0.9.3)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.10)(@types/pg@8.11.6)(@vercel/postgres@0.8.0)(better-sqlite3@9.6.0)(mysql2@2.3.3)(pg@8.11.5)(postgres@3.4.4) + env-paths: + specifier: ^3.0.0 + version: 3.0.0 + esbuild-node-externals: + specifier: ^1.9.0 + version: 1.13.1(esbuild@0.19.12) + eslint: + specifier: ^8.57.0 + version: 8.57.0 + eslint-config-prettier: + specifier: ^9.1.0 + version: 9.1.0(eslint@8.57.0) + eslint-plugin-prettier: + specifier: ^5.1.3 + version: 
5.1.3(eslint-config-prettier@9.1.0(eslint@8.57.0))(eslint@8.57.0)(prettier@2.8.8) + get-port: + specifier: ^6.1.2 + version: 6.1.2 + glob: + specifier: ^8.1.0 + version: 8.1.0 + hanji: + specifier: ^0.0.5 + version: 0.0.5 + hono: + specifier: ^4.1.5 + version: 4.3.9 + json-diff: + specifier: 1.0.6 + version: 1.0.6 + minimatch: + specifier: ^7.4.3 + version: 7.4.6 + mysql2: + specifier: 2.3.3 + version: 2.3.3 + node-fetch: + specifier: ^3.3.2 + version: 3.3.2 + pg: + specifier: ^8.11.5 + version: 8.11.5 + pluralize: + specifier: ^8.0.0 + version: 8.0.0 + postgres: + specifier: ^3.4.4 + version: 3.4.4 + prettier: + specifier: ^2.8.1 + version: 2.8.8 + semver: + specifier: ^7.5.4 + version: 7.6.2 + superjson: + specifier: ^2.2.1 + version: 2.2.1 + tsup: + specifier: ^8.0.2 + version: 8.0.2(postcss@8.4.38)(typescript@5.4.5) + tsx: + specifier: ^3.12.1 + version: 3.14.0 + typescript: + specifier: ^5.4.3 + version: 5.4.5 + uuid: + specifier: ^9.0.1 + version: 9.0.1 + vite-tsconfig-paths: + specifier: ^4.3.2 + version: 4.3.2(typescript@5.4.5)(vite@5.2.11(@types/node@18.19.33)) + vitest: + specifier: ^1.4.0 + version: 1.6.0(@types/node@18.19.33) + wrangler: + specifier: ^3.22.1 + version: 3.57.0(@cloudflare/workers-types@4.20240512.0)(bufferutil@4.0.8)(utf-8-validate@6.0.3) + ws: + specifier: ^8.16.0 + version: 8.17.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) + zod: + specifier: ^3.20.2 + version: 3.23.8 + zx: + specifier: ^7.2.2 + version: 7.2.3 + +packages: + + '@andrewbranch/untar.js@1.0.3': + resolution: {integrity: sha512-Jh15/qVmrLGhkKJBdXlK1+9tY4lZruYjsgkDFj08ZmDiWVBLJcqkok7Z0/R0In+i1rScBpJlSvrTS2Lm41Pbnw==} + + '@arethetypeswrong/cli@0.15.3': + resolution: {integrity: sha512-sIMA9ZJBWDEg1+xt5RkAEflZuf8+PO8SdKj17x6PtETuUho+qlZJg4DgmKc3q+QwQ9zOB5VLK6jVRbFdNLdUIA==} + engines: {node: '>=18'} + hasBin: true + + '@arethetypeswrong/core@0.15.1': + resolution: {integrity: sha512-FYp6GBAgsNz81BkfItRz8RLZO03w5+BaeiPma1uCfmxTnxbtuMrI/dbzGiOk8VghO108uFI0oJo0OkewdSHw7g==} + 
engines: {node: '>=18'} + + '@aws-crypto/ie11-detection@3.0.0': + resolution: {integrity: sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q==} + + '@aws-crypto/sha256-browser@3.0.0': + resolution: {integrity: sha512-8VLmW2B+gjFbU5uMeqtQM6Nj0/F1bro80xQXCW6CQBWgosFWXTx77aeOF5CAIAmbOK64SdMBJdNr6J41yP5mvQ==} + + '@aws-crypto/sha256-js@3.0.0': + resolution: {integrity: sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ==} + + '@aws-crypto/supports-web-crypto@3.0.0': + resolution: {integrity: sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg==} + + '@aws-crypto/util@3.0.0': + resolution: {integrity: sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w==} + + '@aws-sdk/client-rds-data@3.577.0': + resolution: {integrity: sha512-24a27II6UkNhe2RB6ZwtQPcM3QB/DuRcKvzMmfvipgWS72Q5FEtuq3CO66IObWUel/pxi3ucE6mSvVCFnm7tBQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/client-sso-oidc@3.577.0': + resolution: {integrity: sha512-njmKSPDWueWWYVFpFcZ2P3fI6/pdQVDa0FgCyYZhOnJLgEHZIcBBg1AsnkVWacBuLopp9XVt2m+7hO6ugY1/1g==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/client-sso@3.577.0': + resolution: {integrity: sha512-BwujdXrydlk6UEyPmewm5GqG4nkQ6OVyRhS/SyZP/6UKSFv2/sf391Cmz0hN0itUTH1rR4XeLln8XCOtarkrzg==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/client-sts@3.577.0': + resolution: {integrity: sha512-509Kklimva1XVlhGbpTpeX3kOP6ORpm44twJxDHpa9TURbmoaxj7veWlnLCbDorxDTrbsDghvYZshvcLsojVpg==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/core@3.576.0': + resolution: {integrity: sha512-KDvDlbeipSTIf+ffKtTg1m419TK7s9mZSWC8bvuZ9qx6/sjQFOXIKOVqyuli6DnfxGbvRcwoRuY99OcCH1N/0w==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-env@3.577.0': + resolution: {integrity: sha512-Jxu255j0gToMGEiqufP8ZtKI8HW90lOLjwJ3LrdlD/NLsAY0tOQf1fWc53u28hWmmNGMxmCrL2p66IOgMDhDUw==} + engines: {node: '>=16.0.0'} + + 
'@aws-sdk/credential-provider-http@3.577.0': + resolution: {integrity: sha512-n++yhCp67b9+ZRGEdY1jhamB5E/O+QsIDOPSuRmdaSGMCOd82oUEKPgIVEU1bkqxDsBxgiEWuvtfhK6sNiDS0A==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-ini@3.577.0': + resolution: {integrity: sha512-q7lHPtv6BjRvChUE3m0tIaEZKxPTaZ1B3lKxGYsFl3VLAu5N8yGCUKwuA1izf4ucT+LyKscVGqK6VDZx1ev3nw==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.577.0 + + '@aws-sdk/credential-provider-node@3.577.0': + resolution: {integrity: sha512-epZ1HOMsrXBNczc0HQpv0VMjqAEpc09DUA7Rg3gUJfn8umhML7A7bXnUyqPA+S54q397UYg1leQKdSn23OiwQQ==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-process@3.577.0': + resolution: {integrity: sha512-Gin6BWtOiXxIgITrJ3Nwc+Y2P1uVT6huYR4EcbA/DJUPWyO0n9y5UFLewPvVbLkRn15JeEqErBLUrHclkiOKtw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-sso@3.577.0': + resolution: {integrity: sha512-iVm5SQvS7EgZTJsRaqUOmDQpBQPPPat42SCbWFvFQOLrl8qewq8OP94hFS5w2mP62zngeYzqhJnDel79HXbxew==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/credential-provider-web-identity@3.577.0': + resolution: {integrity: sha512-ZGHGNRaCtJJmszb9UTnC7izNCtRUttdPlLdMkh41KPS32vfdrBDHs1JrpbZijItRj1xKuOXsiYSXLAaHGcLh8Q==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.577.0 + + '@aws-sdk/middleware-host-header@3.577.0': + resolution: {integrity: sha512-9ca5MJz455CODIVXs0/sWmJm7t3QO4EUa1zf8pE8grLpzf0J94bz/skDWm37Pli13T3WaAQBHCTiH2gUVfCsWg==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-logger@3.577.0': + resolution: {integrity: sha512-aPFGpGjTZcJYk+24bg7jT4XdIp42mFXSuPt49lw5KygefLyJM/sB0bKKqPYYivW0rcuZ9brQ58eZUNthrzYAvg==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-recursion-detection@3.577.0': + resolution: {integrity: sha512-pn3ZVEd2iobKJlR3H+bDilHjgRnNrQ6HMmK9ZzZw89Ckn3Dcbv48xOv4RJvu0aU8SDLl/SNCxppKjeLDTPGBNA==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/middleware-user-agent@3.577.0': + resolution: {integrity: 
sha512-P55HAXgwmiHHpFx5JEPvOnAbfhN7v6sWv9PBQs+z2tC7QiBcPS0cdJR6PfV7J1n4VPK52/OnrK3l9VxdQ7Ms0g==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/region-config-resolver@3.577.0': + resolution: {integrity: sha512-4ChCFACNwzqx/xjg3zgFcW8Ali6R9C95cFECKWT/7CUM1D0MGvkclSH2cLarmHCmJgU6onKkJroFtWp0kHhgyg==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/token-providers@3.577.0': + resolution: {integrity: sha512-0CkIZpcC3DNQJQ1hDjm2bdSy/Xjs7Ny5YvSsacasGOkNfk+FdkiQy6N67bZX3Zbc9KIx+Nz4bu3iDeNSNplnnQ==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sso-oidc': ^3.577.0 + + '@aws-sdk/types@3.577.0': + resolution: {integrity: sha512-FT2JZES3wBKN/alfmhlo+3ZOq/XJ0C7QOZcDNrpKjB0kqYoKjhVKZ/Hx6ArR0czkKfHzBBEs6y40ebIHx2nSmA==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/util-endpoints@3.577.0': + resolution: {integrity: sha512-FjuUz1Kdy4Zly2q/c58tpdqHd6z7iOdU/caYzoc8jwgAHBDBbIJNQLCU9hXJnPV2M8pWxQDyIZsoVwtmvErPzw==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/util-locate-window@3.568.0': + resolution: {integrity: sha512-3nh4TINkXYr+H41QaPelCceEB2FXP3fxp93YZXB/kqJvX0U9j0N0Uk45gvsjmEPzG8XxkPEeLIfT2I1M7A6Lig==} + engines: {node: '>=16.0.0'} + + '@aws-sdk/util-user-agent-browser@3.577.0': + resolution: {integrity: sha512-zEAzHgR6HWpZOH7xFgeJLc6/CzMcx4nxeQolZxVZoB5pPaJd3CjyRhZN0xXeZB0XIRCWmb4yJBgyiugXLNMkLA==} + + '@aws-sdk/util-user-agent-node@3.577.0': + resolution: {integrity: sha512-XqvtFjbSMtycZTWVwDe8DRWovuoMbA54nhUoZwVU6rW9OSD6NZWGR512BUGHFaWzW0Wg8++Dj10FrKTG2XtqfA==} + engines: {node: '>=16.0.0'} + peerDependencies: + aws-crt: '>=1.0.0' + peerDependenciesMeta: + aws-crt: + optional: true + + '@aws-sdk/util-utf8-browser@3.259.0': + resolution: {integrity: sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw==} + + '@balena/dockerignore@1.0.2': + resolution: {integrity: sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==} + + '@cloudflare/kv-asset-handler@0.3.2': + resolution: 
{integrity: sha512-EeEjMobfuJrwoctj7FA1y1KEbM0+Q1xSjobIEyie9k4haVEBB7vkDvsasw1pM3rO39mL2akxIAzLMUAtrMHZhA==} + engines: {node: '>=16.13'} + + '@cloudflare/workerd-darwin-64@1.20240512.0': + resolution: {integrity: sha512-VMp+CsSHFALQiBzPdQ5dDI4T1qwLu0mQ0aeKVNDosXjueN0f3zj/lf+mFil5/9jBbG3t4mG0y+6MMnalP9Lobw==} + engines: {node: '>=16'} + cpu: [x64] + os: [darwin] + + '@cloudflare/workerd-darwin-arm64@1.20240512.0': + resolution: {integrity: sha512-lZktXGmzMrB5rJqY9+PmnNfv1HKlj/YLZwMjPfF0WVKHUFdvQbAHsi7NlKv6mW9uIvlZnS+K4sIkWc0MDXcRnA==} + engines: {node: '>=16'} + cpu: [arm64] + os: [darwin] + + '@cloudflare/workerd-linux-64@1.20240512.0': + resolution: {integrity: sha512-wrHvqCZZqXz6Y3MUTn/9pQNsvaoNjbJpuA6vcXsXu8iCzJi911iVW2WUEBX+MpUWD+mBIP0oXni5tTlhkokOPw==} + engines: {node: '>=16'} + cpu: [x64] + os: [linux] + + '@cloudflare/workerd-linux-arm64@1.20240512.0': + resolution: {integrity: sha512-YPezHMySL9J9tFdzxz390eBswQ//QJNYcZolz9Dgvb3FEfdpK345cE/bsWbMOqw5ws2f82l388epoenghtYvAg==} + engines: {node: '>=16'} + cpu: [arm64] + os: [linux] + + '@cloudflare/workerd-windows-64@1.20240512.0': + resolution: {integrity: sha512-SxKapDrIYSscMR7lGIp/av0l6vokjH4xQ9ACxHgXh+OdOus9azppSmjaPyw4/ePvg7yqpkaNjf9o258IxWtvKQ==} + engines: {node: '>=16'} + cpu: [x64] + os: [win32] + + '@cloudflare/workers-types@4.20240512.0': + resolution: {integrity: sha512-o2yTEWg+YK/I1t/Me+dA0oarO0aCbjibp6wSeaw52DSE9tDyKJ7S+Qdyw/XsMrKn4t8kF6f/YOba+9O4MJfW9w==} + + '@colors/colors@1.5.0': + resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} + engines: {node: '>=0.1.90'} + + '@cspotcode/source-map-support@0.8.1': + resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} + engines: {node: '>=12'} + + '@electric-sql/pglite@0.1.5': + resolution: {integrity: sha512-eymv4ONNvoPZQTvOQIi5dbpR+J5HzEv0qQH9o/y3gvNheJV/P/NFcrbsfJZYTsDKoq7DKrTiFNexsRkJKy8x9Q==} + + 
'@esbuild-kit/core-utils@3.3.2': + resolution: {integrity: sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ==} + + '@esbuild-kit/esm-loader@2.6.5': + resolution: {integrity: sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA==} + + '@esbuild-plugins/node-globals-polyfill@0.2.3': + resolution: {integrity: sha512-r3MIryXDeXDOZh7ih1l/yE9ZLORCd5e8vWg02azWRGj5SPTuoh69A2AIyn0Z31V/kHBfZ4HgWJ+OK3GTTwLmnw==} + peerDependencies: + esbuild: '*' + + '@esbuild-plugins/node-modules-polyfill@0.2.2': + resolution: {integrity: sha512-LXV7QsWJxRuMYvKbiznh+U1ilIop3g2TeKRzUxOG5X3YITc8JyyTa90BmLwqqv0YnX4v32CSlG+vsziZp9dMvA==} + peerDependencies: + esbuild: '*' + + '@esbuild/aix-ppc64@0.19.12': + resolution: {integrity: sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [aix] + + '@esbuild/aix-ppc64@0.20.2': + resolution: {integrity: sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.17.19': + resolution: {integrity: sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm64@0.18.20': + resolution: {integrity: sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm64@0.19.12': + resolution: {integrity: sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm64@0.20.2': + resolution: {integrity: sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==} + engines: {node: '>=12'} + 
cpu: [arm64] + os: [android] + + '@esbuild/android-arm@0.17.19': + resolution: {integrity: sha512-rIKddzqhmav7MSmoFCmDIb6e2W57geRsM94gV2l38fzhXMwq7hZoClug9USI2pFRGL06f4IOPHHpFNOkWieR8A==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + + '@esbuild/android-arm@0.18.20': + resolution: {integrity: sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + + '@esbuild/android-arm@0.19.12': + resolution: {integrity: sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + + '@esbuild/android-arm@0.20.2': + resolution: {integrity: sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + + '@esbuild/android-x64@0.17.19': + resolution: {integrity: sha512-uUTTc4xGNDT7YSArp/zbtmbhO0uEEK9/ETW29Wk1thYUJBz3IVnvgEiEwEa9IeLyvnpKrWK64Utw2bgUmDveww==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + + '@esbuild/android-x64@0.18.20': + resolution: {integrity: sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + + '@esbuild/android-x64@0.19.12': + resolution: {integrity: sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + + '@esbuild/android-x64@0.20.2': + resolution: {integrity: sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + + '@esbuild/darwin-arm64@0.17.19': + resolution: {integrity: sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-arm64@0.18.20': + 
resolution: {integrity: sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-arm64@0.19.12': + resolution: {integrity: sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-arm64@0.20.2': + resolution: {integrity: sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-x64@0.17.19': + resolution: {integrity: sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + + '@esbuild/darwin-x64@0.18.20': + resolution: {integrity: sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + + '@esbuild/darwin-x64@0.19.12': + resolution: {integrity: sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + + '@esbuild/darwin-x64@0.20.2': + resolution: {integrity: sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + + '@esbuild/freebsd-arm64@0.17.19': + resolution: {integrity: sha512-pBwbc7DufluUeGdjSU5Si+P3SoMF5DQ/F/UmTSb8HXO80ZEAJmrykPyzo1IfNbAoaqw48YRpv8shwd1NoI0jcQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-arm64@0.18.20': + resolution: {integrity: sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-arm64@0.19.12': + resolution: {integrity: 
sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-arm64@0.20.2': + resolution: {integrity: sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.17.19': + resolution: {integrity: sha512-4lu+n8Wk0XlajEhbEffdy2xy53dpR06SlzvhGByyg36qJw6Kpfk7cp45DR/62aPH9mtJRmIyrXAS5UWBrJT6TQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.18.20': + resolution: {integrity: sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.19.12': + resolution: {integrity: sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.20.2': + resolution: {integrity: sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + + '@esbuild/linux-arm64@0.17.19': + resolution: {integrity: sha512-ct1Tg3WGwd3P+oZYqic+YZF4snNl2bsnMKRkb3ozHmnM0dGWuxcPTTntAF6bOP0Sp4x0PjSF+4uHQ1xvxfRKqg==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm64@0.18.20': + resolution: {integrity: sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm64@0.19.12': + resolution: {integrity: sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm64@0.20.2': + resolution: {integrity: 
sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm@0.17.19': + resolution: {integrity: sha512-cdmT3KxjlOQ/gZ2cjfrQOtmhG4HJs6hhvm3mWSRDPtZ/lP5oe8FWceS10JaSJC13GBd4eH/haHnqf7hhGNLerA==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-arm@0.18.20': + resolution: {integrity: sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-arm@0.19.12': + resolution: {integrity: sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-arm@0.20.2': + resolution: {integrity: sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.17.19': + resolution: {integrity: sha512-w4IRhSy1VbsNxHRQpeGCHEmibqdTUx61Vc38APcsRbuVgK0OPEnQ0YD39Brymn96mOx48Y2laBQGqgZ0j9w6SQ==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-ia32@0.18.20': + resolution: {integrity: sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-ia32@0.19.12': + resolution: {integrity: sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-ia32@0.20.2': + resolution: {integrity: sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-loong64@0.14.54': + resolution: {integrity: sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw==} 
+ engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-loong64@0.17.19': + resolution: {integrity: sha512-2iAngUbBPMq439a+z//gE+9WBldoMp1s5GWsUSgqHLzLJ9WoZLZhpwWuym0u0u/4XmZ3gpHmzV84PonE+9IIdQ==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-loong64@0.18.20': + resolution: {integrity: sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-loong64@0.19.12': + resolution: {integrity: sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-loong64@0.20.2': + resolution: {integrity: sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-mips64el@0.17.19': + resolution: {integrity: sha512-LKJltc4LVdMKHsrFe4MGNPp0hqDFA1Wpt3jE1gEyM3nKUvOiO//9PheZZHfYRfYl6AwdTH4aTcXSqBerX0ml4A==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-mips64el@0.18.20': + resolution: {integrity: sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-mips64el@0.19.12': + resolution: {integrity: sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-mips64el@0.20.2': + resolution: {integrity: sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.17.19': + resolution: {integrity: sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg==} + engines: {node: '>=12'} + cpu: 
[ppc64] + os: [linux] + + '@esbuild/linux-ppc64@0.18.20': + resolution: {integrity: sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-ppc64@0.19.12': + resolution: {integrity: sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-ppc64@0.20.2': + resolution: {integrity: sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-riscv64@0.17.19': + resolution: {integrity: sha512-FC3nUAWhvFoutlhAkgHf8f5HwFWUL6bYdvLc/TTuxKlvLi3+pPzdZiFKSWz/PF30TB1K19SuCxDTI5KcqASJqA==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-riscv64@0.18.20': + resolution: {integrity: sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-riscv64@0.19.12': + resolution: {integrity: sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-riscv64@0.20.2': + resolution: {integrity: sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-s390x@0.17.19': + resolution: {integrity: sha512-IbFsFbxMWLuKEbH+7sTkKzL6NJmG2vRyy6K7JJo55w+8xDk7RElYn6xvXtDW8HCfoKBFK69f3pgBJSUSQPr+4Q==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-s390x@0.18.20': + resolution: {integrity: sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-s390x@0.19.12': + 
resolution: {integrity: sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-s390x@0.20.2': + resolution: {integrity: sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-x64@0.17.19': + resolution: {integrity: sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + + '@esbuild/linux-x64@0.18.20': + resolution: {integrity: sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + + '@esbuild/linux-x64@0.19.12': + resolution: {integrity: sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + + '@esbuild/linux-x64@0.20.2': + resolution: {integrity: sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-x64@0.17.19': + resolution: {integrity: sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.18.20': + resolution: {integrity: sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.19.12': + resolution: {integrity: sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.20.2': + resolution: {integrity: 
sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-x64@0.17.19': + resolution: {integrity: sha512-cnq5brJYrSZ2CF6c35eCmviIN3k3RczmHz8eYaVlNasVqsNY+JKohZU5MKmaOI+KkllCdzOKKdPs762VCPC20g==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.18.20': + resolution: {integrity: sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.19.12': + resolution: {integrity: sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.20.2': + resolution: {integrity: sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + + '@esbuild/sunos-x64@0.17.19': + resolution: {integrity: sha512-vCRT7yP3zX+bKWFeP/zdS6SqdWB8OIpaRq/mbXQxTGHnIxspRtigpkUcDMlSCOejlHowLqII7K2JKevwyRP2rg==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + + '@esbuild/sunos-x64@0.18.20': + resolution: {integrity: sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + + '@esbuild/sunos-x64@0.19.12': + resolution: {integrity: sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + + '@esbuild/sunos-x64@0.20.2': + resolution: {integrity: sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + + '@esbuild/win32-arm64@0.17.19': + resolution: {integrity: 
sha512-yYx+8jwowUstVdorcMdNlzklLYhPxjniHWFKgRqH7IFlUEa0Umu3KuYplf1HUZZ422e3NU9F4LGb+4O0Kdcaag==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-arm64@0.18.20': + resolution: {integrity: sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-arm64@0.19.12': + resolution: {integrity: sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-arm64@0.20.2': + resolution: {integrity: sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-ia32@0.17.19': + resolution: {integrity: sha512-eggDKanJszUtCdlVs0RB+h35wNlb5v4TWEkq4vZcmVt5u/HiDZrTXe2bWFQUez3RgNHwx/x4sk5++4NSSicKkw==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-ia32@0.18.20': + resolution: {integrity: sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-ia32@0.19.12': + resolution: {integrity: sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-ia32@0.20.2': + resolution: {integrity: sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-x64@0.17.19': + resolution: {integrity: sha512-lAhycmKnVOuRYNtRtatQR1LPQf2oYCkRGkSFnseDAKPl8lu5SOsK/e1sXe5a0Pc5kHIHe6P2I/ilntNv2xf3cA==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + + '@esbuild/win32-x64@0.18.20': + resolution: {integrity: 
sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + + '@esbuild/win32-x64@0.19.12': + resolution: {integrity: sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + + '@esbuild/win32-x64@0.20.2': + resolution: {integrity: sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + + '@eslint-community/eslint-utils@4.4.0': + resolution: {integrity: sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + + '@eslint-community/regexpp@4.10.0': + resolution: {integrity: sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==} + engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} + + '@eslint/eslintrc@2.1.4': + resolution: {integrity: sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + '@eslint/js@8.57.0': + resolution: {integrity: sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + '@ewoudenberg/difflib@0.1.0': + resolution: {integrity: sha512-OU5P5mJyD3OoWYMWY+yIgwvgNS9cFAU10f+DDuvtogcWQOoJIsQ4Hy2McSfUfhKjq8L0FuWVb4Rt7kgA+XK86A==} + + '@fastify/busboy@2.1.1': + resolution: {integrity: sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==} + engines: {node: '>=14'} + + '@hono/node-server@1.11.1': + resolution: {integrity: sha512-GW1Iomhmm1o4Z+X57xGby8A35Cu9UZLL7pSMdqDBkD99U5cywff8F+8hLk5aBTzNubnsFAvWQ/fZjNwPsEn9lA==} + engines: {node: '>=18.14.1'} + + 
'@hono/zod-validator@0.2.1': + resolution: {integrity: sha512-HFoxln7Q6JsE64qz2WBS28SD33UB2alp3aRKmcWnNLDzEL1BLsWfbdX6e1HIiUprHYTIXf5y7ax8eYidKUwyaA==} + peerDependencies: + hono: '>=3.9.0' + zod: ^3.19.1 + + '@humanwhocodes/config-array@0.11.14': + resolution: {integrity: sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==} + engines: {node: '>=10.10.0'} + + '@humanwhocodes/module-importer@1.0.1': + resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} + engines: {node: '>=12.22'} + + '@humanwhocodes/object-schema@2.0.3': + resolution: {integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==} + + '@isaacs/cliui@8.0.2': + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} + + '@jest/schemas@29.6.3': + resolution: {integrity: sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jridgewell/gen-mapping@0.3.5': + resolution: {integrity: sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==} + engines: {node: '>=6.0.0'} + + '@jridgewell/resolve-uri@3.1.2': + resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} + engines: {node: '>=6.0.0'} + + '@jridgewell/set-array@1.2.1': + resolution: {integrity: sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==} + engines: {node: '>=6.0.0'} + + '@jridgewell/sourcemap-codec@1.4.15': + resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} + + '@jridgewell/trace-mapping@0.3.25': + resolution: {integrity: 
sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==} + + '@jridgewell/trace-mapping@0.3.9': + resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + + '@libsql/client@0.4.3': + resolution: {integrity: sha512-AUYKnSPqAsFBVWBvmtrb4dG3pQlvTKT92eztAest9wQU2iJkabH8WzHLDb3dKFWKql7/kiCqvBQUVpozDwhekQ==} + + '@libsql/core@0.4.3': + resolution: {integrity: sha512-r28iYBtaLBW9RRgXPFh6cGCsVI/rwRlOzSOpAu/1PVTm6EJ3t233pUf97jETVHU0vjdr1d8VvV6fKAvJkokqCw==} + + '@libsql/darwin-arm64@0.2.0': + resolution: {integrity: sha512-+qyT2W/n5CFH1YZWv2mxW4Fsoo4dX9Z9M/nvbQqZ7H84J8hVegvVAsIGYzcK8xAeMEcpU5yGKB1Y9NoDY4hOSQ==} + cpu: [arm64] + os: [darwin] + + '@libsql/darwin-x64@0.2.0': + resolution: {integrity: sha512-hwmO2mF1n8oDHKFrUju6Jv+n9iFtTf5JUK+xlnIE3Td0ZwGC/O1R/Z/btZTd9nD+vsvakC8SJT7/Q6YlWIkhEw==} + cpu: [x64] + os: [darwin] + + '@libsql/hrana-client@0.5.6': + resolution: {integrity: sha512-mjQoAmejZ1atG+M3YR2ZW+rg6ceBByH/S/h17ZoYZkqbWrvohFhXyz2LFxj++ARMoY9m6w3RJJIRdJdmnEUlFg==} + + '@libsql/isomorphic-fetch@0.1.12': + resolution: {integrity: sha512-MRo4UcmjAGAa3ac56LoD5OE13m2p0lu0VEtZC2NZMcogM/jc5fU9YtMQ3qbPjFJ+u2BBjFZgMPkQaLS1dlMhpg==} + + '@libsql/isomorphic-ws@0.1.5': + resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} + + '@libsql/linux-arm64-gnu@0.2.0': + resolution: {integrity: sha512-1w2lPXIYtnBaK5t/Ej5E8x7lPiE+jP3KATI/W4yei5Z/ONJh7jQW5PJ7sYU95vTME3hWEM1FXN6kvzcpFAte7w==} + cpu: [arm64] + os: [linux] + + '@libsql/linux-arm64-musl@0.2.0': + resolution: {integrity: sha512-lkblBEJ7xuNiWNjP8DDq0rqoWccszfkUS7Efh5EjJ+GDWdCBVfh08mPofIZg0fZVLWQCY3j+VZCG1qZfATBizg==} + cpu: [arm64] + os: [linux] + + '@libsql/linux-x64-gnu@0.2.0': + resolution: {integrity: sha512-+x/d289KeJydwOhhqSxKT+6MSQTCfLltzOpTzPccsvdt5fxg8CBi+gfvEJ4/XW23Sa+9bc7zodFP0i6MOlxX7w==} + cpu: [x64] + os: [linux] + + 
'@libsql/linux-x64-musl@0.2.0': + resolution: {integrity: sha512-5Xn0c5A6vKf9D1ASpgk7mef//FuY7t5Lktj/eiU4n3ryxG+6WTpqstTittJUgepVjcleLPYxIhQAYeYwTYH1IQ==} + cpu: [x64] + os: [linux] + + '@libsql/win32-x64-msvc@0.2.0': + resolution: {integrity: sha512-rpK+trBIpRST15m3cMYg5aPaX7kvCIottxY7jZPINkKAaScvfbn9yulU/iZUM9YtuK96Y1ZmvwyVIK/Y5DzoMQ==} + cpu: [x64] + os: [win32] + + '@neon-rs/load@0.0.4': + resolution: {integrity: sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw==} + + '@neondatabase/serverless@0.7.2': + resolution: {integrity: sha512-wU3WA2uTyNO7wjPs3Mg0G01jztAxUxzd9/mskMmtPwPTjf7JKWi9AW5/puOGXLxmZ9PVgRFeBVRVYq5nBPhsCg==} + + '@neondatabase/serverless@0.9.3': + resolution: {integrity: sha512-6ZBK8asl2Z3+ADEaELvbaVVGVlmY1oAzkxxZfpmXPKFuJhbDN+5fU3zYBamsahS/Ch1zE+CVWB3R+8QEI2LMSw==} + + '@nodelib/fs.scandir@2.1.5': + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + + '@nodelib/fs.stat@2.0.5': + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + + '@nodelib/fs.walk@1.2.8': + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + + '@originjs/vite-plugin-commonjs@1.0.3': + resolution: {integrity: sha512-KuEXeGPptM2lyxdIEJ4R11+5ztipHoE7hy8ClZt3PYaOVQ/pyngd2alaSrPnwyFeOW1UagRBaQ752aA1dTMdOQ==} + + '@pkgjs/parseargs@0.11.0': + resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} + engines: {node: '>=14'} + + '@pkgr/core@0.1.1': + resolution: {integrity: sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==} + engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + + '@planetscale/database@1.18.0': + resolution: {integrity: 
sha512-t2XdOfrVgcF7AW791FtdPS27NyNqcE1SpoXgk3HpziousvUMsJi4Q6NL3JyOBpsMOrvk94749o8yyonvX5quPw==} + engines: {node: '>=16'} + + '@rollup/rollup-android-arm-eabi@4.17.2': + resolution: {integrity: sha512-NM0jFxY8bB8QLkoKxIQeObCaDlJKewVlIEkuyYKm5An1tdVZ966w2+MPQ2l8LBZLjR+SgyV+nRkTIunzOYBMLQ==} + cpu: [arm] + os: [android] + + '@rollup/rollup-android-arm64@4.17.2': + resolution: {integrity: sha512-yeX/Usk7daNIVwkq2uGoq2BYJKZY1JfyLTaHO/jaiSwi/lsf8fTFoQW/n6IdAsx5tx+iotu2zCJwz8MxI6D/Bw==} + cpu: [arm64] + os: [android] + + '@rollup/rollup-darwin-arm64@4.17.2': + resolution: {integrity: sha512-kcMLpE6uCwls023+kknm71ug7MZOrtXo+y5p/tsg6jltpDtgQY1Eq5sGfHcQfb+lfuKwhBmEURDga9N0ol4YPw==} + cpu: [arm64] + os: [darwin] + + '@rollup/rollup-darwin-x64@4.17.2': + resolution: {integrity: sha512-AtKwD0VEx0zWkL0ZjixEkp5tbNLzX+FCqGG1SvOu993HnSz4qDI6S4kGzubrEJAljpVkhRSlg5bzpV//E6ysTQ==} + cpu: [x64] + os: [darwin] + + '@rollup/rollup-linux-arm-gnueabihf@4.17.2': + resolution: {integrity: sha512-3reX2fUHqN7sffBNqmEyMQVj/CKhIHZd4y631duy0hZqI8Qoqf6lTtmAKvJFYa6bhU95B1D0WgzHkmTg33In0A==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm-musleabihf@4.17.2': + resolution: {integrity: sha512-uSqpsp91mheRgw96xtyAGP9FW5ChctTFEoXP0r5FAzj/3ZRv3Uxjtc7taRQSaQM/q85KEKjKsZuiZM3GyUivRg==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm64-gnu@4.17.2': + resolution: {integrity: sha512-EMMPHkiCRtE8Wdk3Qhtciq6BndLtstqZIroHiiGzB3C5LDJmIZcSzVtLRbwuXuUft1Cnv+9fxuDtDxz3k3EW2A==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-arm64-musl@4.17.2': + resolution: {integrity: sha512-NMPylUUZ1i0z/xJUIx6VUhISZDRT+uTWpBcjdv0/zkp7b/bQDF+NfnfdzuTiB1G6HTodgoFa93hp0O1xl+/UbA==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-powerpc64le-gnu@4.17.2': + resolution: {integrity: sha512-T19My13y8uYXPw/L/k0JYaX1fJKFT/PWdXiHr8mTbXWxjVF1t+8Xl31DgBBvEKclw+1b00Chg0hxE2O7bTG7GQ==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.17.2': + resolution: {integrity: 
sha512-BOaNfthf3X3fOWAB+IJ9kxTgPmMqPPH5f5k2DcCsRrBIbWnaJCgX2ll77dV1TdSy9SaXTR5iDXRL8n7AnoP5cg==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-s390x-gnu@4.17.2': + resolution: {integrity: sha512-W0UP/x7bnn3xN2eYMql2T/+wpASLE5SjObXILTMPUBDB/Fg/FxC+gX4nvCfPBCbNhz51C+HcqQp2qQ4u25ok6g==} + cpu: [s390x] + os: [linux] + + '@rollup/rollup-linux-x64-gnu@4.17.2': + resolution: {integrity: sha512-Hy7pLwByUOuyaFC6mAr7m+oMC+V7qyifzs/nW2OJfC8H4hbCzOX07Ov0VFk/zP3kBsELWNFi7rJtgbKYsav9QQ==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-linux-x64-musl@4.17.2': + resolution: {integrity: sha512-h1+yTWeYbRdAyJ/jMiVw0l6fOOm/0D1vNLui9iPuqgRGnXA0u21gAqOyB5iHjlM9MMfNOm9RHCQ7zLIzT0x11Q==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-win32-arm64-msvc@4.17.2': + resolution: {integrity: sha512-tmdtXMfKAjy5+IQsVtDiCfqbynAQE/TQRpWdVataHmhMb9DCoJxp9vLcCBjEQWMiUYxO1QprH/HbY9ragCEFLA==} + cpu: [arm64] + os: [win32] + + '@rollup/rollup-win32-ia32-msvc@4.17.2': + resolution: {integrity: sha512-7II/QCSTAHuE5vdZaQEwJq2ZACkBpQDOmQsE6D6XUbnBHW8IAhm4eTufL6msLJorzrHDFv3CF8oCA/hSIRuZeQ==} + cpu: [ia32] + os: [win32] + + '@rollup/rollup-win32-x64-msvc@4.17.2': + resolution: {integrity: sha512-TGGO7v7qOq4CYmSBVEYpI1Y5xDuCEnbVC5Vth8mOsW0gDSzxNrVERPc790IGHsrT2dQSimgMr9Ub3Y1Jci5/8w==} + cpu: [x64] + os: [win32] + + '@sinclair/typebox@0.27.8': + resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} + + '@sindresorhus/is@4.6.0': + resolution: {integrity: sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==} + engines: {node: '>=10'} + + '@smithy/abort-controller@3.0.0': + resolution: {integrity: sha512-p6GlFGBt9K4MYLu72YuJ523NVR4A8oHlC5M2JO6OmQqN8kAc/uh1JqLE+FizTokrSJGg0CSvC+BrsmGzKtsZKA==} + engines: {node: '>=16.0.0'} + + '@smithy/config-resolver@3.0.0': + resolution: {integrity: sha512-2GzOfADwYLQugYkKQhIyZyQlM05K+tMKvRnc6eFfZcpJGRfKoMUMYdPlBKmqHwQFXQKBrGV6cxL9oymWgDzvFw==} + engines: 
{node: '>=16.0.0'} + + '@smithy/core@2.0.1': + resolution: {integrity: sha512-rcMkjvwxH/bER+oZUPR0yTA0ELD6m3A+d92+CFkdF6HJFCBB1bXo7P5pm21L66XwTN01B6bUhSCQ7cymWRD8zg==} + engines: {node: '>=16.0.0'} + + '@smithy/credential-provider-imds@3.0.0': + resolution: {integrity: sha512-lfmBiFQcA3FsDAPxNfY0L7CawcWtbyWsBOHo34nF095728JLkBX4Y9q/VPPE2r7fqMVK+drmDigqE2/SSQeVRA==} + engines: {node: '>=16.0.0'} + + '@smithy/fetch-http-handler@3.0.1': + resolution: {integrity: sha512-uaH74i5BDj+rBwoQaXioKpI0SHBJFtOVwzrCpxZxphOW0ki5jhj7dXvDMYM2IJem8TpdFvS2iC08sjOblfFGFg==} + + '@smithy/hash-node@3.0.0': + resolution: {integrity: sha512-84qXstNemP3XS5jcof0el6+bDfjzuvhJPQTEfro3lgtbCtKgzPm3MgiS6ehXVPjeQ5+JS0HqmTz8f/RYfzHVxw==} + engines: {node: '>=16.0.0'} + + '@smithy/invalid-dependency@3.0.0': + resolution: {integrity: sha512-F6wBBaEFgJzj0s4KUlliIGPmqXemwP6EavgvDqYwCH40O5Xr2iMHvS8todmGVZtuJCorBkXsYLyTu4PuizVq5g==} + + '@smithy/is-array-buffer@3.0.0': + resolution: {integrity: sha512-+Fsu6Q6C4RSJiy81Y8eApjEB5gVtM+oFKTffg+jSuwtvomJJrhUJBu2zS8wjXSgH/g1MKEWrzyChTBe6clb5FQ==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-content-length@3.0.0': + resolution: {integrity: sha512-3C4s4d/iGobgCtk2tnWW6+zSTOBg1PRAm2vtWZLdriwTroFbbWNSr3lcyzHdrQHnEXYCC5K52EbpfodaIUY8sg==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-endpoint@3.0.0': + resolution: {integrity: sha512-aXOAWztw/5qAfp0NcA2OWpv6ZI/E+Dh9mByif7i91D/0iyYNUcKvskmXiowKESFkuZ7PIMd3VOR4fTibZDs2OQ==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-retry@3.0.1': + resolution: {integrity: sha512-hBhSEuL841FhJBK/19WpaGk5YWSzFk/P2UaVjANGKRv3eYNO8Y1lANWgqnuPWjOyCEWMPr58vELFDWpxvRKANw==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-serde@3.0.0': + resolution: {integrity: sha512-I1vKG1foI+oPgG9r7IMY1S+xBnmAn1ISqployvqkwHoSb8VPsngHDTOgYGYBonuOKndaWRUGJZrKYYLB+Ane6w==} + engines: {node: '>=16.0.0'} + + '@smithy/middleware-stack@3.0.0': + resolution: {integrity: 
sha512-+H0jmyfAyHRFXm6wunskuNAqtj7yfmwFB6Fp37enytp2q047/Od9xetEaUbluyImOlGnGpaVGaVfjwawSr+i6Q==} + engines: {node: '>=16.0.0'} + + '@smithy/node-config-provider@3.0.0': + resolution: {integrity: sha512-buqfaSdDh0zo62EPLf8rGDvcpKwGpO5ho4bXS2cdFhlOta7tBkWJt+O5uiaAeICfIOfPclNOndshDNSanX2X9g==} + engines: {node: '>=16.0.0'} + + '@smithy/node-http-handler@3.0.0': + resolution: {integrity: sha512-3trD4r7NOMygwLbUJo4eodyQuypAWr7uvPnebNJ9a70dQhVn+US8j/lCnvoJS6BXfZeF7PkkkI0DemVJw+n+eQ==} + engines: {node: '>=16.0.0'} + + '@smithy/property-provider@3.0.0': + resolution: {integrity: sha512-LmbPgHBswdXCrkWWuUwBm9w72S2iLWyC/5jet9/Y9cGHtzqxi+GVjfCfahkvNV4KXEwgnH8EMpcrD9RUYe0eLQ==} + engines: {node: '>=16.0.0'} + + '@smithy/protocol-http@4.0.0': + resolution: {integrity: sha512-qOQZOEI2XLWRWBO9AgIYuHuqjZ2csyr8/IlgFDHDNuIgLAMRx2Bl8ck5U5D6Vh9DPdoaVpuzwWMa0xcdL4O/AQ==} + engines: {node: '>=16.0.0'} + + '@smithy/querystring-builder@3.0.0': + resolution: {integrity: sha512-bW8Fi0NzyfkE0TmQphDXr1AmBDbK01cA4C1Z7ggwMAU5RDz5AAv/KmoRwzQAS0kxXNf/D2ALTEgwK0U2c4LtRg==} + engines: {node: '>=16.0.0'} + + '@smithy/querystring-parser@3.0.0': + resolution: {integrity: sha512-UzHwthk0UEccV4dHzPySnBy34AWw3V9lIqUTxmozQ+wPDAO9csCWMfOLe7V9A2agNYy7xE+Pb0S6K/J23JSzfQ==} + engines: {node: '>=16.0.0'} + + '@smithy/service-error-classification@3.0.0': + resolution: {integrity: sha512-3BsBtOUt2Gsnc3X23ew+r2M71WwtpHfEDGhHYHSDg6q1t8FrWh15jT25DLajFV1H+PpxAJ6gqe9yYeRUsmSdFA==} + engines: {node: '>=16.0.0'} + + '@smithy/shared-ini-file-loader@3.0.0': + resolution: {integrity: sha512-REVw6XauXk8xE4zo5aGL7Rz4ywA8qNMUn8RtWeTRQsgAlmlvbJ7CEPBcaXU2NDC3AYBgYAXrGyWD8XrN8UGDog==} + engines: {node: '>=16.0.0'} + + '@smithy/signature-v4@3.0.0': + resolution: {integrity: sha512-kXFOkNX+BQHe2qnLxpMEaCRGap9J6tUGLzc3A9jdn+nD4JdMwCKTJ+zFwQ20GkY+mAXGatyTw3HcoUlR39HwmA==} + engines: {node: '>=16.0.0'} + + '@smithy/smithy-client@3.0.1': + resolution: {integrity: 
sha512-KAiFY4Y4jdHxR+4zerH/VBhaFKM8pbaVmJZ/CWJRwtM/CmwzTfXfvYwf6GoUwiHepdv+lwiOXCuOl6UBDUEINw==} + engines: {node: '>=16.0.0'} + + '@smithy/types@3.0.0': + resolution: {integrity: sha512-VvWuQk2RKFuOr98gFhjca7fkBS+xLLURT8bUjk5XQoV0ZLm7WPwWPPY3/AwzTLuUBDeoKDCthfe1AsTUWaSEhw==} + engines: {node: '>=16.0.0'} + + '@smithy/url-parser@3.0.0': + resolution: {integrity: sha512-2XLazFgUu+YOGHtWihB3FSLAfCUajVfNBXGGYjOaVKjLAuAxx3pSBY3hBgLzIgB17haf59gOG3imKqTy8mcrjw==} + + '@smithy/util-base64@3.0.0': + resolution: {integrity: sha512-Kxvoh5Qtt0CDsfajiZOCpJxgtPHXOKwmM+Zy4waD43UoEMA+qPxxa98aE/7ZhdnBFZFXMOiBR5xbcaMhLtznQQ==} + engines: {node: '>=16.0.0'} + + '@smithy/util-body-length-browser@3.0.0': + resolution: {integrity: sha512-cbjJs2A1mLYmqmyVl80uoLTJhAcfzMOyPgjwAYusWKMdLeNtzmMz9YxNl3/jRLoxSS3wkqkf0jwNdtXWtyEBaQ==} + + '@smithy/util-body-length-node@3.0.0': + resolution: {integrity: sha512-Tj7pZ4bUloNUP6PzwhN7K386tmSmEET9QtQg0TgdNOnxhZvCssHji+oZTUIuzxECRfG8rdm2PMw2WCFs6eIYkA==} + engines: {node: '>=16.0.0'} + + '@smithy/util-buffer-from@3.0.0': + resolution: {integrity: sha512-aEOHCgq5RWFbP+UDPvPot26EJHjOC+bRgse5A8V3FSShqd5E5UN4qc7zkwsvJPPAVsf73QwYcHN1/gt/rtLwQA==} + engines: {node: '>=16.0.0'} + + '@smithy/util-config-provider@3.0.0': + resolution: {integrity: sha512-pbjk4s0fwq3Di/ANL+rCvJMKM5bzAQdE5S/6RL5NXgMExFAi6UgQMPOm5yPaIWPpr+EOXKXRonJ3FoxKf4mCJQ==} + engines: {node: '>=16.0.0'} + + '@smithy/util-defaults-mode-browser@3.0.1': + resolution: {integrity: sha512-nW5kEzdJn1Bn5TF+gOPHh2rcPli8JU9vSSXLbfg7uPnfR1TMRQqs9zlYRhIb87NeSxIbpdXOI94tvXSy+fvDYg==} + engines: {node: '>= 10.0.0'} + + '@smithy/util-defaults-mode-node@3.0.1': + resolution: {integrity: sha512-TFk+Qb+elLc/MOhtSp+50fstyfZ6avQbgH2d96xUBpeScu+Al9elxv+UFAjaTHe0HQe5n+wem8ZLpXvU8lwV6Q==} + engines: {node: '>= 10.0.0'} + + '@smithy/util-endpoints@2.0.0': + resolution: {integrity: sha512-+exaXzEY3DNt2qtA2OtRNSDlVrE4p32j1JSsQkzA5AdP0YtJNjkYbYhJxkFmPYcjI1abuwopOZCwUmv682QkiQ==} + engines: {node: '>=16.0.0'} + + 
'@smithy/util-hex-encoding@3.0.0': + resolution: {integrity: sha512-eFndh1WEK5YMUYvy3lPlVmYY/fZcQE1D8oSf41Id2vCeIkKJXPcYDCZD+4+xViI6b1XSd7tE+s5AmXzz5ilabQ==} + engines: {node: '>=16.0.0'} + + '@smithy/util-middleware@3.0.0': + resolution: {integrity: sha512-q5ITdOnV2pXHSVDnKWrwgSNTDBAMHLptFE07ua/5Ty5WJ11bvr0vk2a7agu7qRhrCFRQlno5u3CneU5EELK+DQ==} + engines: {node: '>=16.0.0'} + + '@smithy/util-retry@3.0.0': + resolution: {integrity: sha512-nK99bvJiziGv/UOKJlDvFF45F00WgPLKVIGUfAK+mDhzVN2hb/S33uW2Tlhg5PVBoqY7tDVqL0zmu4OxAHgo9g==} + engines: {node: '>=16.0.0'} + + '@smithy/util-stream@3.0.1': + resolution: {integrity: sha512-7F7VNNhAsfMRA8I986YdOY5fE0/T1/ZjFF6OLsqkvQVNP3vZ/szYDfGCyphb7ioA09r32K/0qbSFfNFU68aSzA==} + engines: {node: '>=16.0.0'} + + '@smithy/util-uri-escape@3.0.0': + resolution: {integrity: sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg==} + engines: {node: '>=16.0.0'} + + '@smithy/util-utf8@3.0.0': + resolution: {integrity: sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==} + engines: {node: '>=16.0.0'} + + '@types/better-sqlite3@7.6.10': + resolution: {integrity: sha512-TZBjD+yOsyrUJGmcUj6OS3JADk3+UZcNv3NOBqGkM09bZdi28fNZw8ODqbMOLfKCu7RYCO62/ldq1iHbzxqoPw==} + + '@types/docker-modem@3.0.6': + resolution: {integrity: sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg==} + + '@types/dockerode@3.3.29': + resolution: {integrity: sha512-5PRRq/yt5OT/Jf77ltIdz4EiR9+VLnPF+HpU4xGFwUqmV24Co2HKBNW3w+slqZ1CYchbcDeqJASHDYWzZCcMiQ==} + + '@types/estree@1.0.5': + resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==} + + '@types/fs-extra@11.0.4': + resolution: {integrity: sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==} + + '@types/glob@8.1.0': + resolution: {integrity: 
sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w==} + + '@types/json-diff@1.0.3': + resolution: {integrity: sha512-Qvxm8fpRMv/1zZR3sQWImeRK2mBYJji20xF51Fq9Gt//Ed18u0x6/FNLogLS1xhfUWTEmDyqveJqn95ltB6Kvw==} + + '@types/jsonfile@6.1.4': + resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==} + + '@types/minimatch@5.1.2': + resolution: {integrity: sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==} + + '@types/minimist@1.2.5': + resolution: {integrity: sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==} + + '@types/node-fetch@2.6.11': + resolution: {integrity: sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==} + + '@types/node-forge@1.3.11': + resolution: {integrity: sha512-FQx220y22OKNTqaByeBGqHWYz4cl94tpcxeFdvBo3wjG6XPBuZ0BNgNZRV5J5TFmmcsJ4IzsLkmGRiQbnYsBEQ==} + + '@types/node@18.19.33': + resolution: {integrity: sha512-NR9+KrpSajr2qBVp/Yt5TU/rp+b5Mayi3+OlMlcg2cVCfRmcG5PWZ7S4+MG9PZ5gWBoc9Pd0BKSRViuBCRPu0A==} + + '@types/pg@8.11.6': + resolution: {integrity: sha512-/2WmmBXHLsfRqzfHW7BNZ8SbYzE8OSk7i3WjFYvfgRHj7S1xj+16Je5fUKv3lVdVzk/zn9TXOqf+avFCFIE0yQ==} + + '@types/pg@8.6.6': + resolution: {integrity: sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==} + + '@types/pluralize@0.0.33': + resolution: {integrity: sha512-JOqsl+ZoCpP4e8TDke9W79FDcSgPAR0l6pixx2JHkhnRjvShyYiAYw2LVsnA7K08Y6DeOnaU6ujmENO4os/cYg==} + + '@types/ps-tree@1.1.6': + resolution: {integrity: sha512-PtrlVaOaI44/3pl3cvnlK+GxOM3re2526TJvPvh7W+keHIXdV4TE0ylpPBAcvFQCbGitaTXwL9u+RF7qtVeazQ==} + + '@types/semver@7.5.8': + resolution: {integrity: sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==} + + '@types/ssh2@1.15.0': + resolution: {integrity: 
sha512-YcT8jP5F8NzWeevWvcyrrLB3zcneVjzYY9ZDSMAMboI+2zR1qYWFhwsyOFVzT7Jorn67vqxC0FRiw8YyG9P1ww==} + + '@types/uuid@9.0.8': + resolution: {integrity: sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==} + + '@types/which@3.0.3': + resolution: {integrity: sha512-2C1+XoY0huExTbs8MQv1DuS5FS86+SEjdM9F/+GS61gg5Hqbtj8ZiDSx8MfWcyei907fIPbfPGCOrNUTnVHY1g==} + + '@types/ws@8.5.10': + resolution: {integrity: sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==} + + '@typescript-eslint/eslint-plugin@7.10.0': + resolution: {integrity: sha512-PzCr+a/KAef5ZawX7nbyNwBDtM1HdLIT53aSA2DDlxmxMngZ43O8SIePOeX8H5S+FHXeI6t97mTt/dDdzY4Fyw==} + engines: {node: ^18.18.0 || >=20.0.0} + peerDependencies: + '@typescript-eslint/parser': ^7.0.0 + eslint: ^8.56.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + '@typescript-eslint/parser@7.10.0': + resolution: {integrity: sha512-2EjZMA0LUW5V5tGQiaa2Gys+nKdfrn2xiTIBLR4fxmPmVSvgPcKNW+AE/ln9k0A4zDUti0J/GZXMDupQoI+e1w==} + engines: {node: ^18.18.0 || >=20.0.0} + peerDependencies: + eslint: ^8.56.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + '@typescript-eslint/scope-manager@7.10.0': + resolution: {integrity: sha512-7L01/K8W/VGl7noe2mgH0K7BE29Sq6KAbVmxurj8GGaPDZXPr8EEQ2seOeAS+mEV9DnzxBQB6ax6qQQ5C6P4xg==} + engines: {node: ^18.18.0 || >=20.0.0} + + '@typescript-eslint/type-utils@7.10.0': + resolution: {integrity: sha512-D7tS4WDkJWrVkuzgm90qYw9RdgBcrWmbbRkrLA4d7Pg3w0ttVGDsvYGV19SH8gPR5L7OtcN5J1hTtyenO9xE9g==} + engines: {node: ^18.18.0 || >=20.0.0} + peerDependencies: + eslint: ^8.56.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + '@typescript-eslint/types@7.10.0': + resolution: {integrity: sha512-7fNj+Ya35aNyhuqrA1E/VayQX9Elwr8NKZ4WueClR3KwJ7Xx9jcCdOrLW04h51de/+gNbyFMs+IDxh5xIwfbNg==} + engines: {node: ^18.18.0 || >=20.0.0} + + '@typescript-eslint/typescript-estree@7.10.0': + 
resolution: {integrity: sha512-LXFnQJjL9XIcxeVfqmNj60YhatpRLt6UhdlFwAkjNc6jSUlK8zQOl1oktAP8PlWFzPQC1jny/8Bai3/HPuvN5g==} + engines: {node: ^18.18.0 || >=20.0.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + '@typescript-eslint/utils@7.10.0': + resolution: {integrity: sha512-olzif1Fuo8R8m/qKkzJqT7qwy16CzPRWBvERS0uvyc+DHd8AKbO4Jb7kpAvVzMmZm8TrHnI7hvjN4I05zow+tg==} + engines: {node: ^18.18.0 || >=20.0.0} + peerDependencies: + eslint: ^8.56.0 + + '@typescript-eslint/visitor-keys@7.10.0': + resolution: {integrity: sha512-9ntIVgsi6gg6FIq9xjEO4VQJvwOqA3jaBFQJ/6TK5AvEup2+cECI6Fh7QiBxmfMHXU0V0J4RyPeOU1VDNzl9cg==} + engines: {node: ^18.18.0 || >=20.0.0} + + '@ungap/structured-clone@1.2.0': + resolution: {integrity: sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==} + + '@vercel/postgres@0.8.0': + resolution: {integrity: sha512-/QUV9ExwaNdKooRjOQqvrKNVnRvsaXeukPNI5DB1ovUTesglfR/fparw7ngo1KUWWKIVpEj2TRrA+ObRHRdaLg==} + engines: {node: '>=14.6'} + + '@vitest/expect@1.6.0': + resolution: {integrity: sha512-ixEvFVQjycy/oNgHjqsL6AZCDduC+tflRluaHIzKIsdbzkLn2U/iBnVeJwB6HsIjQBdfMR8Z0tRxKUsvFJEeWQ==} + + '@vitest/runner@1.6.0': + resolution: {integrity: sha512-P4xgwPjwesuBiHisAVz/LSSZtDjOTPYZVmNAnpHHSR6ONrf8eCJOFRvUwdHn30F5M1fxhqtl7QZQUk2dprIXAg==} + + '@vitest/snapshot@1.6.0': + resolution: {integrity: sha512-+Hx43f8Chus+DCmygqqfetcAZrDJwvTj0ymqjQq4CvmpKFSTVteEOBzCusu1x2tt4OJcvBflyHUE0DZSLgEMtQ==} + + '@vitest/spy@1.6.0': + resolution: {integrity: sha512-leUTap6B/cqi/bQkXUu6bQV5TZPx7pmMBKBQiI0rJA8c3pB56ZsaTbREnF7CJfmvAS4V2cXIBAh/3rVwrrCYgw==} + + '@vitest/utils@1.6.0': + resolution: {integrity: sha512-21cPiuGMoMZwiOHa2i4LXkMkMkCGzA+MVFV70jRwHo95dL4x/ts5GZhML1QWuy7yfp3WzK3lRvZi3JnXTYqrBw==} + + acorn-jsx@5.3.2: + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + 
+ acorn-walk@8.3.2: + resolution: {integrity: sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==} + engines: {node: '>=0.4.0'} + + acorn@8.11.3: + resolution: {integrity: sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==} + engines: {node: '>=0.4.0'} + hasBin: true + + aggregate-error@4.0.1: + resolution: {integrity: sha512-0poP0T7el6Vq3rstR8Mn4V/IQrpBLO6POkUSrN7RhyY+GF/InCFShQzsQ39T25gkHhLgSLByyAz+Kjb+c2L98w==} + engines: {node: '>=12'} + + ajv@6.12.6: + resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} + + ansi-escapes@6.2.1: + resolution: {integrity: sha512-4nJ3yixlEthEJ9Rk4vPcdBRkZvQZlYyu8j4/Mqz5sgIkddmEnH2Yj2ZrnP9S3tQOvSNRUIgVNF/1yPpRAGNRig==} + engines: {node: '>=14.16'} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-regex@6.0.1: + resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==} + engines: {node: '>=12'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + ansi-styles@5.2.0: + resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} + engines: {node: '>=10'} + + ansi-styles@6.2.1: + resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==} + engines: {node: '>=12'} + + ansicolors@0.3.2: + resolution: {integrity: sha512-QXu7BPrP29VllRxH8GwB7x5iX5qWKAAMLqKQGWTeLWVlNHNOpVMJ91dsxQAIWXpjuW5wqvxu3Jd/nRjrJ+0pqg==} + + any-promise@1.3.0: + resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} + + 
anymatch@3.1.3: + resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} + engines: {node: '>= 8'} + + argparse@1.0.10: + resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + array-find-index@1.0.2: + resolution: {integrity: sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==} + engines: {node: '>=0.10.0'} + + array-union@2.1.0: + resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} + engines: {node: '>=8'} + + arrgv@1.0.2: + resolution: {integrity: sha512-a4eg4yhp7mmruZDQFqVMlxNRFGi/i1r87pt8SDHy0/I8PqSXoUTlWZRdAZo0VXgvEARcujbtTk8kiZRi1uDGRw==} + engines: {node: '>=8.0.0'} + + arrify@3.0.0: + resolution: {integrity: sha512-tLkvA81vQG/XqE2mjDkGQHoOINtMHtysSnemrmoGe6PydDPMRbVugqyk4A6V/WDWEfm3l+0d8anA9r8cv/5Jaw==} + engines: {node: '>=12'} + + as-table@1.0.55: + resolution: {integrity: sha512-xvsWESUJn0JN421Xb9MQw6AsMHRCUknCe0Wjlxvjud80mU4E6hQf1A6NzQKcYNmYw62MfzEtXc+badstZP3JpQ==} + + asn1@0.2.6: + resolution: {integrity: sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==} + + assertion-error@1.1.0: + resolution: {integrity: sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==} + + asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + + ava@5.3.1: + resolution: {integrity: sha512-Scv9a4gMOXB6+ni4toLuhAm9KYWEjsgBglJl+kMGI5+IVDt120CCDZyB5HNU9DjmLI2t4I0GbnxGLmmRfGTJGg==} + engines: {node: '>=14.19 <15 || >=16.15 <17 || >=18'} + hasBin: true + peerDependencies: + '@ava/typescript': '*' + peerDependenciesMeta: + 
'@ava/typescript': + optional: true + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + + bcrypt-pbkdf@1.0.2: + resolution: {integrity: sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==} + + better-sqlite3@9.6.0: + resolution: {integrity: sha512-yR5HATnqeYNVnkaUTf4bOP2dJSnyhP4puJN/QPRyx4YkBEEUxib422n2XzPqDEHjQQqazoYoADdAm5vE15+dAQ==} + + binary-extensions@2.3.0: + resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} + engines: {node: '>=8'} + + bindings@1.5.0: + resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==} + + bl@4.1.0: + resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} + + blake3-wasm@2.1.5: + resolution: {integrity: sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==} + + blueimp-md5@2.19.0: + resolution: {integrity: sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w==} + + bowser@2.11.0: + resolution: {integrity: sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==} + + brace-expansion@1.1.11: + resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + + brace-expansion@2.0.1: + resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + + braces@3.0.2: + resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} + engines: {node: '>=8'} + + 
buffer-from@1.1.2: + resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + + buffer@5.7.1: + resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} + + bufferutil@4.0.8: + resolution: {integrity: sha512-4T53u4PdgsXqKaIctwF8ifXlRTTmEPJ8iEPWFdGZvcf7sbwYo6FKFEX9eNNAnzFZ7EzJAQ3CJeOtCRA4rDp7Pw==} + engines: {node: '>=6.14.2'} + + buildcheck@0.0.6: + resolution: {integrity: sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==} + engines: {node: '>=10.0.0'} + + bundle-require@4.1.0: + resolution: {integrity: sha512-FeArRFM+ziGkRViKRnSTbHZc35dgmR9yNog05Kn0+ItI59pOAISGvnnIwW1WgFZQW59IxD9QpJnUPkdIPfZuXg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + peerDependencies: + esbuild: '>=0.17' + + cac@6.7.14: + resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} + engines: {node: '>=8'} + + callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + + callsites@4.1.0: + resolution: {integrity: sha512-aBMbD1Xxay75ViYezwT40aQONfr+pSXTHwNKvIXhXD6+LY3F1dLIcceoC5OZKBVHbXcysz1hL9D2w0JJIMXpUw==} + engines: {node: '>=12.20'} + + camelcase@7.0.1: + resolution: {integrity: sha512-xlx1yCK2Oc1APsPXDL2LdlNP6+uu8OCDdhOBSVT279M/S+y75O30C2VuD8T2ogdePBBl7PfPF4504tnLgX3zfw==} + engines: {node: '>=14.16'} + + capnp-ts@0.7.0: + resolution: {integrity: sha512-XKxXAC3HVPv7r674zP0VC3RTXz+/JKhfyw94ljvF80yynK6VkTnqE3jMuN8b3dUVmmc43TjyxjW4KTsmB3c86g==} + + cardinal@2.1.1: + resolution: {integrity: sha512-JSr5eOgoEymtYHBjNWyjrMqet9Am2miJhlfKNdqLp6zoeAh0KN5dRAcxlecj5mAJrmQomgiOBj35xHLrFjqBpw==} + hasBin: true + + cbor@8.1.0: + resolution: {integrity: sha512-DwGjNW9omn6EwP70aXsn7FQJx5kO12tX0bZkaTjzdVFM6/7nhA4t0EENocKGx6D2Bch9PE2KzCUf5SceBdeijg==} + 
engines: {node: '>=12.19'} + + chai@4.4.1: + resolution: {integrity: sha512-13sOfMv2+DWduEU+/xbun3LScLoqN17nBeTLUsmDfKdoiC1fr0n9PU4guu4AhRcOVFk/sW8LyZWHuhWtQZiF+g==} + engines: {node: '>=4'} + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + chalk@5.3.0: + resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==} + engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + + char-regex@1.0.2: + resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} + engines: {node: '>=10'} + + check-error@1.0.3: + resolution: {integrity: sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==} + + chokidar@3.6.0: + resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} + engines: {node: '>= 8.10.0'} + + chownr@1.1.4: + resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} + + chunkd@2.0.1: + resolution: {integrity: sha512-7d58XsFmOq0j6el67Ug9mHf9ELUXsQXYJBkyxhH/k+6Ke0qXRnv0kbemx+Twc6fRJ07C49lcbdgm9FL1Ei/6SQ==} + + ci-info@3.9.0: + resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} + engines: {node: '>=8'} + + ci-parallel-vars@1.0.1: + resolution: {integrity: sha512-uvzpYrpmidaoxvIQHM+rKSrigjOe9feHYbw4uOI2gdfe1C3xIlxO+kVXq83WQWNniTf8bAxVpy+cQeFQsMERKg==} + + clean-stack@4.2.0: + resolution: {integrity: sha512-LYv6XPxoyODi36Dp976riBtSY27VmFo+MKqEU9QCCWyTrdEPDog+RWA7xQWHi6Vbp61j5c4cdzzX1NidnwtUWg==} + engines: {node: '>=12'} + + clean-yaml-object@0.1.0: + resolution: {integrity: sha512-3yONmlN9CSAkzNwnRCiJQ7Q2xK5mWuEfL3PuTZcAUzhObbXsfsnMptJzXwz93nc5zn9V9TwCVMmV7w4xsm43dw==} + engines: {node: '>=0.10.0'} + + 
cli-color@2.0.4: + resolution: {integrity: sha512-zlnpg0jNcibNrO7GG9IeHH7maWFeCz+Ja1wx/7tZNU5ASSSSZ+/qZciM0/LHCYxSdqv5h2sdbQ/PXYdOuetXvA==} + engines: {node: '>=0.10'} + + cli-table3@0.6.5: + resolution: {integrity: sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==} + engines: {node: 10.* || >= 12.*} + + cli-truncate@3.1.0: + resolution: {integrity: sha512-wfOBkjXteqSnI59oPcJkcPl/ZmwvMMOj340qUIY1SKZCv0B9Cf4D4fAucRkIKQmsIuYK3x1rrgU7MeGRruiuiA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + cliui@8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + + code-excerpt@4.0.0: + resolution: {integrity: sha512-xxodCmBen3iy2i0WtAK8FlFNrRzjUqjRsMfho58xT/wvZU1YTM3fCnRjcy1gJPMepaRlgm/0e6w8SpWHpn3/cA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + colors@1.4.0: + resolution: {integrity: sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==} + engines: {node: '>=0.1.90'} + + combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + + commander@10.0.1: + resolution: {integrity: sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==} + engines: {node: '>=14'} + + commander@12.1.0: + resolution: {integrity: sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==} + engines: {node: '>=18'} + + commander@4.1.1: + resolution: {integrity: 
sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} + engines: {node: '>= 6'} + + commander@9.5.0: + resolution: {integrity: sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==} + engines: {node: ^12.20.0 || >=14} + + common-path-prefix@3.0.0: + resolution: {integrity: sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==} + + concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + + concordance@5.0.4: + resolution: {integrity: sha512-OAcsnTEYu1ARJqWVGwf4zh4JDfHZEaSNlNccFmt8YjB2l/n19/PF2viLINHc57vO4FKIAFl2FWASIGZZWZ2Kxw==} + engines: {node: '>=10.18.0 <11 || >=12.14.0 <13 || >=14'} + + confbox@0.1.7: + resolution: {integrity: sha512-uJcB/FKZtBMCJpK8MQji6bJHgu1tixKPxRLeGkNzBoOZzpnZUJm0jm2/sBDWcuBx1dYgxV4JU+g5hmNxCyAmdA==} + + convert-to-spaces@2.0.1: + resolution: {integrity: sha512-rcQ1bsQO9799wq24uE5AM2tAILy4gXGIK/njFWcVQkGNZ96edlpY+A7bjwvzjYvLDyzmG1MmMLZhpcsb+klNMQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + cookie@0.5.0: + resolution: {integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==} + engines: {node: '>= 0.6'} + + copy-anything@3.0.5: + resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==} + engines: {node: '>=12.13'} + + cpu-features@0.0.10: + resolution: {integrity: sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==} + engines: {node: '>=10.0.0'} + + cross-spawn@7.0.3: + resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} + engines: {node: '>= 8'} + + currently-unhandled@0.4.1: + resolution: {integrity: 
sha512-/fITjgjGU50vjQ4FH6eUoYu+iUoUKIXws2hL15JJpIR+BbTxaXQsMuuyjtNh2WqsSBS5nsaZHFsFecyw5CCAng==} + engines: {node: '>=0.10.0'} + + d@1.0.2: + resolution: {integrity: sha512-MOqHvMWF9/9MX6nza0KgvFH4HpMU0EF5uUDXqX/BtxtU8NfB0QzRtJ8Oe/6SuS4kbhyzVJwjd97EA4PKrzJ8bw==} + engines: {node: '>=0.12'} + + data-uri-to-buffer@2.0.2: + resolution: {integrity: sha512-ND9qDTLc6diwj+Xe5cdAgVTbLVdXbtxTJRXRhli8Mowuaan+0EJOtdqJ0QCHNSSPyoXGx9HX2/VMnKeC34AChA==} + + data-uri-to-buffer@4.0.1: + resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} + engines: {node: '>= 12'} + + date-time@3.1.0: + resolution: {integrity: sha512-uqCUKXE5q1PNBXjPqvwhwJf9SwMoAHBgWJ6DcrnS5o+W2JOiIILl0JEdVD8SGujrNS02GGxgwAg2PN2zONgtjg==} + engines: {node: '>=6'} + + debug@4.3.4: + resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + decompress-response@6.0.0: + resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==} + engines: {node: '>=10'} + + deep-eql@4.1.3: + resolution: {integrity: sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==} + engines: {node: '>=6'} + + deep-extend@0.6.0: + resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} + engines: {node: '>=4.0.0'} + + deep-is@0.1.4: + resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + + delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + + denque@2.1.0: + resolution: {integrity: 
sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==} + engines: {node: '>=0.10'} + + detect-libc@2.0.2: + resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==} + engines: {node: '>=8'} + + detect-libc@2.0.3: + resolution: {integrity: sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==} + engines: {node: '>=8'} + + diff-sequences@29.6.3: + resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + difflib@0.2.4: + resolution: {integrity: sha512-9YVwmMb0wQHQNr5J9m6BSj6fk4pfGITGQOOs+D9Fl+INODWFOfvhIU1hNv6GgR1RBoC/9NJcwu77zShxV0kT7w==} + + dir-glob@3.0.1: + resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} + engines: {node: '>=8'} + + docker-modem@3.0.8: + resolution: {integrity: sha512-f0ReSURdM3pcKPNS30mxOHSbaFLcknGmQjwSfmbcdOw1XWKXVhukM3NJHhr7NpY9BIyyWQb0EBo3KQvvuU5egQ==} + engines: {node: '>= 8.0'} + + dockerode@3.3.5: + resolution: {integrity: sha512-/0YNa3ZDNeLr/tSckmD69+Gq+qVNhvKfAHNeZJBnp7EOP6RGKV8ORrJHkUn20So5wU+xxT7+1n5u8PjHbfjbSA==} + engines: {node: '>= 8.0'} + + doctrine@3.0.0: + resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} + engines: {node: '>=6.0.0'} + + dotenv@16.4.5: + resolution: {integrity: sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==} + engines: {node: '>=12'} + + dreamopt@0.8.0: + resolution: {integrity: sha512-vyJTp8+mC+G+5dfgsY+r3ckxlz+QMX40VjPQsZc5gxVAxLmi64TBoVkP54A/pRAXMXsbu2GMMBrZPxNv23waMg==} + engines: {node: '>=0.4.0'} + + drizzle-kit@0.21.2: + resolution: {integrity: sha512-U87IhZyCt/9d0ZT/Na3KFJVY31tSxtTx/n9UMcWFpW/5c2Ede39xiCG5efNV/0iimsv97UIRtDI0ldLBW5lbcg==} + hasBin: true 
+ + drizzle-orm@0.32.0-85c8008: + resolution: {integrity: sha512-gHLqGZz0eqAvSw4vq46sHRV8qLHxrbuCVlwaVZ1t4ntyH8csyCKEXTWO78cBJwYUpz7BCSzqVX+5ZYa/QM+/Gw==} + peerDependencies: + '@aws-sdk/client-rds-data': '>=3' + '@cloudflare/workers-types': '>=3' + '@electric-sql/pglite': '>=0.1.1' + '@libsql/client': '*' + '@neondatabase/serverless': '>=0.1' + '@op-engineering/op-sqlite': '>=2' + '@opentelemetry/api': ^1.4.1 + '@planetscale/database': '>=1' + '@tidbcloud/serverless': '*' + '@types/better-sqlite3': '*' + '@types/pg': '*' + '@types/react': '>=18' + '@types/sql.js': '*' + '@vercel/postgres': '>=0.8.0' + '@xata.io/client': '*' + better-sqlite3: '>=7' + bun-types: '*' + expo-sqlite: '>=13.2.0' + knex: '*' + kysely: '*' + mysql2: '>=2' + pg: '>=8' + postgres: '>=3' + react: '>=18' + sql.js: '>=1' + sqlite3: '>=5' + peerDependenciesMeta: + '@aws-sdk/client-rds-data': + optional: true + '@cloudflare/workers-types': + optional: true + '@electric-sql/pglite': + optional: true + '@libsql/client': + optional: true + '@neondatabase/serverless': + optional: true + '@op-engineering/op-sqlite': + optional: true + '@opentelemetry/api': + optional: true + '@planetscale/database': + optional: true + '@tidbcloud/serverless': + optional: true + '@types/better-sqlite3': + optional: true + '@types/pg': + optional: true + '@types/react': + optional: true + '@types/sql.js': + optional: true + '@vercel/postgres': + optional: true + '@xata.io/client': + optional: true + better-sqlite3: + optional: true + bun-types: + optional: true + expo-sqlite: + optional: true + knex: + optional: true + kysely: + optional: true + mysql2: + optional: true + pg: + optional: true + postgres: + optional: true + react: + optional: true + sql.js: + optional: true + sqlite3: + optional: true + + duplexer@0.1.2: + resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==} + + eastasianwidth@0.2.0: + resolution: {integrity: 
sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + + emittery@1.0.3: + resolution: {integrity: sha512-tJdCJitoy2lrC2ldJcqN4vkqJ00lT+tOWNT1hBJjO/3FDMJa5TTIiYGCKGkn/WfCyOzUMObeohbVTj00fhiLiA==} + engines: {node: '>=14.16'} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + + emojilib@2.4.0: + resolution: {integrity: sha512-5U0rVMU5Y2n2+ykNLQqMoqklN9ICBT/KsvC1Gz6vqHbz2AXXGkG+Pm5rMWk/8Vjrr/mY9985Hi8DYzn1F09Nyw==} + + end-of-stream@1.4.4: + resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==} + + env-paths@3.0.0: + resolution: {integrity: sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + es5-ext@0.10.64: + resolution: {integrity: sha512-p2snDhiLaXe6dahss1LddxqEm+SkuDvV8dnIQG0MWjyHpcMNfXKPE+/Cc0y+PhxJX3A4xGNeFCj5oc0BUh6deg==} + engines: {node: '>=0.10'} + + es6-iterator@2.0.3: + resolution: {integrity: sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g==} + + es6-symbol@3.1.4: + resolution: {integrity: sha512-U9bFFjX8tFiATgtkJ1zg25+KviIXpgRvRHS8sau3GfhVzThRQrOeksPeT0BWW2MNZs1OEWJ1DPXOQMn0KKRkvg==} + engines: {node: '>=0.12'} + + es6-weak-map@2.0.3: + resolution: {integrity: sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA==} + + esbuild-android-64@0.14.54: + resolution: {integrity: sha512-Tz2++Aqqz0rJ7kYBfz+iqyE3QMycD4vk7LBRyWaAVFgFtQ/O8EJOnVmTOiDWYZ/uYzB4kvP+bqejYdVKzE5lAQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + + esbuild-android-arm64@0.14.54: + resolution: {integrity: 
sha512-F9E+/QDi9sSkLaClO8SOV6etqPd+5DgJje1F9lOWoNncDdOBL2YF59IhsWATSt0TLZbYCf3pNlTHvVV5VfHdvg==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + + esbuild-darwin-64@0.14.54: + resolution: {integrity: sha512-jtdKWV3nBviOd5v4hOpkVmpxsBy90CGzebpbO9beiqUYVMBtSc0AL9zGftFuBon7PNDcdvNCEuQqw2x0wP9yug==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + + esbuild-darwin-arm64@0.14.54: + resolution: {integrity: sha512-OPafJHD2oUPyvJMrsCvDGkRrVCar5aVyHfWGQzY1dWnzErjrDuSETxwA2HSsyg2jORLY8yBfzc1MIpUkXlctmw==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + + esbuild-freebsd-64@0.14.54: + resolution: {integrity: sha512-OKwd4gmwHqOTp4mOGZKe/XUlbDJ4Q9TjX0hMPIDBUWWu/kwhBAudJdBoxnjNf9ocIB6GN6CPowYpR/hRCbSYAg==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + + esbuild-freebsd-arm64@0.14.54: + resolution: {integrity: sha512-sFwueGr7OvIFiQT6WeG0jRLjkjdqWWSrfbVwZp8iMP+8UHEHRBvlaxL6IuKNDwAozNUmbb8nIMXa7oAOARGs1Q==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + + esbuild-linux-32@0.14.54: + resolution: {integrity: sha512-1ZuY+JDI//WmklKlBgJnglpUL1owm2OX+8E1syCD6UAxcMM/XoWd76OHSjl/0MR0LisSAXDqgjT3uJqT67O3qw==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + + esbuild-linux-64@0.14.54: + resolution: {integrity: sha512-EgjAgH5HwTbtNsTqQOXWApBaPVdDn7XcK+/PtJwZLT1UmpLoznPd8c5CxqsH2dQK3j05YsB3L17T8vE7cp4cCg==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + + esbuild-linux-arm64@0.14.54: + resolution: {integrity: sha512-WL71L+0Rwv+Gv/HTmxTEmpv0UgmxYa5ftZILVi2QmZBgX3q7+tDeOQNqGtdXSdsL8TQi1vIaVFHUPDe0O0kdig==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + + esbuild-linux-arm@0.14.54: + resolution: {integrity: sha512-qqz/SjemQhVMTnvcLGoLOdFpCYbz4v4fUo+TfsWG+1aOu70/80RV6bgNpR2JCrppV2moUQkww+6bWxXRL9YMGw==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + + esbuild-linux-mips64le@0.14.54: + resolution: {integrity: sha512-qTHGQB8D1etd0u1+sB6p0ikLKRVuCWhYQhAHRPkO+OF3I/iSlTKNNS0Lh2Oc0g0UFGguaFZZiPJdJey3AGpAlw==} + 
engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + + esbuild-linux-ppc64le@0.14.54: + resolution: {integrity: sha512-j3OMlzHiqwZBDPRCDFKcx595XVfOfOnv68Ax3U4UKZ3MTYQB5Yz3X1mn5GnodEVYzhtZgxEBidLWeIs8FDSfrQ==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + + esbuild-linux-riscv64@0.14.54: + resolution: {integrity: sha512-y7Vt7Wl9dkOGZjxQZnDAqqn+XOqFD7IMWiewY5SPlNlzMX39ocPQlOaoxvT4FllA5viyV26/QzHtvTjVNOxHZg==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + + esbuild-linux-s390x@0.14.54: + resolution: {integrity: sha512-zaHpW9dziAsi7lRcyV4r8dhfG1qBidQWUXweUjnw+lliChJqQr+6XD71K41oEIC3Mx1KStovEmlzm+MkGZHnHA==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + + esbuild-netbsd-64@0.14.54: + resolution: {integrity: sha512-PR01lmIMnfJTgeU9VJTDY9ZerDWVFIUzAtJuDHwwceppW7cQWjBBqP48NdeRtoP04/AtO9a7w3viI+PIDr6d+w==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + + esbuild-node-externals@1.13.1: + resolution: {integrity: sha512-ho4Lokc6iMB1lWbb2tWJ6otien+3Kfoaxe0fy7NUNgVuLnfmlW+GRINftTVUGtTVY/dapuwUu/CvFylYNwzkMA==} + engines: {node: '>=12'} + peerDependencies: + esbuild: 0.12 - 0.21 + + esbuild-openbsd-64@0.14.54: + resolution: {integrity: sha512-Qyk7ikT2o7Wu76UsvvDS5q0amJvmRzDyVlL0qf5VLsLchjCa1+IAvd8kTBgUxD7VBUUVgItLkk609ZHUc1oCaw==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + + esbuild-register@3.5.0: + resolution: {integrity: sha512-+4G/XmakeBAsvJuDugJvtyF1x+XJT4FMocynNpxrvEBViirpfUn2PgNpCHedfWhF4WokNsO/OvMKrmJOIJsI5A==} + peerDependencies: + esbuild: '>=0.12 <1' + + esbuild-sunos-64@0.14.54: + resolution: {integrity: sha512-28GZ24KmMSeKi5ueWzMcco6EBHStL3B6ubM7M51RmPwXQGLe0teBGJocmWhgwccA1GeFXqxzILIxXpHbl9Q/Kw==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + + esbuild-windows-32@0.14.54: + resolution: {integrity: sha512-T+rdZW19ql9MjS7pixmZYVObd9G7kcaZo+sETqNH4RCkuuYSuv9AGHUVnPoP9hhuE1WM1ZimHz1CIBHBboLU7w==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + + esbuild-windows-64@0.14.54: + resolution: 
{integrity: sha512-AoHTRBUuYwXtZhjXZbA1pGfTo8cJo3vZIcWGLiUcTNgHpJJMC1rVA44ZereBHMJtotyN71S8Qw0npiCIkW96cQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + + esbuild-windows-arm64@0.14.54: + resolution: {integrity: sha512-M0kuUvXhot1zOISQGXwWn6YtS+Y/1RT9WrVIOywZnJHo3jCDyewAc79aKNQWFCQm+xNHVTq9h8dZKvygoXQQRg==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + + esbuild@0.14.54: + resolution: {integrity: sha512-Cy9llcy8DvET5uznocPyqL3BFRrFXSVqbgpMJ9Wz8oVjZlh/zUSNbPRbov0VX7VxN2JH1Oa0uNxZ7eLRb62pJA==} + engines: {node: '>=12'} + hasBin: true + + esbuild@0.17.19: + resolution: {integrity: sha512-XQ0jAPFkK/u3LcVRcvVHQcTIqD6E2H1fvZMA5dQPSOWb3suUbWbfbRf94pjc0bNzRYLfIrDRQXr7X+LHIm5oHw==} + engines: {node: '>=12'} + hasBin: true + + esbuild@0.18.20: + resolution: {integrity: sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==} + engines: {node: '>=12'} + hasBin: true + + esbuild@0.19.12: + resolution: {integrity: sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==} + engines: {node: '>=12'} + hasBin: true + + esbuild@0.20.2: + resolution: {integrity: sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==} + engines: {node: '>=12'} + hasBin: true + + escalade@3.1.2: + resolution: {integrity: sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==} + engines: {node: '>=6'} + + escape-string-regexp@2.0.0: + resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} + engines: {node: '>=8'} + + escape-string-regexp@4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + + escape-string-regexp@5.0.0: + resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} + engines: 
{node: '>=12'} + + eslint-config-prettier@9.1.0: + resolution: {integrity: sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==} + hasBin: true + peerDependencies: + eslint: '>=7.0.0' + + eslint-plugin-prettier@5.1.3: + resolution: {integrity: sha512-C9GCVAs4Eq7ZC/XFQHITLiHJxQngdtraXaM+LoUFoFp/lHNl2Zn8f3WQbe9HvTBBQ9YnKFB0/2Ajdqwo5D1EAw==} + engines: {node: ^14.18.0 || >=16.0.0} + peerDependencies: + '@types/eslint': '>=8.0.0' + eslint: '>=8.0.0' + eslint-config-prettier: '*' + prettier: '>=3.0.0' + peerDependenciesMeta: + '@types/eslint': + optional: true + eslint-config-prettier: + optional: true + + eslint-scope@7.2.2: + resolution: {integrity: sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + eslint-visitor-keys@3.4.3: + resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + eslint@8.57.0: + resolution: {integrity: sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + hasBin: true + + esniff@2.0.1: + resolution: {integrity: sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg==} + engines: {node: '>=0.10'} + + espree@9.6.1: + resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + esprima@4.0.1: + resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} + engines: {node: '>=4'} + hasBin: true + + esquery@1.5.0: + resolution: {integrity: sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==} + engines: {node: '>=0.10'} + + esrecurse@4.3.0: + 
resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} + engines: {node: '>=4.0'} + + estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + + estree-walker@0.6.1: + resolution: {integrity: sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==} + + estree-walker@3.0.3: + resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} + + esutils@2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + + event-emitter@0.3.5: + resolution: {integrity: sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==} + + event-stream@3.3.4: + resolution: {integrity: sha512-QHpkERcGsR0T7Qm3HNJSyXKEEj8AHNxkY3PK8TS2KJvQ7NiSHe3DDpwVKKtoYprL/AreyzFBeIkBIWChAqn60g==} + + execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + + execa@8.0.1: + resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} + engines: {node: '>=16.17'} + + exit-hook@2.2.1: + resolution: {integrity: sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw==} + engines: {node: '>=6'} + + expand-template@2.0.3: + resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==} + engines: {node: '>=6'} + + ext@1.7.0: + resolution: {integrity: sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw==} + + fast-deep-equal@3.1.3: + resolution: {integrity: 
sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-diff@1.3.0: + resolution: {integrity: sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==} + + fast-glob@3.3.2: + resolution: {integrity: sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==} + engines: {node: '>=8.6.0'} + + fast-json-stable-stringify@2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + + fast-levenshtein@2.0.6: + resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + + fast-xml-parser@4.2.5: + resolution: {integrity: sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g==} + hasBin: true + + fastq@1.17.1: + resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==} + + fetch-blob@3.2.0: + resolution: {integrity: sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==} + engines: {node: ^12.20 || >= 14.13} + + fflate@0.8.2: + resolution: {integrity: sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==} + + figures@5.0.0: + resolution: {integrity: sha512-ej8ksPF4x6e5wvK9yevct0UCXh8TTFlWGVLlgjZuoBH1HwjIfKE/IdL5mq89sFA7zELi1VhKpmtDnrs7zWyeyg==} + engines: {node: '>=14'} + + file-entry-cache@6.0.1: + resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} + engines: {node: ^10.12.0 || >=12.0.0} + + file-uri-to-path@1.0.0: + resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==} + + fill-range@7.1.1: + resolution: {integrity: 
sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + find-up@5.0.0: + resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + + find-up@6.3.0: + resolution: {integrity: sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + flat-cache@3.2.0: + resolution: {integrity: sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==} + engines: {node: ^10.12.0 || >=12.0.0} + + flatted@3.3.1: + resolution: {integrity: sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==} + + foreground-child@3.1.1: + resolution: {integrity: sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==} + engines: {node: '>=14'} + + form-data@4.0.0: + resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} + engines: {node: '>= 6'} + + formdata-polyfill@4.0.10: + resolution: {integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==} + engines: {node: '>=12.20.0'} + + from@0.1.7: + resolution: {integrity: sha512-twe20eF1OxVxp/ML/kq2p1uc6KvFK/+vs8WjEbeKmV2He22MKm7YF2ANIt+EOqhJ5L3K/SuuPhk0hWQDjOM23g==} + + fs-constants@1.0.0: + resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} + + fs-extra@11.2.0: + resolution: {integrity: sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==} + engines: {node: '>=14.14'} + + fs.realpath@1.0.0: + resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + + fsevents@2.3.3: + resolution: {integrity: 
sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + fx@34.0.0: + resolution: {integrity: sha512-/fZih3/WLsrtlaj2mahjWxAmyuikmcl3D5kKPqLtFmEilLsy9wp0+/vEmfvYXXhwJc+ajtCFDCf+yttXmPMHSQ==} + hasBin: true + + generate-function@2.3.1: + resolution: {integrity: sha512-eeB5GfMNeevm/GRYq20ShmsaGcmI81kIX2K9XQx5miC8KdHaC6Jm0qQ8ZNeGOi7wYB8OsdxKs+Y2oVuTFuVwKQ==} + + get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + + get-func-name@2.0.2: + resolution: {integrity: sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==} + + get-port@6.1.2: + resolution: {integrity: sha512-BrGGraKm2uPqurfGVj/z97/zv8dPleC6x9JBNRTrDNtCkkRF4rPwrQXFgL7+I+q8QSdU4ntLQX2D7KIxSy8nGw==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + get-source@2.0.12: + resolution: {integrity: sha512-X5+4+iD+HoSeEED+uwrQ07BOQr0kEDFMVqqpBuI+RaZBpBpHCuXxo70bjar6f0b0u/DQJsJ7ssurpP0V60Az+w==} + + get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + + get-stream@8.0.1: + resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} + engines: {node: '>=16'} + + get-tsconfig@4.7.5: + resolution: {integrity: sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==} + + github-from-package@0.0.0: + resolution: {integrity: sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==} + + glob-parent@5.1.2: + resolution: {integrity: 
sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + glob-parent@6.0.2: + resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} + engines: {node: '>=10.13.0'} + + glob-to-regexp@0.4.1: + resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==} + + glob@10.3.15: + resolution: {integrity: sha512-0c6RlJt1TICLyvJYIApxb8GsXoai0KUP7AxKKAtsYXdgJR1mGEUa7DgwShbdk1nly0PYoZj01xd4hzbq3fsjpw==} + engines: {node: '>=16 || 14 >=14.18'} + hasBin: true + + glob@7.2.3: + resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + + glob@8.1.0: + resolution: {integrity: sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==} + engines: {node: '>=12'} + + globals@13.24.0: + resolution: {integrity: sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==} + engines: {node: '>=8'} + + globby@11.1.0: + resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} + engines: {node: '>=10'} + + globby@13.2.2: + resolution: {integrity: sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + globrex@0.1.2: + resolution: {integrity: sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==} + + graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + + graphemer@1.4.0: + resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==} + + hanji@0.0.5: + resolution: {integrity: 
sha512-Abxw1Lq+TnYiL4BueXqMau222fPSPMFtya8HdpWsz/xVAhifXou71mPh/kY2+08RgFcVccjG3uZHs6K5HAe3zw==} + + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + + heap@0.2.7: + resolution: {integrity: sha512-2bsegYkkHO+h/9MGbn6KWcE45cHZgPANo5LXF7EvWdT0yT2EguSVO1nDgU5c8+ZOPwp2vMNa7YFsJhVcDR9Sdg==} + + hono@4.3.9: + resolution: {integrity: sha512-6c5LVE23HnIS8iBhY+XPmYJlPeeClznOi7mBNsAsJCgxo8Ciz75LTjqRUf5wv4RYq8kL+1KPLUZHCtKmbZssNg==} + engines: {node: '>=16.0.0'} + + human-signals@2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + + human-signals@5.0.0: + resolution: {integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==} + engines: {node: '>=16.17.0'} + + iconv-lite@0.6.3: + resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + + ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + + ignore-by-default@2.1.0: + resolution: {integrity: sha512-yiWd4GVmJp0Q6ghmM2B/V3oZGRmjrKLXvHR3TE1nfoXsmoggllfZUQe74EN0fJdPFZu2NIvNdrMMLm3OsV7Ohw==} + engines: {node: '>=10 <11 || >=12 <13 || >=14'} + + ignore@5.3.1: + resolution: {integrity: sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==} + engines: {node: '>= 4'} + + import-fresh@3.3.0: + resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} + engines: {node: '>=6'} + + imurmurhash@0.1.4: + resolution: 
{integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + + indent-string@5.0.0: + resolution: {integrity: sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==} + engines: {node: '>=12'} + + inflight@1.0.6: + resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + + inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + + ini@1.3.8: + resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} + + irregular-plurals@3.5.0: + resolution: {integrity: sha512-1ANGLZ+Nkv1ptFb2pa8oG8Lem4krflKuX/gINiHJHjJUKaJHk/SXk5x6K3J+39/p0h1RQ2saROclJJ+QLvETCQ==} + engines: {node: '>=8'} + + is-binary-path@2.1.0: + resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} + engines: {node: '>=8'} + + is-core-module@2.13.1: + resolution: {integrity: sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==} + + is-error@2.2.2: + resolution: {integrity: sha512-IOQqts/aHWbiisY5DuPJQ0gcbvaLFCa7fBa9xoLfxBZvQ+ZI/Zh9xoI7Gk+G64N0FdK4AbibytHht2tWgpJWLg==} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + is-fullwidth-code-point@4.0.0: + resolution: {integrity: sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==} + engines: {node: '>=12'} + + is-glob@4.0.3: + resolution: {integrity: 
sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + is-path-inside@3.0.3: + resolution: {integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==} + engines: {node: '>=8'} + + is-plain-object@5.0.0: + resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==} + engines: {node: '>=0.10.0'} + + is-promise@2.2.2: + resolution: {integrity: sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==} + + is-promise@4.0.0: + resolution: {integrity: sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==} + + is-property@1.0.2: + resolution: {integrity: sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g==} + + is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + + is-stream@3.0.0: + resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + is-unicode-supported@1.3.0: + resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==} + engines: {node: '>=12'} + + is-what@4.1.16: + resolution: {integrity: sha512-ZhMwEosbFJkA0YhFnNDgTM4ZxDRsS6HqTo7qsZM08fehyRYIYa0yHu5R6mgo1n/8MgaPBXiPimPD77baVFYg+A==} + engines: {node: '>=12.13'} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + jackspeak@2.3.6: + resolution: {integrity: 
sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==} + engines: {node: '>=14'} + + joycon@3.1.1: + resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} + engines: {node: '>=10'} + + js-base64@3.7.7: + resolution: {integrity: sha512-7rCnleh0z2CkXhH67J8K1Ytz0b2Y+yxTPL+/KOJoa20hfnVQ/3/T6W/KflYI4bRHRagNeXeU2bkNGI3v1oS/lw==} + + js-string-escape@1.0.1: + resolution: {integrity: sha512-Smw4xcfIQ5LVjAOuJCvN/zIodzA/BBSsluuoSykP+lUvScIi4U6RJLfwHet5cxFnCswUjISV8oAXaqaJDY3chg==} + engines: {node: '>= 0.8'} + + js-tokens@9.0.0: + resolution: {integrity: sha512-WriZw1luRMlmV3LGJaR6QOJjWwgLUTf89OwT2lUOyjX2dJGBwgmIkbcz+7WFZjrZM635JOIR517++e/67CP9dQ==} + + js-yaml@3.14.1: + resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} + hasBin: true + + js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true + + json-buffer@3.0.1: + resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} + + json-diff@0.9.0: + resolution: {integrity: sha512-cVnggDrVkAAA3OvFfHpFEhOnmcsUpleEKq4d4O8sQWWSH40MBrWstKigVB1kGrgLWzuom+7rRdaCsnBD6VyObQ==} + hasBin: true + + json-diff@1.0.6: + resolution: {integrity: sha512-tcFIPRdlc35YkYdGxcamJjllUhXWv4n2rK9oJ2RsAzV4FBkuV4ojKEDgcZ+kpKxDmJKv+PFK65+1tVVOnSeEqA==} + hasBin: true + + json-schema-traverse@0.4.1: + resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} + + json-stable-stringify-without-jsonify@1.0.1: + resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} + + jsonfile@6.1.0: + resolution: {integrity: 
sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} + + keyv@4.5.4: + resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + + levn@0.4.1: + resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} + engines: {node: '>= 0.8.0'} + + libsql@0.2.0: + resolution: {integrity: sha512-ELBRqhpJx5Dap0187zKQnntZyk4EjlDHSrjIVL8t+fQ5e8IxbQTeYgZgigMjB1EvrETdkm0Y0VxBGhzPQ+t0Jg==} + cpu: [x64, arm64] + os: [darwin, linux, win32] + + lilconfig@3.1.1: + resolution: {integrity: sha512-O18pf7nyvHTckunPWCV1XUNXU1piu01y2b7ATJ0ppkUkk8ocqVWBrYjJBCwHDjD/ZWcfyrA0P4gKhzWGi5EINQ==} + engines: {node: '>=14'} + + lines-and-columns@1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + + load-json-file@7.0.1: + resolution: {integrity: sha512-Gnxj3ev3mB5TkVBGad0JM6dmLiQL+o0t23JPBZ9sd+yvSLk05mFoqKBw5N8gbbkU4TNXyqCgIrl/VM17OgUIgQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + load-tsconfig@0.2.5: + resolution: {integrity: sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + local-pkg@0.5.0: + resolution: {integrity: sha512-ok6z3qlYyCDS4ZEU27HaU6x/xZa9Whf8jD4ptH5UZTQYZVYeb9bnZ3ojVhiJNLiXK1Hfc0GNbLXcmZ5plLDDBg==} + engines: {node: '>=14'} + + locate-path@6.0.0: + resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} + engines: {node: '>=10'} + + locate-path@7.2.0: + resolution: {integrity: sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + lodash.merge@4.6.2: + resolution: {integrity: 
sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + + lodash.sortby@4.7.0: + resolution: {integrity: sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==} + + lodash.throttle@4.1.1: + resolution: {integrity: sha512-wIkUCfVKpVsWo3JSZlc+8MB5it+2AN5W8J7YVMST30UrvcQNZ1Okbj+rbVniijTWE6FGYy4XJq/rHkas8qJMLQ==} + + lodash@4.17.21: + resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} + + long@4.0.0: + resolution: {integrity: sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==} + + loupe@2.3.7: + resolution: {integrity: sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==} + + lru-cache@10.2.2: + resolution: {integrity: sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==} + engines: {node: 14 || >=16.14} + + lru-cache@6.0.0: + resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} + engines: {node: '>=10'} + + lru-cache@7.18.3: + resolution: {integrity: sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==} + engines: {node: '>=12'} + + lru-queue@0.1.0: + resolution: {integrity: sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ==} + + magic-string@0.25.9: + resolution: {integrity: sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==} + + magic-string@0.30.10: + resolution: {integrity: sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==} + + map-age-cleaner@0.1.3: + resolution: {integrity: sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==} + engines: {node: '>=6'} + + map-stream@0.1.0: + resolution: {integrity: 
sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g==} + + marked-terminal@6.2.0: + resolution: {integrity: sha512-ubWhwcBFHnXsjYNsu+Wndpg0zhY4CahSpPlA70PlO0rR9r2sZpkyU+rkCsOWH+KMEkx847UpALON+HWgxowFtw==} + engines: {node: '>=16.0.0'} + peerDependencies: + marked: '>=1 <12' + + marked@9.1.6: + resolution: {integrity: sha512-jcByLnIFkd5gSXZmjNvS1TlmRhCXZjIzHYlaGkPlLIekG55JDR2Z4va9tZwCiP+/RDERiNhMOFu01xd6O5ct1Q==} + engines: {node: '>= 16'} + hasBin: true + + matcher@5.0.0: + resolution: {integrity: sha512-s2EMBOWtXFc8dgqvoAzKJXxNHibcdJMV0gwqKUaw9E2JBJuGUK7DrNKrA6g/i+v72TT16+6sVm5mS3thaMLQUw==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + md5-hex@3.0.1: + resolution: {integrity: sha512-BUiRtTtV39LIJwinWBjqVsU9xhdnz7/i889V859IBFpuqGAj6LuOvHv5XLbgZ2R7ptJoJaEcxkv88/h25T7Ciw==} + engines: {node: '>=8'} + + mem@9.0.2: + resolution: {integrity: sha512-F2t4YIv9XQUBHt6AOJ0y7lSmP1+cY7Fm1DRh9GClTGzKST7UWLMx6ly9WZdLH/G/ppM5RL4MlQfRT71ri9t19A==} + engines: {node: '>=12.20'} + + memoizee@0.4.15: + resolution: {integrity: sha512-UBWmJpLZd5STPm7PMUlOw/TSy972M+z8gcyQ5veOnSDRREz/0bmpyTfKt3/51DhEBqCZQn1udM/5flcSPYhkdQ==} + + merge-stream@2.0.0: + resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} + + merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + + micromatch@4.0.5: + resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} + engines: {node: '>=8.6'} + + mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + + mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 
0.6'} + + mime@3.0.0: + resolution: {integrity: sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==} + engines: {node: '>=10.0.0'} + hasBin: true + + mimic-fn@2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} + + mimic-fn@4.0.0: + resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==} + engines: {node: '>=12'} + + mimic-response@3.1.0: + resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==} + engines: {node: '>=10'} + + miniflare@3.20240512.0: + resolution: {integrity: sha512-X0PlKR0AROKpxFoJNmRtCMIuJxj+ngEcyTOlEokj2rAQ0TBwUhB4/1uiPvdI6ofW5NugPOD1uomAv+gLjwsLDQ==} + engines: {node: '>=16.13'} + hasBin: true + + minimatch@3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + + minimatch@5.1.6: + resolution: {integrity: sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==} + engines: {node: '>=10'} + + minimatch@7.4.6: + resolution: {integrity: sha512-sBz8G/YjVniEz6lKPNpKxXwazJe4c19fEfV2GDMX6AjFz+MX9uDWIZW8XreVhkFW3fkIdTv/gxWr/Kks5FFAVw==} + engines: {node: '>=10'} + + minimatch@9.0.4: + resolution: {integrity: sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==} + engines: {node: '>=16 || 14 >=14.17'} + + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + + minipass@7.1.1: + resolution: {integrity: sha512-UZ7eQ+h8ywIRAW1hIEl2AqdwzJucU/Kp59+8kkZeSvafXhZjul247BvIJjEVFVeON6d7lM46XX1HXCduKAS8VA==} + engines: {node: '>=16 || 14 >=14.17'} + + mkdirp-classic@0.5.3: + resolution: {integrity: 
sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==} + + mlly@1.7.0: + resolution: {integrity: sha512-U9SDaXGEREBYQgfejV97coK0UL1r+qnF2SyO9A3qcI8MzKnsIFKHNVEkrDyNncQTKQQumsasmeq84eNMdBfsNQ==} + + ms@2.1.2: + resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + mustache@4.2.0: + resolution: {integrity: sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==} + hasBin: true + + mysql2@2.3.3: + resolution: {integrity: sha512-wxJUev6LgMSgACDkb/InIFxDprRa6T95+VEoR+xPvtngtccNH2dGjEB/fVZ8yg1gWv1510c9CvXuJHi5zUm0ZA==} + engines: {node: '>= 8.0'} + + mz@2.7.0: + resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} + + named-placeholders@1.1.3: + resolution: {integrity: sha512-eLoBxg6wE/rZkJPhU/xRX1WTpkFEwDJEN96oxFrTsqBdbT5ec295Q+CoHrL9IT0DipqKhmGcaZmwOt8OON5x1w==} + engines: {node: '>=12.0.0'} + + nan@2.19.0: + resolution: {integrity: sha512-nO1xXxfh/RWNxfd/XPfbIfFk5vgLsAxUR9y5O0cHMJu/AW9U95JLXqthYHjEp+8gQ5p96K9jUp8nbVOxCdRbtw==} + + nanoid@3.3.7: + resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + napi-build-utils@1.0.2: + resolution: {integrity: sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==} + + natural-compare@1.4.0: + resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + + next-tick@1.1.0: + resolution: {integrity: sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==} + + node-abi@3.62.0: + resolution: 
{integrity: sha512-CPMcGa+y33xuL1E0TcNIu4YyaZCxnnvkVaEXrsosR3FxN+fV8xvb7Mzpb7IgKler10qeMkE6+Dp8qJhpzdq35g==} + engines: {node: '>=10'} + + node-domexception@1.0.0: + resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} + engines: {node: '>=10.5.0'} + + node-emoji@2.1.3: + resolution: {integrity: sha512-E2WEOVsgs7O16zsURJ/eH8BqhF029wGpEOnv7Urwdo2wmQanOACwJQh0devF9D9RhoZru0+9JXIS0dBXIAz+lA==} + engines: {node: '>=18'} + + node-fetch@2.7.0: + resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + + node-fetch@3.3.1: + resolution: {integrity: sha512-cRVc/kyto/7E5shrWca1Wsea4y6tL9iYJE5FBCius3JQfb/4P4I295PfhgbJQBLTx6lATE4z+wK0rPM4VS2uow==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + node-fetch@3.3.2: + resolution: {integrity: sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + node-forge@1.3.1: + resolution: {integrity: sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==} + engines: {node: '>= 6.13.0'} + + node-gyp-build@4.8.1: + resolution: {integrity: sha512-OSs33Z9yWr148JZcbZd5WiAXhh/n9z8TxQcdMhIOlpN9AhWpLfvVFO73+m77bBABQMaY9XSvIa+qk0jlI7Gcaw==} + hasBin: true + + nofilter@3.1.0: + resolution: {integrity: sha512-l2NNj07e9afPnhAhvgVrCD/oy2Ai1yfLpuo3EpiO1jFTsB4sFz6oIfAfSZyQzVpkZQ9xS8ZS5g1jCBgq4Hwo0g==} + engines: {node: '>=12.19'} + + normalize-path@3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} + + npm-run-path@4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + 
engines: {node: '>=8'} + + npm-run-path@5.3.0: + resolution: {integrity: sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + object-assign@4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + + obuf@1.1.2: + resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} + + once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + + onetime@5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} + + onetime@6.0.0: + resolution: {integrity: sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==} + engines: {node: '>=12'} + + optionator@0.9.4: + resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} + engines: {node: '>= 0.8.0'} + + p-defer@1.0.0: + resolution: {integrity: sha512-wB3wfAxZpk2AzOfUMJNL+d36xothRSyj8EXOa4f6GMqYDN9BJaaSISbsk+wS9abmnebVw95C2Kb5t85UmpCxuw==} + engines: {node: '>=4'} + + p-event@5.0.1: + resolution: {integrity: sha512-dd589iCQ7m1L0bmC5NLlVYfy3TbBEsMUfWx9PyAgPeIcFZ/E2yaTZ4Rz4MiBmmJShviiftHVXOqfnfzJ6kyMrQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + p-limit@3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + + p-limit@4.0.0: + resolution: {integrity: sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + p-limit@5.0.0: + resolution: {integrity: 
sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==} + engines: {node: '>=18'} + + p-locate@5.0.0: + resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} + engines: {node: '>=10'} + + p-locate@6.0.0: + resolution: {integrity: sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + p-map@5.5.0: + resolution: {integrity: sha512-VFqfGDHlx87K66yZrNdI4YGtD70IRyd+zSvgks6mzHPRNkoKy+9EKP4SFC77/vTTQYmRmti7dvqC+m5jBrBAcg==} + engines: {node: '>=12'} + + p-timeout@5.1.0: + resolution: {integrity: sha512-auFDyzzzGZZZdHz3BtET9VEz0SE/uMEAx7uWfGPucfzEwwe/xH0iVeZibQmANYE/hp9T2+UUZT5m+BKyrDp3Ew==} + engines: {node: '>=12'} + + parent-module@1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} + + parse-ms@3.0.0: + resolution: {integrity: sha512-Tpb8Z7r7XbbtBTrM9UhpkzzaMrqA2VXMT3YChzYltwV3P3pM6t8wl7TvpMnSTosz1aQAdVib7kdoys7vYOPerw==} + engines: {node: '>=12'} + + path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + + path-exists@5.0.0: + resolution: {integrity: sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + path-is-absolute@1.0.1: + resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} + engines: {node: '>=0.10.0'} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-key@4.0.0: + resolution: {integrity: 
sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==} + engines: {node: '>=12'} + + path-parse@1.0.7: + resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + + path-scurry@1.11.1: + resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} + + path-to-regexp@6.2.2: + resolution: {integrity: sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw==} + + path-type@4.0.0: + resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} + engines: {node: '>=8'} + + pathe@1.1.2: + resolution: {integrity: sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==} + + pathval@1.1.1: + resolution: {integrity: sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==} + + pause-stream@0.0.11: + resolution: {integrity: sha512-e3FBlXLmN/D1S+zHzanP4E/4Z60oFAa3O051qt1pxa7DEJWKAyil6upYVXCWadEnuoqa4Pkc9oUx9zsxYeRv8A==} + + pg-cloudflare@1.1.1: + resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==} + + pg-connection-string@2.6.4: + resolution: {integrity: sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA==} + + pg-int8@1.0.1: + resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} + engines: {node: '>=4.0.0'} + + pg-numeric@1.0.2: + resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} + engines: {node: '>=4'} + + pg-pool@3.6.2: + resolution: {integrity: sha512-Htjbg8BlwXqSBQ9V8Vjtc+vzf/6fVUuak/3/XXKA9oxZprwW3IMDQTGHP+KDmVL7rtd+R1QjbnCFPuTHm3G4hg==} + 
peerDependencies: + pg: '>=8.0' + + pg-protocol@1.6.1: + resolution: {integrity: sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==} + + pg-types@2.2.0: + resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} + engines: {node: '>=4'} + + pg-types@4.0.2: + resolution: {integrity: sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng==} + engines: {node: '>=10'} + + pg@8.11.5: + resolution: {integrity: sha512-jqgNHSKL5cbDjFlHyYsCXmQDrfIX/3RsNwYqpd4N0Kt8niLuNoRNH+aazv6cOd43gPh9Y4DjQCtb+X0MH0Hvnw==} + engines: {node: '>= 8.0.0'} + peerDependencies: + pg-native: '>=3.0.1' + peerDependenciesMeta: + pg-native: + optional: true + + pgpass@1.0.5: + resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} + + picocolors@1.0.1: + resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==} + + picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + pirates@4.0.6: + resolution: {integrity: sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==} + engines: {node: '>= 6'} + + pkg-conf@4.0.0: + resolution: {integrity: sha512-7dmgi4UY4qk+4mj5Cd8v/GExPo0K+SlY+hulOSdfZ/T6jVH6//y7NtzZo5WrfhDBxuQ0jCa7fLZmNaNh7EWL/w==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + pkg-types@1.1.1: + resolution: {integrity: sha512-ko14TjmDuQJ14zsotODv7dBlwxKhUKQEhuhmbqo1uCi9BB0Z2alo/wAXg6q1dTR5TyuqYyWhjtfe/Tsh+X28jQ==} + + plur@5.1.0: + resolution: {integrity: sha512-VP/72JeXqak2KiOzjgKtQen5y3IZHn+9GOuLDafPv0eXa47xq0At93XahYBs26MsifCQ4enGKwbjBTKgb9QJXg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + pluralize@8.0.0: + resolution: {integrity: 
sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==} + engines: {node: '>=4'} + + postcss-load-config@4.0.2: + resolution: {integrity: sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==} + engines: {node: '>= 14'} + peerDependencies: + postcss: '>=8.0.9' + ts-node: '>=9.0.0' + peerDependenciesMeta: + postcss: + optional: true + ts-node: + optional: true + + postcss@8.4.38: + resolution: {integrity: sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==} + engines: {node: ^10 || ^12 || >=14} + + postgres-array@2.0.0: + resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} + engines: {node: '>=4'} + + postgres-array@3.0.2: + resolution: {integrity: sha512-6faShkdFugNQCLwucjPcY5ARoW1SlbnrZjmGl0IrrqewpvxvhSLHimCVzqeuULCbG0fQv7Dtk1yDbG3xv7Veog==} + engines: {node: '>=12'} + + postgres-bytea@1.0.0: + resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==} + engines: {node: '>=0.10.0'} + + postgres-bytea@3.0.0: + resolution: {integrity: sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==} + engines: {node: '>= 6'} + + postgres-date@1.0.7: + resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==} + engines: {node: '>=0.10.0'} + + postgres-date@2.1.0: + resolution: {integrity: sha512-K7Juri8gtgXVcDfZttFKVmhglp7epKb1K4pgrkLxehjqkrgPhfG6OO8LHLkfaqkbpjNRnra018XwAr1yQFWGcA==} + engines: {node: '>=12'} + + postgres-interval@1.2.0: + resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==} + engines: {node: '>=0.10.0'} + + postgres-interval@3.0.0: + resolution: {integrity: 
sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==} + engines: {node: '>=12'} + + postgres-range@1.1.4: + resolution: {integrity: sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w==} + + postgres@3.4.4: + resolution: {integrity: sha512-IbyN+9KslkqcXa8AO9fxpk97PA4pzewvpi2B3Dwy9u4zpV32QicaEdgmF3eSQUzdRk7ttDHQejNgAEr4XoeH4A==} + engines: {node: '>=12'} + + prebuild-install@7.1.2: + resolution: {integrity: sha512-UnNke3IQb6sgarcZIDU3gbMeTp/9SSU1DAIkil7PrqG1vZlBtY5msYccSKSHDqa3hNg436IXK+SNImReuA1wEQ==} + engines: {node: '>=10'} + hasBin: true + + prelude-ls@1.2.1: + resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} + engines: {node: '>= 0.8.0'} + + prettier-linter-helpers@1.0.0: + resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==} + engines: {node: '>=6.0.0'} + + prettier@2.8.8: + resolution: {integrity: sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==} + engines: {node: '>=10.13.0'} + hasBin: true + + pretty-format@29.7.0: + resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + pretty-ms@8.0.0: + resolution: {integrity: sha512-ASJqOugUF1bbzI35STMBUpZqdfYKlJugy6JBziGi2EE+AL5JPJGSzvpeVXojxrr0ViUYoToUjb5kjSEGf7Y83Q==} + engines: {node: '>=14.16'} + + printable-characters@1.0.42: + resolution: {integrity: sha512-dKp+C4iXWK4vVYZmYSd0KBH5F/h1HoZRsbJ82AVKRO3PEo8L4lBS/vLwhVtpwwuYcoIsVY+1JYKR268yn480uQ==} + + ps-tree@1.2.0: + resolution: {integrity: sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA==} + engines: {node: '>= 0.10'} + hasBin: true + + pump@3.0.0: + resolution: {integrity: 
sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==} + + punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + rc@1.2.8: + resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} + hasBin: true + + react-is@18.3.1: + resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==} + + readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + + readdirp@3.6.0: + resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + + redeyed@2.1.1: + resolution: {integrity: sha512-FNpGGo1DycYAdnrKFxCMmKYgo/mILAqtRYbkdQD8Ep/Hk2PQ5+aEAEx+IU713RTDmuBaH0c8P5ZozurNu5ObRQ==} + + require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + + resolve-cwd@3.0.0: + resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} + engines: {node: '>=8'} + + resolve-from@4.0.0: + resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: {node: '>=4'} + + resolve-from@5.0.0: + resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} + + resolve-pkg-maps@1.0.0: + resolution: {integrity: 
sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} + + resolve.exports@2.0.2: + resolution: {integrity: sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==} + engines: {node: '>=10'} + + resolve@1.22.8: + resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==} + hasBin: true + + reusify@1.0.4: + resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + rimraf@3.0.2: + resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} + hasBin: true + + rollup-plugin-inject@3.0.2: + resolution: {integrity: sha512-ptg9PQwzs3orn4jkgXJ74bfs5vYz1NCZlSQMBUA0wKcGp5i5pA1AO3fOUEte8enhGUC+iapTCzEWw2jEFFUO/w==} + deprecated: This package has been deprecated and is no longer maintained. Please use @rollup/plugin-inject. 
+ + rollup-plugin-node-polyfills@0.2.1: + resolution: {integrity: sha512-4kCrKPTJ6sK4/gLL/U5QzVT8cxJcofO0OU74tnB19F40cmuAKSzH5/siithxlofFEjwvw1YAhPmbvGNA6jEroA==} + + rollup-pluginutils@2.8.2: + resolution: {integrity: sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==} + + rollup@4.17.2: + resolution: {integrity: sha512-/9ClTJPByC0U4zNLowV1tMBe8yMEAxewtR3cUNX5BoEpGH3dQEWpJLr6CLp0fPdYRF/fzVOgvDb1zXuakwF5kQ==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + + selfsigned@2.4.1: + resolution: {integrity: sha512-th5B4L2U+eGLq1TVh7zNRGBapioSORUeymIydxgFpwww9d2qyKvtuPU2jJuHvYAwwqi2Y596QBL3eEqcPEYL8Q==} + engines: {node: '>=10'} + + semver@7.6.2: + resolution: {integrity: sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==} + engines: {node: '>=10'} + hasBin: true + + seq-queue@0.0.5: + resolution: {integrity: sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q==} + + serialize-error@7.0.1: + resolution: {integrity: sha512-8I8TjW5KMOKsZQTvoxjuSIa7foAwPWGOts+6o7sgjz41/qMD9VQHEDxi6PBvK2l0MXUmqZyNpUK+T2tQaaElvw==} + engines: {node: '>=10'} + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + siginfo@2.0.0: 
+ resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} + + signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + + simple-concat@1.0.1: + resolution: {integrity: sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==} + + simple-get@4.0.1: + resolution: {integrity: sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==} + + sisteransi@1.0.5: + resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} + + skin-tone@2.0.0: + resolution: {integrity: sha512-kUMbT1oBJCpgrnKoSr0o6wPtvRWT9W9UKvGLwfJYO2WuahZRHOpEyL1ckyMGgMWh0UdpmaoFqKKD29WTomNEGA==} + engines: {node: '>=8'} + + slash@3.0.0: + resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} + engines: {node: '>=8'} + + slash@4.0.0: + resolution: {integrity: sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==} + engines: {node: '>=12'} + + slice-ansi@5.0.0: + resolution: {integrity: sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==} + engines: {node: '>=12'} + + source-map-js@1.2.0: + resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==} + engines: {node: '>=0.10.0'} + + source-map-support@0.5.21: + resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==} + + source-map@0.6.1: + resolution: {integrity: 
sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + + source-map@0.8.0-beta.0: + resolution: {integrity: sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==} + engines: {node: '>= 8'} + + sourcemap-codec@1.4.8: + resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==} + deprecated: Please use @jridgewell/sourcemap-codec instead + + split-ca@1.0.1: + resolution: {integrity: sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==} + + split2@4.2.0: + resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} + engines: {node: '>= 10.x'} + + split@0.3.3: + resolution: {integrity: sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA==} + + sprintf-js@1.0.3: + resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + + sqlstring@2.3.3: + resolution: {integrity: sha512-qC9iz2FlN7DQl3+wjwn3802RTyjCx7sDvfQEXchwa6CWOx07/WVfh91gBmQ9fahw8snwGEWU3xGzOt4tFyHLxg==} + engines: {node: '>= 0.6'} + + ssh2@1.15.0: + resolution: {integrity: sha512-C0PHgX4h6lBxYx7hcXwu3QWdh4tg6tZZsTfXcdvc5caW/EMxaB4H9dWsl7qk+F7LAW762hp8VbXOX7x4xUYvEw==} + engines: {node: '>=10.16.0'} + + stack-utils@2.0.6: + resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} + engines: {node: '>=10'} + + stackback@0.0.2: + resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} + + stacktracey@2.1.8: + resolution: {integrity: sha512-Kpij9riA+UNg7TnphqjH7/CzctQ/owJGNbFkfEeve4Z4uxT5+JapVLFXcsurIfN34gnTWZNJ/f7NMG0E8JDzTw==} + + std-env@3.7.0: + resolution: {integrity: 
sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==} + + stoppable@1.1.0: + resolution: {integrity: sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==} + engines: {node: '>=4', npm: '>=6'} + + stream-combiner@0.0.4: + resolution: {integrity: sha512-rT00SPnTVyRsaSz5zgSPma/aHSOic5U1prhYdRy5HS2kTZviFpmDgzilbtsJsxiroqACmayynDN/9VzIbX5DOw==} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-ansi@7.1.0: + resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} + engines: {node: '>=12'} + + strip-final-newline@2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + + strip-final-newline@3.0.0: + resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} + engines: {node: '>=12'} + + strip-json-comments@2.0.1: + resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==} + engines: {node: '>=0.10.0'} + + strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + + 
strip-literal@2.1.0: + resolution: {integrity: sha512-Op+UycaUt/8FbN/Z2TWPBLge3jWrP3xj10f3fnYxf052bKuS3EKs1ZQcVGjnEMdsNVAM+plXRdmjrZ/KgG3Skw==} + + strnum@1.0.5: + resolution: {integrity: sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==} + + sucrase@3.35.0: + resolution: {integrity: sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==} + engines: {node: '>=16 || 14 >=14.17'} + hasBin: true + + superjson@2.2.1: + resolution: {integrity: sha512-8iGv75BYOa0xRJHK5vRLEjE2H/i4lulTjzpUXic3Eg8akftYjkmQDa8JARQ42rlczXyFR3IeRoeFCc7RxHsYZA==} + engines: {node: '>=16'} + + supertap@3.0.1: + resolution: {integrity: sha512-u1ZpIBCawJnO+0QePsEiOknOfCRq0yERxiAchT0i4li0WHNUJbf0evXXSXOcCAR4M8iMDoajXYmstm/qO81Isw==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + supports-hyperlinks@3.0.0: + resolution: {integrity: sha512-QBDPHyPQDRTy9ku4URNGY5Lah8PAaXs6tAAwp55sL5WCsSW7GIfdf6W5ixfziW+t7wh3GVvHyHHyQ1ESsoRvaA==} + engines: {node: '>=14.18'} + + supports-preserve-symlinks-flag@1.0.0: + resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} + engines: {node: '>= 0.4'} + + synckit@0.8.8: + resolution: {integrity: sha512-HwOKAP7Wc5aRGYdKH+dw0PRRpbO841v2DENBtjnR5HFWoiNByAl7vrx3p0G/rCyYXQsrxqtX48TImFtPcIHSpQ==} + engines: {node: ^14.18.0 || >=16.0.0} + + tar-fs@2.0.1: + resolution: {integrity: sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==} + + tar-fs@2.1.1: + resolution: {integrity: sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==} + + tar-stream@2.2.0: + resolution: {integrity: 
sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==} + engines: {node: '>=6'} + + temp-dir@3.0.0: + resolution: {integrity: sha512-nHc6S/bwIilKHNRgK/3jlhDoIHcp45YgyiwcAk46Tr0LfEqGBVpmiAyuiuxeVE44m3mXnEeVhaipLOEWmH+Njw==} + engines: {node: '>=14.16'} + + text-table@0.2.0: + resolution: {integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==} + + thenify-all@1.6.0: + resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} + engines: {node: '>=0.8'} + + thenify@3.3.1: + resolution: {integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==} + + through@2.3.8: + resolution: {integrity: sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==} + + time-zone@1.0.0: + resolution: {integrity: sha512-TIsDdtKo6+XrPtiTm1ssmMngN1sAhyKnTO2kunQWqNPWIVvCm15Wmw4SWInwTVgJ5u/Tr04+8Ei9TNcw4x4ONA==} + engines: {node: '>=4'} + + timers-ext@0.1.7: + resolution: {integrity: sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ==} + + tinybench@2.8.0: + resolution: {integrity: sha512-1/eK7zUnIklz4JUUlL+658n58XO2hHLQfSk1Zf2LKieUjxidN16eKFEoDEfjHc3ohofSSqK3X5yO6VGb6iW8Lw==} + + tinypool@0.8.4: + resolution: {integrity: sha512-i11VH5gS6IFeLY3gMBQ00/MmLncVP7JLXOw1vlgkytLmJK7QnEr7NXf0LBdxfmNPAeyetukOk0bOYrJrFGjYJQ==} + engines: {node: '>=14.0.0'} + + tinyspy@2.2.1: + resolution: {integrity: sha512-KYad6Vy5VDWV4GH3fjpseMQ/XU2BhIYP7Vzd0LG44qRWm/Yt2WCOTicFdvmgo6gWaqooMQCawTtILVQJupKu7A==} + engines: {node: '>=14.0.0'} + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + tr46@0.0.3: + resolution: {integrity: 
sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + + tr46@1.0.1: + resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} + + tree-kill@1.2.2: + resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} + hasBin: true + + ts-api-utils@1.3.0: + resolution: {integrity: sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==} + engines: {node: '>=16'} + peerDependencies: + typescript: '>=4.2.0' + + ts-expose-internals-conditionally@1.0.0-empty.0: + resolution: {integrity: sha512-F8m9NOF6ZhdOClDVdlM8gj3fDCav4ZIFSs/EI3ksQbAAXVSCN/Jh5OCJDDZWBuBy9psFc6jULGDlPwjMYMhJDw==} + + ts-interface-checker@0.1.13: + resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} + + tsconfck@3.0.3: + resolution: {integrity: sha512-4t0noZX9t6GcPTfBAbIbbIU4pfpCwh0ueq3S4O/5qXI1VwK1outmxhe9dOiEWqMz3MW2LKgDTpqWV+37IWuVbA==} + engines: {node: ^18 || >=20} + hasBin: true + peerDependencies: + typescript: ^5.0.0 + peerDependenciesMeta: + typescript: + optional: true + + tslib@1.14.1: + resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} + + tslib@2.6.2: + resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} + + tsup@8.0.2: + resolution: {integrity: sha512-NY8xtQXdH7hDUAZwcQdY/Vzlw9johQsaqf7iwZ6g1DOUlFYQ5/AtVAjTvihhEyeRlGo4dLRVHtrRaL35M1daqQ==} + engines: {node: '>=18'} + hasBin: true + peerDependencies: + '@microsoft/api-extractor': ^7.36.0 + '@swc/core': ^1 + postcss: ^8.4.12 + typescript: '>=4.5.0' + peerDependenciesMeta: + '@microsoft/api-extractor': + optional: true + '@swc/core': + optional: true + postcss: + optional: true + typescript: + optional: true + + tsx@3.14.0: + 
resolution: {integrity: sha512-xHtFaKtHxM9LOklMmJdI3BEnQq/D5F73Of2E1GDrITi9sgoVkvIsrQUTY1G8FlmGtA+awCI4EBlTRRYxkL2sRg==} + hasBin: true + + tunnel-agent@0.6.0: + resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==} + + tweetnacl@0.14.5: + resolution: {integrity: sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==} + + type-check@0.4.0: + resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} + engines: {node: '>= 0.8.0'} + + type-detect@4.0.8: + resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} + engines: {node: '>=4'} + + type-fest@0.13.1: + resolution: {integrity: sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg==} + engines: {node: '>=10'} + + type-fest@0.20.2: + resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==} + engines: {node: '>=10'} + + type@2.7.2: + resolution: {integrity: sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw==} + + typescript@5.3.3: + resolution: {integrity: sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==} + engines: {node: '>=14.17'} + hasBin: true + + typescript@5.4.5: + resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==} + engines: {node: '>=14.17'} + hasBin: true + + ufo@1.5.3: + resolution: {integrity: sha512-Y7HYmWaFwPUmkoQCUIAYpKqkOf+SbVj/2fJJZ4RJMCfZp0rTGwRbzQD+HghfnhKOjL9E01okqz+ncJskGYfBNw==} + + undici-types@5.26.5: + resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + + undici@5.28.4: + resolution: {integrity: 
sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==} + engines: {node: '>=14.0'} + + unicode-emoji-modifier-base@1.0.0: + resolution: {integrity: sha512-yLSH4py7oFH3oG/9K+XWrz1pSi3dfUrWEnInbxMfArOfc1+33BlGPQtLsOYwvdMy11AwUBetYuaRxSPqgkq+8g==} + engines: {node: '>=4'} + + universalify@2.0.1: + resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} + engines: {node: '>= 10.0.0'} + + uri-js@4.4.1: + resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + + utf-8-validate@6.0.3: + resolution: {integrity: sha512-uIuGf9TWQ/y+0Lp+KGZCMuJWc3N9BHA+l/UmHd/oUHwJJDeysyTRxNQVkbzsIWfGFbRe3OcgML/i0mvVRPOyDA==} + engines: {node: '>=6.14.2'} + + util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + + uuid@9.0.1: + resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} + hasBin: true + + validate-npm-package-name@5.0.1: + resolution: {integrity: sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + vite-node@1.6.0: + resolution: {integrity: sha512-de6HJgzC+TFzOu0NTC4RAIsyf/DY/ibWDYQUcuEA84EMHhcefTUGkjFHKKEJhQN4A+6I0u++kr3l36ZF2d7XRw==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + + vite-tsconfig-paths@4.3.2: + resolution: {integrity: sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} + peerDependencies: + vite: '*' + peerDependenciesMeta: + vite: + optional: true + + vite@5.2.11: + resolution: {integrity: sha512-HndV31LWW05i1BLPMUCE1B9E9GFbOu1MbenhS58FuK6owSO5qHm7GiCotrNY1YE5rMeQSFBGmT5ZaLEjFizgiQ==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@types/node': ^18.0.0 || 
>=20.0.0 + less: '*' + lightningcss: ^1.21.0 + sass: '*' + stylus: '*' + sugarss: '*' + terser: ^5.4.0 + peerDependenciesMeta: + '@types/node': + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + + vitest@1.6.0: + resolution: {integrity: sha512-H5r/dN06swuFnzNFhq/dnz37bPXnq8xB2xB5JOVk8K09rUtoeNN+LHWkoQ0A/i3hvbUKKcCei9KpbxqHMLhLLA==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@types/node': ^18.0.0 || >=20.0.0 + '@vitest/browser': 1.6.0 + '@vitest/ui': 1.6.0 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@types/node': + optional: true + '@vitest/browser': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + + web-streams-polyfill@3.3.3: + resolution: {integrity: sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==} + engines: {node: '>= 8'} + + webidl-conversions@3.0.1: + resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + + webidl-conversions@4.0.2: + resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} + + webpod@0.0.2: + resolution: {integrity: sha512-cSwwQIeg8v4i3p4ajHhwgR7N6VyxAf+KYSSsY6Pd3aETE+xEU4vbitz7qQkB0I321xnhDdgtxuiSfk5r/FVtjg==} + hasBin: true + + well-known-symbols@2.0.0: + resolution: {integrity: sha512-ZMjC3ho+KXo0BfJb7JgtQ5IBuvnShdlACNkKkdsqBmYw3bPAaJfPeYUo6tLUaT5tG/Gkh7xkpBhKRQ9e7pyg9Q==} + engines: {node: '>=6'} + + whatwg-url@5.0.0: + resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + + whatwg-url@7.1.0: + resolution: {integrity: 
sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + which@3.0.1: + resolution: {integrity: sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + hasBin: true + + why-is-node-running@2.2.2: + resolution: {integrity: sha512-6tSwToZxTOcotxHeA+qGCq1mVzKR3CwcJGmVcY+QE8SHy6TnpFnh8PAvPNHYr7EcuVeG0QSMxtYCuO1ta/G/oA==} + engines: {node: '>=8'} + hasBin: true + + word-wrap@1.2.5: + resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} + engines: {node: '>=0.10.0'} + + wordwrap@1.0.0: + resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} + + workerd@1.20240512.0: + resolution: {integrity: sha512-VUBmR1PscAPHEE0OF/G2K7/H1gnr9aDWWZzdkIgWfNKkv8dKFCT75H+GJtUHjfwqz3rYCzaNZmatSXOpLGpF8A==} + engines: {node: '>=16'} + hasBin: true + + wrangler@3.57.0: + resolution: {integrity: sha512-izK3AZtlFoTq8N0EZjLOQ7hqwsjaXCc1cbNKuhsLJjDX1jB1YZBDPhIhtXL4VVzkJAcH+0Zw2gguOePFCHNaxw==} + engines: {node: '>=16.17.0'} + hasBin: true + peerDependencies: + '@cloudflare/workers-types': ^4.20240512.0 + peerDependenciesMeta: + '@cloudflare/workers-types': + optional: true + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + + wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + + 
write-file-atomic@5.0.1: + resolution: {integrity: sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + ws@8.14.2: + resolution: {integrity: sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + + ws@8.17.0: + resolution: {integrity: sha512-uJq6108EgZMAl20KagGkzCKfMEjxmKvZHG7Tlq0Z6nOky7YF7aq4mOx6xK8TJ/i1LeK4Qus7INktacctDgY8Ow==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + + xtend@4.0.2: + resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} + engines: {node: '>=0.4'} + + xxhash-wasm@1.0.2: + resolution: {integrity: sha512-ibF0Or+FivM9lNrg+HGJfVX8WJqgo+kCLDc4vx6xMeTce7Aj+DLttKbxxRR/gNLSAelRc1omAPlJ77N/Jem07A==} + + y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + + yallist@4.0.0: + resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} + + yaml@2.4.2: + resolution: {integrity: sha512-B3VqDZ+JAg1nZpaEmWtTXUlBneoGx6CPM9b0TENK6aoSu5t73dItudwdgmi6tHlIZZId4dZ9skcAQ2UbcyAeVA==} + engines: {node: '>= 14'} + hasBin: true + + yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + + yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + + 
yocto-queue@0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + + yocto-queue@1.0.0: + resolution: {integrity: sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==} + engines: {node: '>=12.20'} + + youch@3.3.3: + resolution: {integrity: sha512-qSFXUk3UZBLfggAW3dJKg0BMblG5biqSF8M34E06o5CSsZtH92u9Hqmj2RzGiHDi64fhe83+4tENFP2DB6t6ZA==} + + zod@3.23.8: + resolution: {integrity: sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==} + + zx@7.2.3: + resolution: {integrity: sha512-QODu38nLlYXg/B/Gw7ZKiZrvPkEsjPN3LQ5JFXM7h0JvwhEdPNNl+4Ao1y4+o3CLNiDUNcwzQYZ4/Ko7kKzCMA==} + engines: {node: '>= 16.0.0'} + hasBin: true + +snapshots: + + '@andrewbranch/untar.js@1.0.3': {} + + '@arethetypeswrong/cli@0.15.3': + dependencies: + '@arethetypeswrong/core': 0.15.1 + chalk: 4.1.2 + cli-table3: 0.6.5 + commander: 10.0.1 + marked: 9.1.6 + marked-terminal: 6.2.0(marked@9.1.6) + semver: 7.6.2 + + '@arethetypeswrong/core@0.15.1': + dependencies: + '@andrewbranch/untar.js': 1.0.3 + fflate: 0.8.2 + semver: 7.6.2 + ts-expose-internals-conditionally: 1.0.0-empty.0 + typescript: 5.3.3 + validate-npm-package-name: 5.0.1 + + '@aws-crypto/ie11-detection@3.0.0': + dependencies: + tslib: 1.14.1 + + '@aws-crypto/sha256-browser@3.0.0': + dependencies: + '@aws-crypto/ie11-detection': 3.0.0 + '@aws-crypto/sha256-js': 3.0.0 + '@aws-crypto/supports-web-crypto': 3.0.0 + '@aws-crypto/util': 3.0.0 + '@aws-sdk/types': 3.577.0 + '@aws-sdk/util-locate-window': 3.568.0 + '@aws-sdk/util-utf8-browser': 3.259.0 + tslib: 1.14.1 + + '@aws-crypto/sha256-js@3.0.0': + dependencies: + '@aws-crypto/util': 3.0.0 + '@aws-sdk/types': 3.577.0 + tslib: 1.14.1 + + '@aws-crypto/supports-web-crypto@3.0.0': + dependencies: + tslib: 1.14.1 + + '@aws-crypto/util@3.0.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@aws-sdk/util-utf8-browser': 
3.259.0 + tslib: 1.14.1 + + '@aws-sdk/client-rds-data@3.577.0': + dependencies: + '@aws-crypto/sha256-browser': 3.0.0 + '@aws-crypto/sha256-js': 3.0.0 + '@aws-sdk/client-sso-oidc': 3.577.0(@aws-sdk/client-sts@3.577.0) + '@aws-sdk/client-sts': 3.577.0 + '@aws-sdk/core': 3.576.0 + '@aws-sdk/credential-provider-node': 3.577.0(@aws-sdk/client-sso-oidc@3.577.0(@aws-sdk/client-sts@3.577.0))(@aws-sdk/client-sts@3.577.0) + '@aws-sdk/middleware-host-header': 3.577.0 + '@aws-sdk/middleware-logger': 3.577.0 + '@aws-sdk/middleware-recursion-detection': 3.577.0 + '@aws-sdk/middleware-user-agent': 3.577.0 + '@aws-sdk/region-config-resolver': 3.577.0 + '@aws-sdk/types': 3.577.0 + '@aws-sdk/util-endpoints': 3.577.0 + '@aws-sdk/util-user-agent-browser': 3.577.0 + '@aws-sdk/util-user-agent-node': 3.577.0 + '@smithy/config-resolver': 3.0.0 + '@smithy/core': 2.0.1 + '@smithy/fetch-http-handler': 3.0.1 + '@smithy/hash-node': 3.0.0 + '@smithy/invalid-dependency': 3.0.0 + '@smithy/middleware-content-length': 3.0.0 + '@smithy/middleware-endpoint': 3.0.0 + '@smithy/middleware-retry': 3.0.1 + '@smithy/middleware-serde': 3.0.0 + '@smithy/middleware-stack': 3.0.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/node-http-handler': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/url-parser': 3.0.0 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.1 + '@smithy/util-defaults-mode-node': 3.0.1 + '@smithy/util-endpoints': 2.0.0 + '@smithy/util-middleware': 3.0.0 + '@smithy/util-retry': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-sso-oidc@3.577.0(@aws-sdk/client-sts@3.577.0)': + dependencies: + '@aws-crypto/sha256-browser': 3.0.0 + '@aws-crypto/sha256-js': 3.0.0 + '@aws-sdk/client-sts': 3.577.0 + '@aws-sdk/core': 3.576.0 + '@aws-sdk/credential-provider-node': 
3.577.0(@aws-sdk/client-sso-oidc@3.577.0(@aws-sdk/client-sts@3.577.0))(@aws-sdk/client-sts@3.577.0) + '@aws-sdk/middleware-host-header': 3.577.0 + '@aws-sdk/middleware-logger': 3.577.0 + '@aws-sdk/middleware-recursion-detection': 3.577.0 + '@aws-sdk/middleware-user-agent': 3.577.0 + '@aws-sdk/region-config-resolver': 3.577.0 + '@aws-sdk/types': 3.577.0 + '@aws-sdk/util-endpoints': 3.577.0 + '@aws-sdk/util-user-agent-browser': 3.577.0 + '@aws-sdk/util-user-agent-node': 3.577.0 + '@smithy/config-resolver': 3.0.0 + '@smithy/core': 2.0.1 + '@smithy/fetch-http-handler': 3.0.1 + '@smithy/hash-node': 3.0.0 + '@smithy/invalid-dependency': 3.0.0 + '@smithy/middleware-content-length': 3.0.0 + '@smithy/middleware-endpoint': 3.0.0 + '@smithy/middleware-retry': 3.0.1 + '@smithy/middleware-serde': 3.0.0 + '@smithy/middleware-stack': 3.0.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/node-http-handler': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/url-parser': 3.0.0 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.1 + '@smithy/util-defaults-mode-node': 3.0.1 + '@smithy/util-endpoints': 2.0.0 + '@smithy/util-middleware': 3.0.0 + '@smithy/util-retry': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sts' + - aws-crt + + '@aws-sdk/client-sso@3.577.0': + dependencies: + '@aws-crypto/sha256-browser': 3.0.0 + '@aws-crypto/sha256-js': 3.0.0 + '@aws-sdk/core': 3.576.0 + '@aws-sdk/middleware-host-header': 3.577.0 + '@aws-sdk/middleware-logger': 3.577.0 + '@aws-sdk/middleware-recursion-detection': 3.577.0 + '@aws-sdk/middleware-user-agent': 3.577.0 + '@aws-sdk/region-config-resolver': 3.577.0 + '@aws-sdk/types': 3.577.0 + '@aws-sdk/util-endpoints': 3.577.0 + '@aws-sdk/util-user-agent-browser': 3.577.0 + '@aws-sdk/util-user-agent-node': 3.577.0 + 
'@smithy/config-resolver': 3.0.0 + '@smithy/core': 2.0.1 + '@smithy/fetch-http-handler': 3.0.1 + '@smithy/hash-node': 3.0.0 + '@smithy/invalid-dependency': 3.0.0 + '@smithy/middleware-content-length': 3.0.0 + '@smithy/middleware-endpoint': 3.0.0 + '@smithy/middleware-retry': 3.0.1 + '@smithy/middleware-serde': 3.0.0 + '@smithy/middleware-stack': 3.0.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/node-http-handler': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/url-parser': 3.0.0 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.1 + '@smithy/util-defaults-mode-node': 3.0.1 + '@smithy/util-endpoints': 2.0.0 + '@smithy/util-middleware': 3.0.0 + '@smithy/util-retry': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-sts@3.577.0': + dependencies: + '@aws-crypto/sha256-browser': 3.0.0 + '@aws-crypto/sha256-js': 3.0.0 + '@aws-sdk/client-sso-oidc': 3.577.0(@aws-sdk/client-sts@3.577.0) + '@aws-sdk/core': 3.576.0 + '@aws-sdk/credential-provider-node': 3.577.0(@aws-sdk/client-sso-oidc@3.577.0(@aws-sdk/client-sts@3.577.0))(@aws-sdk/client-sts@3.577.0) + '@aws-sdk/middleware-host-header': 3.577.0 + '@aws-sdk/middleware-logger': 3.577.0 + '@aws-sdk/middleware-recursion-detection': 3.577.0 + '@aws-sdk/middleware-user-agent': 3.577.0 + '@aws-sdk/region-config-resolver': 3.577.0 + '@aws-sdk/types': 3.577.0 + '@aws-sdk/util-endpoints': 3.577.0 + '@aws-sdk/util-user-agent-browser': 3.577.0 + '@aws-sdk/util-user-agent-node': 3.577.0 + '@smithy/config-resolver': 3.0.0 + '@smithy/core': 2.0.1 + '@smithy/fetch-http-handler': 3.0.1 + '@smithy/hash-node': 3.0.0 + '@smithy/invalid-dependency': 3.0.0 + '@smithy/middleware-content-length': 3.0.0 + '@smithy/middleware-endpoint': 3.0.0 + '@smithy/middleware-retry': 3.0.1 + '@smithy/middleware-serde': 3.0.0 + 
'@smithy/middleware-stack': 3.0.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/node-http-handler': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/url-parser': 3.0.0 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.1 + '@smithy/util-defaults-mode-node': 3.0.1 + '@smithy/util-endpoints': 2.0.0 + '@smithy/util-middleware': 3.0.0 + '@smithy/util-retry': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/core@3.576.0': + dependencies: + '@smithy/core': 2.0.1 + '@smithy/protocol-http': 4.0.0 + '@smithy/signature-v4': 3.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + fast-xml-parser: 4.2.5 + tslib: 2.6.2 + + '@aws-sdk/credential-provider-env@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/property-provider': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@aws-sdk/credential-provider-http@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/fetch-http-handler': 3.0.1 + '@smithy/node-http-handler': 3.0.0 + '@smithy/property-provider': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/util-stream': 3.0.1 + tslib: 2.6.2 + + '@aws-sdk/credential-provider-ini@3.577.0(@aws-sdk/client-sso-oidc@3.577.0(@aws-sdk/client-sts@3.577.0))(@aws-sdk/client-sts@3.577.0)': + dependencies: + '@aws-sdk/client-sts': 3.577.0 + '@aws-sdk/credential-provider-env': 3.577.0 + '@aws-sdk/credential-provider-process': 3.577.0 + '@aws-sdk/credential-provider-sso': 3.577.0(@aws-sdk/client-sso-oidc@3.577.0(@aws-sdk/client-sts@3.577.0)) + '@aws-sdk/credential-provider-web-identity': 3.577.0(@aws-sdk/client-sts@3.577.0) + '@aws-sdk/types': 3.577.0 + '@smithy/credential-provider-imds': 3.0.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + 
'@smithy/types': 3.0.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/credential-provider-node@3.577.0(@aws-sdk/client-sso-oidc@3.577.0(@aws-sdk/client-sts@3.577.0))(@aws-sdk/client-sts@3.577.0)': + dependencies: + '@aws-sdk/credential-provider-env': 3.577.0 + '@aws-sdk/credential-provider-http': 3.577.0 + '@aws-sdk/credential-provider-ini': 3.577.0(@aws-sdk/client-sso-oidc@3.577.0(@aws-sdk/client-sts@3.577.0))(@aws-sdk/client-sts@3.577.0) + '@aws-sdk/credential-provider-process': 3.577.0 + '@aws-sdk/credential-provider-sso': 3.577.0(@aws-sdk/client-sso-oidc@3.577.0(@aws-sdk/client-sts@3.577.0)) + '@aws-sdk/credential-provider-web-identity': 3.577.0(@aws-sdk/client-sts@3.577.0) + '@aws-sdk/types': 3.577.0 + '@smithy/credential-provider-imds': 3.0.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - '@aws-sdk/client-sts' + - aws-crt + + '@aws-sdk/credential-provider-process@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@aws-sdk/credential-provider-sso@3.577.0(@aws-sdk/client-sso-oidc@3.577.0(@aws-sdk/client-sts@3.577.0))': + dependencies: + '@aws-sdk/client-sso': 3.577.0 + '@aws-sdk/token-providers': 3.577.0(@aws-sdk/client-sso-oidc@3.577.0(@aws-sdk/client-sts@3.577.0)) + '@aws-sdk/types': 3.577.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + + '@aws-sdk/credential-provider-web-identity@3.577.0(@aws-sdk/client-sts@3.577.0)': + dependencies: + '@aws-sdk/client-sts': 3.577.0 + '@aws-sdk/types': 3.577.0 + '@smithy/property-provider': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + 
'@aws-sdk/middleware-host-header@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@aws-sdk/middleware-logger@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@aws-sdk/middleware-recursion-detection@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@aws-sdk/middleware-user-agent@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@aws-sdk/util-endpoints': 3.577.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@aws-sdk/region-config-resolver@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.0 + tslib: 2.6.2 + + '@aws-sdk/token-providers@3.577.0(@aws-sdk/client-sso-oidc@3.577.0(@aws-sdk/client-sts@3.577.0))': + dependencies: + '@aws-sdk/client-sso-oidc': 3.577.0(@aws-sdk/client-sts@3.577.0) + '@aws-sdk/types': 3.577.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@aws-sdk/types@3.577.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@aws-sdk/util-endpoints@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/types': 3.0.0 + '@smithy/util-endpoints': 2.0.0 + tslib: 2.6.2 + + '@aws-sdk/util-locate-window@3.568.0': + dependencies: + tslib: 2.6.2 + + '@aws-sdk/util-user-agent-browser@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/types': 3.0.0 + bowser: 2.11.0 + tslib: 2.6.2 + + '@aws-sdk/util-user-agent-node@3.577.0': + dependencies: + '@aws-sdk/types': 3.577.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@aws-sdk/util-utf8-browser@3.259.0': + dependencies: + tslib: 2.6.2 + + '@balena/dockerignore@1.0.2': {} + + 
'@cloudflare/kv-asset-handler@0.3.2': + dependencies: + mime: 3.0.0 + + '@cloudflare/workerd-darwin-64@1.20240512.0': + optional: true + + '@cloudflare/workerd-darwin-arm64@1.20240512.0': + optional: true + + '@cloudflare/workerd-linux-64@1.20240512.0': + optional: true + + '@cloudflare/workerd-linux-arm64@1.20240512.0': + optional: true + + '@cloudflare/workerd-windows-64@1.20240512.0': + optional: true + + '@cloudflare/workers-types@4.20240512.0': {} + + '@colors/colors@1.5.0': + optional: true + + '@cspotcode/source-map-support@0.8.1': + dependencies: + '@jridgewell/trace-mapping': 0.3.9 + + '@electric-sql/pglite@0.1.5': {} + + '@esbuild-kit/core-utils@3.3.2': + dependencies: + esbuild: 0.18.20 + source-map-support: 0.5.21 + + '@esbuild-kit/esm-loader@2.6.5': + dependencies: + '@esbuild-kit/core-utils': 3.3.2 + get-tsconfig: 4.7.5 + + '@esbuild-plugins/node-globals-polyfill@0.2.3(esbuild@0.17.19)': + dependencies: + esbuild: 0.17.19 + + '@esbuild-plugins/node-modules-polyfill@0.2.2(esbuild@0.17.19)': + dependencies: + esbuild: 0.17.19 + escape-string-regexp: 4.0.0 + rollup-plugin-node-polyfills: 0.2.1 + + '@esbuild/aix-ppc64@0.19.12': + optional: true + + '@esbuild/aix-ppc64@0.20.2': + optional: true + + '@esbuild/android-arm64@0.17.19': + optional: true + + '@esbuild/android-arm64@0.18.20': + optional: true + + '@esbuild/android-arm64@0.19.12': + optional: true + + '@esbuild/android-arm64@0.20.2': + optional: true + + '@esbuild/android-arm@0.17.19': + optional: true + + '@esbuild/android-arm@0.18.20': + optional: true + + '@esbuild/android-arm@0.19.12': + optional: true + + '@esbuild/android-arm@0.20.2': + optional: true + + '@esbuild/android-x64@0.17.19': + optional: true + + '@esbuild/android-x64@0.18.20': + optional: true + + '@esbuild/android-x64@0.19.12': + optional: true + + '@esbuild/android-x64@0.20.2': + optional: true + + '@esbuild/darwin-arm64@0.17.19': + optional: true + + '@esbuild/darwin-arm64@0.18.20': + optional: true + + 
'@esbuild/darwin-arm64@0.19.12': + optional: true + + '@esbuild/darwin-arm64@0.20.2': + optional: true + + '@esbuild/darwin-x64@0.17.19': + optional: true + + '@esbuild/darwin-x64@0.18.20': + optional: true + + '@esbuild/darwin-x64@0.19.12': + optional: true + + '@esbuild/darwin-x64@0.20.2': + optional: true + + '@esbuild/freebsd-arm64@0.17.19': + optional: true + + '@esbuild/freebsd-arm64@0.18.20': + optional: true + + '@esbuild/freebsd-arm64@0.19.12': + optional: true + + '@esbuild/freebsd-arm64@0.20.2': + optional: true + + '@esbuild/freebsd-x64@0.17.19': + optional: true + + '@esbuild/freebsd-x64@0.18.20': + optional: true + + '@esbuild/freebsd-x64@0.19.12': + optional: true + + '@esbuild/freebsd-x64@0.20.2': + optional: true + + '@esbuild/linux-arm64@0.17.19': + optional: true + + '@esbuild/linux-arm64@0.18.20': + optional: true + + '@esbuild/linux-arm64@0.19.12': + optional: true + + '@esbuild/linux-arm64@0.20.2': + optional: true + + '@esbuild/linux-arm@0.17.19': + optional: true + + '@esbuild/linux-arm@0.18.20': + optional: true + + '@esbuild/linux-arm@0.19.12': + optional: true + + '@esbuild/linux-arm@0.20.2': + optional: true + + '@esbuild/linux-ia32@0.17.19': + optional: true + + '@esbuild/linux-ia32@0.18.20': + optional: true + + '@esbuild/linux-ia32@0.19.12': + optional: true + + '@esbuild/linux-ia32@0.20.2': + optional: true + + '@esbuild/linux-loong64@0.14.54': + optional: true + + '@esbuild/linux-loong64@0.17.19': + optional: true + + '@esbuild/linux-loong64@0.18.20': + optional: true + + '@esbuild/linux-loong64@0.19.12': + optional: true + + '@esbuild/linux-loong64@0.20.2': + optional: true + + '@esbuild/linux-mips64el@0.17.19': + optional: true + + '@esbuild/linux-mips64el@0.18.20': + optional: true + + '@esbuild/linux-mips64el@0.19.12': + optional: true + + '@esbuild/linux-mips64el@0.20.2': + optional: true + + '@esbuild/linux-ppc64@0.17.19': + optional: true + + '@esbuild/linux-ppc64@0.18.20': + optional: true + + '@esbuild/linux-ppc64@0.19.12': 
+ optional: true + + '@esbuild/linux-ppc64@0.20.2': + optional: true + + '@esbuild/linux-riscv64@0.17.19': + optional: true + + '@esbuild/linux-riscv64@0.18.20': + optional: true + + '@esbuild/linux-riscv64@0.19.12': + optional: true + + '@esbuild/linux-riscv64@0.20.2': + optional: true + + '@esbuild/linux-s390x@0.17.19': + optional: true + + '@esbuild/linux-s390x@0.18.20': + optional: true + + '@esbuild/linux-s390x@0.19.12': + optional: true + + '@esbuild/linux-s390x@0.20.2': + optional: true + + '@esbuild/linux-x64@0.17.19': + optional: true + + '@esbuild/linux-x64@0.18.20': + optional: true + + '@esbuild/linux-x64@0.19.12': + optional: true + + '@esbuild/linux-x64@0.20.2': + optional: true + + '@esbuild/netbsd-x64@0.17.19': + optional: true + + '@esbuild/netbsd-x64@0.18.20': + optional: true + + '@esbuild/netbsd-x64@0.19.12': + optional: true + + '@esbuild/netbsd-x64@0.20.2': + optional: true + + '@esbuild/openbsd-x64@0.17.19': + optional: true + + '@esbuild/openbsd-x64@0.18.20': + optional: true + + '@esbuild/openbsd-x64@0.19.12': + optional: true + + '@esbuild/openbsd-x64@0.20.2': + optional: true + + '@esbuild/sunos-x64@0.17.19': + optional: true + + '@esbuild/sunos-x64@0.18.20': + optional: true + + '@esbuild/sunos-x64@0.19.12': + optional: true + + '@esbuild/sunos-x64@0.20.2': + optional: true + + '@esbuild/win32-arm64@0.17.19': + optional: true + + '@esbuild/win32-arm64@0.18.20': + optional: true + + '@esbuild/win32-arm64@0.19.12': + optional: true + + '@esbuild/win32-arm64@0.20.2': + optional: true + + '@esbuild/win32-ia32@0.17.19': + optional: true + + '@esbuild/win32-ia32@0.18.20': + optional: true + + '@esbuild/win32-ia32@0.19.12': + optional: true + + '@esbuild/win32-ia32@0.20.2': + optional: true + + '@esbuild/win32-x64@0.17.19': + optional: true + + '@esbuild/win32-x64@0.18.20': + optional: true + + '@esbuild/win32-x64@0.19.12': + optional: true + + '@esbuild/win32-x64@0.20.2': + optional: true + + 
'@eslint-community/eslint-utils@4.4.0(eslint@8.57.0)': + dependencies: + eslint: 8.57.0 + eslint-visitor-keys: 3.4.3 + + '@eslint-community/regexpp@4.10.0': {} + + '@eslint/eslintrc@2.1.4': + dependencies: + ajv: 6.12.6 + debug: 4.3.4 + espree: 9.6.1 + globals: 13.24.0 + ignore: 5.3.1 + import-fresh: 3.3.0 + js-yaml: 4.1.0 + minimatch: 3.1.2 + strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + + '@eslint/js@8.57.0': {} + + '@ewoudenberg/difflib@0.1.0': + dependencies: + heap: 0.2.7 + + '@fastify/busboy@2.1.1': {} + + '@hono/node-server@1.11.1': {} + + '@hono/zod-validator@0.2.1(hono@4.3.9)(zod@3.23.8)': + dependencies: + hono: 4.3.9 + zod: 3.23.8 + + '@humanwhocodes/config-array@0.11.14': + dependencies: + '@humanwhocodes/object-schema': 2.0.3 + debug: 4.3.4 + minimatch: 3.1.2 + transitivePeerDependencies: + - supports-color + + '@humanwhocodes/module-importer@1.0.1': {} + + '@humanwhocodes/object-schema@2.0.3': {} + + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + + '@jest/schemas@29.6.3': + dependencies: + '@sinclair/typebox': 0.27.8 + + '@jridgewell/gen-mapping@0.3.5': + dependencies: + '@jridgewell/set-array': 1.2.1 + '@jridgewell/sourcemap-codec': 1.4.15 + '@jridgewell/trace-mapping': 0.3.25 + + '@jridgewell/resolve-uri@3.1.2': {} + + '@jridgewell/set-array@1.2.1': {} + + '@jridgewell/sourcemap-codec@1.4.15': {} + + '@jridgewell/trace-mapping@0.3.25': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.4.15 + + '@jridgewell/trace-mapping@0.3.9': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.4.15 + + '@libsql/client@0.4.3(bufferutil@4.0.8)(utf-8-validate@6.0.3)': + dependencies: + '@libsql/core': 0.4.3 + '@libsql/hrana-client': 0.5.6(bufferutil@4.0.8)(utf-8-validate@6.0.3) + js-base64: 3.7.7 + 
optionalDependencies: + libsql: 0.2.0 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + + '@libsql/core@0.4.3': + dependencies: + js-base64: 3.7.7 + + '@libsql/darwin-arm64@0.2.0': + optional: true + + '@libsql/darwin-x64@0.2.0': + optional: true + + '@libsql/hrana-client@0.5.6(bufferutil@4.0.8)(utf-8-validate@6.0.3)': + dependencies: + '@libsql/isomorphic-fetch': 0.1.12 + '@libsql/isomorphic-ws': 0.1.5(bufferutil@4.0.8)(utf-8-validate@6.0.3) + js-base64: 3.7.7 + node-fetch: 3.3.2 + transitivePeerDependencies: + - bufferutil + - encoding + - utf-8-validate + + '@libsql/isomorphic-fetch@0.1.12': + dependencies: + '@types/node-fetch': 2.6.11 + node-fetch: 2.7.0 + transitivePeerDependencies: + - encoding + + '@libsql/isomorphic-ws@0.1.5(bufferutil@4.0.8)(utf-8-validate@6.0.3)': + dependencies: + '@types/ws': 8.5.10 + ws: 8.17.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + '@libsql/linux-arm64-gnu@0.2.0': + optional: true + + '@libsql/linux-arm64-musl@0.2.0': + optional: true + + '@libsql/linux-x64-gnu@0.2.0': + optional: true + + '@libsql/linux-x64-musl@0.2.0': + optional: true + + '@libsql/win32-x64-msvc@0.2.0': + optional: true + + '@neon-rs/load@0.0.4': + optional: true + + '@neondatabase/serverless@0.7.2': + dependencies: + '@types/pg': 8.6.6 + + '@neondatabase/serverless@0.9.3': + dependencies: + '@types/pg': 8.11.6 + + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.17.1 + + '@originjs/vite-plugin-commonjs@1.0.3': + dependencies: + esbuild: 0.14.54 + + '@pkgjs/parseargs@0.11.0': + optional: true + + '@pkgr/core@0.1.1': {} + + '@planetscale/database@1.18.0': {} + + '@rollup/rollup-android-arm-eabi@4.17.2': + optional: true + + '@rollup/rollup-android-arm64@4.17.2': + optional: true + + 
'@rollup/rollup-darwin-arm64@4.17.2': + optional: true + + '@rollup/rollup-darwin-x64@4.17.2': + optional: true + + '@rollup/rollup-linux-arm-gnueabihf@4.17.2': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.17.2': + optional: true + + '@rollup/rollup-linux-arm64-gnu@4.17.2': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.17.2': + optional: true + + '@rollup/rollup-linux-powerpc64le-gnu@4.17.2': + optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.17.2': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.17.2': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.17.2': + optional: true + + '@rollup/rollup-linux-x64-musl@4.17.2': + optional: true + + '@rollup/rollup-win32-arm64-msvc@4.17.2': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.17.2': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.17.2': + optional: true + + '@sinclair/typebox@0.27.8': {} + + '@sindresorhus/is@4.6.0': {} + + '@smithy/abort-controller@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/config-resolver@3.0.0': + dependencies: + '@smithy/node-config-provider': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.0 + tslib: 2.6.2 + + '@smithy/core@2.0.1': + dependencies: + '@smithy/middleware-endpoint': 3.0.0 + '@smithy/middleware-retry': 3.0.1 + '@smithy/middleware-serde': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/util-middleware': 3.0.0 + tslib: 2.6.2 + + '@smithy/credential-provider-imds@3.0.0': + dependencies: + '@smithy/node-config-provider': 3.0.0 + '@smithy/property-provider': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/url-parser': 3.0.0 + tslib: 2.6.2 + + '@smithy/fetch-http-handler@3.0.1': + dependencies: + '@smithy/protocol-http': 4.0.0 + '@smithy/querystring-builder': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/util-base64': 3.0.0 + tslib: 2.6.2 + + '@smithy/hash-node@3.0.0': + dependencies: + 
'@smithy/types': 3.0.0 + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + + '@smithy/invalid-dependency@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/is-array-buffer@3.0.0': + dependencies: + tslib: 2.6.2 + + '@smithy/middleware-content-length@3.0.0': + dependencies: + '@smithy/protocol-http': 4.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/middleware-endpoint@3.0.0': + dependencies: + '@smithy/middleware-serde': 3.0.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/url-parser': 3.0.0 + '@smithy/util-middleware': 3.0.0 + tslib: 2.6.2 + + '@smithy/middleware-retry@3.0.1': + dependencies: + '@smithy/node-config-provider': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/service-error-classification': 3.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/util-middleware': 3.0.0 + '@smithy/util-retry': 3.0.0 + tslib: 2.6.2 + uuid: 9.0.1 + + '@smithy/middleware-serde@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/middleware-stack@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/node-config-provider@3.0.0': + dependencies: + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/node-http-handler@3.0.0': + dependencies: + '@smithy/abort-controller': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/querystring-builder': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/property-provider@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/protocol-http@4.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/querystring-builder@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + '@smithy/util-uri-escape': 3.0.0 + tslib: 2.6.2 + + '@smithy/querystring-parser@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + 
'@smithy/service-error-classification@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + + '@smithy/shared-ini-file-loader@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/signature-v4@3.0.0': + dependencies: + '@smithy/is-array-buffer': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/util-hex-encoding': 3.0.0 + '@smithy/util-middleware': 3.0.0 + '@smithy/util-uri-escape': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + + '@smithy/smithy-client@3.0.1': + dependencies: + '@smithy/middleware-endpoint': 3.0.0 + '@smithy/middleware-stack': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/types': 3.0.0 + '@smithy/util-stream': 3.0.1 + tslib: 2.6.2 + + '@smithy/types@3.0.0': + dependencies: + tslib: 2.6.2 + + '@smithy/url-parser@3.0.0': + dependencies: + '@smithy/querystring-parser': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/util-base64@3.0.0': + dependencies: + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + + '@smithy/util-body-length-browser@3.0.0': + dependencies: + tslib: 2.6.2 + + '@smithy/util-body-length-node@3.0.0': + dependencies: + tslib: 2.6.2 + + '@smithy/util-buffer-from@3.0.0': + dependencies: + '@smithy/is-array-buffer': 3.0.0 + tslib: 2.6.2 + + '@smithy/util-config-provider@3.0.0': + dependencies: + tslib: 2.6.2 + + '@smithy/util-defaults-mode-browser@3.0.1': + dependencies: + '@smithy/property-provider': 3.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + bowser: 2.11.0 + tslib: 2.6.2 + + '@smithy/util-defaults-mode-node@3.0.1': + dependencies: + '@smithy/config-resolver': 3.0.0 + '@smithy/credential-provider-imds': 3.0.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/property-provider': 3.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/util-endpoints@2.0.0': + dependencies: + '@smithy/node-config-provider': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/util-hex-encoding@3.0.0': + dependencies: + tslib: 
2.6.2 + + '@smithy/util-middleware@3.0.0': + dependencies: + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/util-retry@3.0.0': + dependencies: + '@smithy/service-error-classification': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.6.2 + + '@smithy/util-stream@3.0.1': + dependencies: + '@smithy/fetch-http-handler': 3.0.1 + '@smithy/node-http-handler': 3.0.0 + '@smithy/types': 3.0.0 + '@smithy/util-base64': 3.0.0 + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-hex-encoding': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + + '@smithy/util-uri-escape@3.0.0': + dependencies: + tslib: 2.6.2 + + '@smithy/util-utf8@3.0.0': + dependencies: + '@smithy/util-buffer-from': 3.0.0 + tslib: 2.6.2 + + '@types/better-sqlite3@7.6.10': + dependencies: + '@types/node': 18.19.33 + + '@types/docker-modem@3.0.6': + dependencies: + '@types/node': 18.19.33 + '@types/ssh2': 1.15.0 + + '@types/dockerode@3.3.29': + dependencies: + '@types/docker-modem': 3.0.6 + '@types/node': 18.19.33 + '@types/ssh2': 1.15.0 + + '@types/estree@1.0.5': {} + + '@types/fs-extra@11.0.4': + dependencies: + '@types/jsonfile': 6.1.4 + '@types/node': 18.19.33 + + '@types/glob@8.1.0': + dependencies: + '@types/minimatch': 5.1.2 + '@types/node': 18.19.33 + + '@types/json-diff@1.0.3': {} + + '@types/jsonfile@6.1.4': + dependencies: + '@types/node': 18.19.33 + + '@types/minimatch@5.1.2': {} + + '@types/minimist@1.2.5': {} + + '@types/node-fetch@2.6.11': + dependencies: + '@types/node': 18.19.33 + form-data: 4.0.0 + + '@types/node-forge@1.3.11': + dependencies: + '@types/node': 18.19.33 + + '@types/node@18.19.33': + dependencies: + undici-types: 5.26.5 + + '@types/pg@8.11.6': + dependencies: + '@types/node': 18.19.33 + pg-protocol: 1.6.1 + pg-types: 4.0.2 + + '@types/pg@8.6.6': + dependencies: + '@types/node': 18.19.33 + pg-protocol: 1.6.1 + pg-types: 2.2.0 + + '@types/pluralize@0.0.33': {} + + '@types/ps-tree@1.1.6': {} + + '@types/semver@7.5.8': {} + + '@types/ssh2@1.15.0': + dependencies: + '@types/node': 
18.19.33 + + '@types/uuid@9.0.8': {} + + '@types/which@3.0.3': {} + + '@types/ws@8.5.10': + dependencies: + '@types/node': 18.19.33 + + '@typescript-eslint/eslint-plugin@7.10.0(@typescript-eslint/parser@7.10.0(eslint@8.57.0)(typescript@5.4.5))(eslint@8.57.0)(typescript@5.4.5)': + dependencies: + '@eslint-community/regexpp': 4.10.0 + '@typescript-eslint/parser': 7.10.0(eslint@8.57.0)(typescript@5.4.5) + '@typescript-eslint/scope-manager': 7.10.0 + '@typescript-eslint/type-utils': 7.10.0(eslint@8.57.0)(typescript@5.4.5) + '@typescript-eslint/utils': 7.10.0(eslint@8.57.0)(typescript@5.4.5) + '@typescript-eslint/visitor-keys': 7.10.0 + eslint: 8.57.0 + graphemer: 1.4.0 + ignore: 5.3.1 + natural-compare: 1.4.0 + ts-api-utils: 1.3.0(typescript@5.4.5) + optionalDependencies: + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/parser@7.10.0(eslint@8.57.0)(typescript@5.4.5)': + dependencies: + '@typescript-eslint/scope-manager': 7.10.0 + '@typescript-eslint/types': 7.10.0 + '@typescript-eslint/typescript-estree': 7.10.0(typescript@5.4.5) + '@typescript-eslint/visitor-keys': 7.10.0 + debug: 4.3.4 + eslint: 8.57.0 + optionalDependencies: + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/scope-manager@7.10.0': + dependencies: + '@typescript-eslint/types': 7.10.0 + '@typescript-eslint/visitor-keys': 7.10.0 + + '@typescript-eslint/type-utils@7.10.0(eslint@8.57.0)(typescript@5.4.5)': + dependencies: + '@typescript-eslint/typescript-estree': 7.10.0(typescript@5.4.5) + '@typescript-eslint/utils': 7.10.0(eslint@8.57.0)(typescript@5.4.5) + debug: 4.3.4 + eslint: 8.57.0 + ts-api-utils: 1.3.0(typescript@5.4.5) + optionalDependencies: + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/types@7.10.0': {} + + '@typescript-eslint/typescript-estree@7.10.0(typescript@5.4.5)': + dependencies: + '@typescript-eslint/types': 7.10.0 + '@typescript-eslint/visitor-keys': 
7.10.0 + debug: 4.3.4 + globby: 11.1.0 + is-glob: 4.0.3 + minimatch: 9.0.4 + semver: 7.6.2 + ts-api-utils: 1.3.0(typescript@5.4.5) + optionalDependencies: + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/utils@7.10.0(eslint@8.57.0)(typescript@5.4.5)': + dependencies: + '@eslint-community/eslint-utils': 4.4.0(eslint@8.57.0) + '@typescript-eslint/scope-manager': 7.10.0 + '@typescript-eslint/types': 7.10.0 + '@typescript-eslint/typescript-estree': 7.10.0(typescript@5.4.5) + eslint: 8.57.0 + transitivePeerDependencies: + - supports-color + - typescript + + '@typescript-eslint/visitor-keys@7.10.0': + dependencies: + '@typescript-eslint/types': 7.10.0 + eslint-visitor-keys: 3.4.3 + + '@ungap/structured-clone@1.2.0': {} + + '@vercel/postgres@0.8.0': + dependencies: + '@neondatabase/serverless': 0.7.2 + bufferutil: 4.0.8 + utf-8-validate: 6.0.3 + ws: 8.14.2(bufferutil@4.0.8)(utf-8-validate@6.0.3) + + '@vitest/expect@1.6.0': + dependencies: + '@vitest/spy': 1.6.0 + '@vitest/utils': 1.6.0 + chai: 4.4.1 + + '@vitest/runner@1.6.0': + dependencies: + '@vitest/utils': 1.6.0 + p-limit: 5.0.0 + pathe: 1.1.2 + + '@vitest/snapshot@1.6.0': + dependencies: + magic-string: 0.30.10 + pathe: 1.1.2 + pretty-format: 29.7.0 + + '@vitest/spy@1.6.0': + dependencies: + tinyspy: 2.2.1 + + '@vitest/utils@1.6.0': + dependencies: + diff-sequences: 29.6.3 + estree-walker: 3.0.3 + loupe: 2.3.7 + pretty-format: 29.7.0 + + acorn-jsx@5.3.2(acorn@8.11.3): + dependencies: + acorn: 8.11.3 + + acorn-walk@8.3.2: {} + + acorn@8.11.3: {} + + aggregate-error@4.0.1: + dependencies: + clean-stack: 4.2.0 + indent-string: 5.0.0 + + ajv@6.12.6: + dependencies: + fast-deep-equal: 3.1.3 + fast-json-stable-stringify: 2.1.0 + json-schema-traverse: 0.4.1 + uri-js: 4.4.1 + + ansi-escapes@6.2.1: {} + + ansi-regex@5.0.1: {} + + ansi-regex@6.0.1: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + ansi-styles@5.2.0: {} + + ansi-styles@6.2.1: {} + + 
ansicolors@0.3.2: {} + + any-promise@1.3.0: {} + + anymatch@3.1.3: + dependencies: + normalize-path: 3.0.0 + picomatch: 2.3.1 + + argparse@1.0.10: + dependencies: + sprintf-js: 1.0.3 + + argparse@2.0.1: {} + + array-find-index@1.0.2: {} + + array-union@2.1.0: {} + + arrgv@1.0.2: {} + + arrify@3.0.0: {} + + as-table@1.0.55: + dependencies: + printable-characters: 1.0.42 + + asn1@0.2.6: + dependencies: + safer-buffer: 2.1.2 + + assertion-error@1.1.0: {} + + asynckit@0.4.0: {} + + ava@5.3.1: + dependencies: + acorn: 8.11.3 + acorn-walk: 8.3.2 + ansi-styles: 6.2.1 + arrgv: 1.0.2 + arrify: 3.0.0 + callsites: 4.1.0 + cbor: 8.1.0 + chalk: 5.3.0 + chokidar: 3.6.0 + chunkd: 2.0.1 + ci-info: 3.9.0 + ci-parallel-vars: 1.0.1 + clean-yaml-object: 0.1.0 + cli-truncate: 3.1.0 + code-excerpt: 4.0.0 + common-path-prefix: 3.0.0 + concordance: 5.0.4 + currently-unhandled: 0.4.1 + debug: 4.3.4 + emittery: 1.0.3 + figures: 5.0.0 + globby: 13.2.2 + ignore-by-default: 2.1.0 + indent-string: 5.0.0 + is-error: 2.2.2 + is-plain-object: 5.0.0 + is-promise: 4.0.0 + matcher: 5.0.0 + mem: 9.0.2 + ms: 2.1.3 + p-event: 5.0.1 + p-map: 5.5.0 + picomatch: 2.3.1 + pkg-conf: 4.0.0 + plur: 5.1.0 + pretty-ms: 8.0.0 + resolve-cwd: 3.0.0 + stack-utils: 2.0.6 + strip-ansi: 7.1.0 + supertap: 3.0.1 + temp-dir: 3.0.0 + write-file-atomic: 5.0.1 + yargs: 17.7.2 + transitivePeerDependencies: + - supports-color + + balanced-match@1.0.2: {} + + base64-js@1.5.1: {} + + bcrypt-pbkdf@1.0.2: + dependencies: + tweetnacl: 0.14.5 + + better-sqlite3@9.6.0: + dependencies: + bindings: 1.5.0 + prebuild-install: 7.1.2 + + binary-extensions@2.3.0: {} + + bindings@1.5.0: + dependencies: + file-uri-to-path: 1.0.0 + + bl@4.1.0: + dependencies: + buffer: 5.7.1 + inherits: 2.0.4 + readable-stream: 3.6.2 + + blake3-wasm@2.1.5: {} + + blueimp-md5@2.19.0: {} + + bowser@2.11.0: {} + + brace-expansion@1.1.11: + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + brace-expansion@2.0.1: + dependencies: + balanced-match: 1.0.2 + 
+ braces@3.0.2: + dependencies: + fill-range: 7.1.1 + + buffer-from@1.1.2: {} + + buffer@5.7.1: + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + + bufferutil@4.0.8: + dependencies: + node-gyp-build: 4.8.1 + + buildcheck@0.0.6: + optional: true + + bundle-require@4.1.0(esbuild@0.19.12): + dependencies: + esbuild: 0.19.12 + load-tsconfig: 0.2.5 + + cac@6.7.14: {} + + callsites@3.1.0: {} + + callsites@4.1.0: {} + + camelcase@7.0.1: {} + + capnp-ts@0.7.0: + dependencies: + debug: 4.3.4 + tslib: 2.6.2 + transitivePeerDependencies: + - supports-color + + cardinal@2.1.1: + dependencies: + ansicolors: 0.3.2 + redeyed: 2.1.1 + + cbor@8.1.0: + dependencies: + nofilter: 3.1.0 + + chai@4.4.1: + dependencies: + assertion-error: 1.1.0 + check-error: 1.0.3 + deep-eql: 4.1.3 + get-func-name: 2.0.2 + loupe: 2.3.7 + pathval: 1.1.1 + type-detect: 4.0.8 + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + chalk@5.3.0: {} + + char-regex@1.0.2: {} + + check-error@1.0.3: + dependencies: + get-func-name: 2.0.2 + + chokidar@3.6.0: + dependencies: + anymatch: 3.1.3 + braces: 3.0.2 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.3 + + chownr@1.1.4: {} + + chunkd@2.0.1: {} + + ci-info@3.9.0: {} + + ci-parallel-vars@1.0.1: {} + + clean-stack@4.2.0: + dependencies: + escape-string-regexp: 5.0.0 + + clean-yaml-object@0.1.0: {} + + cli-color@2.0.4: + dependencies: + d: 1.0.2 + es5-ext: 0.10.64 + es6-iterator: 2.0.3 + memoizee: 0.4.15 + timers-ext: 0.1.7 + + cli-table3@0.6.5: + dependencies: + string-width: 4.2.3 + optionalDependencies: + '@colors/colors': 1.5.0 + + cli-truncate@3.1.0: + dependencies: + slice-ansi: 5.0.0 + string-width: 5.1.2 + + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + + code-excerpt@4.0.0: + dependencies: + convert-to-spaces: 2.0.1 + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + 
color-name@1.1.4: {} + + colors@1.4.0: {} + + combined-stream@1.0.8: + dependencies: + delayed-stream: 1.0.0 + + commander@10.0.1: {} + + commander@12.1.0: {} + + commander@4.1.1: {} + + commander@9.5.0: {} + + common-path-prefix@3.0.0: {} + + concat-map@0.0.1: {} + + concordance@5.0.4: + dependencies: + date-time: 3.1.0 + esutils: 2.0.3 + fast-diff: 1.3.0 + js-string-escape: 1.0.1 + lodash: 4.17.21 + md5-hex: 3.0.1 + semver: 7.6.2 + well-known-symbols: 2.0.0 + + confbox@0.1.7: {} + + convert-to-spaces@2.0.1: {} + + cookie@0.5.0: {} + + copy-anything@3.0.5: + dependencies: + is-what: 4.1.16 + + cpu-features@0.0.10: + dependencies: + buildcheck: 0.0.6 + nan: 2.19.0 + optional: true + + cross-spawn@7.0.3: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + currently-unhandled@0.4.1: + dependencies: + array-find-index: 1.0.2 + + d@1.0.2: + dependencies: + es5-ext: 0.10.64 + type: 2.7.2 + + data-uri-to-buffer@2.0.2: {} + + data-uri-to-buffer@4.0.1: {} + + date-time@3.1.0: + dependencies: + time-zone: 1.0.0 + + debug@4.3.4: + dependencies: + ms: 2.1.2 + + decompress-response@6.0.0: + dependencies: + mimic-response: 3.1.0 + + deep-eql@4.1.3: + dependencies: + type-detect: 4.0.8 + + deep-extend@0.6.0: {} + + deep-is@0.1.4: {} + + delayed-stream@1.0.0: {} + + denque@2.1.0: {} + + detect-libc@2.0.2: + optional: true + + detect-libc@2.0.3: {} + + diff-sequences@29.6.3: {} + + difflib@0.2.4(patch_hash=jq4t3ysdpnbunjeje4v7nrqn2q): + dependencies: + heap: 0.2.7 + + dir-glob@3.0.1: + dependencies: + path-type: 4.0.0 + + docker-modem@3.0.8: + dependencies: + debug: 4.3.4 + readable-stream: 3.6.2 + split-ca: 1.0.1 + ssh2: 1.15.0 + transitivePeerDependencies: + - supports-color + + dockerode@3.3.5: + dependencies: + '@balena/dockerignore': 1.0.2 + docker-modem: 3.0.8 + tar-fs: 2.0.1 + transitivePeerDependencies: + - supports-color + + doctrine@3.0.0: + dependencies: + esutils: 2.0.3 + + dotenv@16.4.5: {} + + dreamopt@0.8.0: + dependencies: + wordwrap: 
1.0.0 + + drizzle-kit@0.21.2: + dependencies: + '@esbuild-kit/esm-loader': 2.6.5 + commander: 9.5.0 + env-paths: 3.0.0 + esbuild: 0.19.12 + esbuild-register: 3.5.0(esbuild@0.19.12) + glob: 8.1.0 + hanji: 0.0.5 + json-diff: 0.9.0 + zod: 3.23.8 + transitivePeerDependencies: + - supports-color + + drizzle-orm@0.32.0-85c8008(@aws-sdk/client-rds-data@3.577.0)(@cloudflare/workers-types@4.20240512.0)(@electric-sql/pglite@0.1.5)(@libsql/client@0.4.3(bufferutil@4.0.8)(utf-8-validate@6.0.3))(@neondatabase/serverless@0.9.3)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.10)(@types/pg@8.11.6)(@vercel/postgres@0.8.0)(better-sqlite3@9.6.0)(mysql2@2.3.3)(pg@8.11.5)(postgres@3.4.4): + optionalDependencies: + '@aws-sdk/client-rds-data': 3.577.0 + '@cloudflare/workers-types': 4.20240512.0 + '@electric-sql/pglite': 0.1.5 + '@libsql/client': 0.4.3(bufferutil@4.0.8)(utf-8-validate@6.0.3) + '@neondatabase/serverless': 0.9.3 + '@planetscale/database': 1.18.0 + '@types/better-sqlite3': 7.6.10 + '@types/pg': 8.11.6 + '@vercel/postgres': 0.8.0 + better-sqlite3: 9.6.0 + mysql2: 2.3.3 + pg: 8.11.5 + postgres: 3.4.4 + + duplexer@0.1.2: {} + + eastasianwidth@0.2.0: {} + + emittery@1.0.3: {} + + emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + + emojilib@2.4.0: {} + + end-of-stream@1.4.4: + dependencies: + once: 1.4.0 + + env-paths@3.0.0: {} + + es5-ext@0.10.64: + dependencies: + es6-iterator: 2.0.3 + es6-symbol: 3.1.4 + esniff: 2.0.1 + next-tick: 1.1.0 + + es6-iterator@2.0.3: + dependencies: + d: 1.0.2 + es5-ext: 0.10.64 + es6-symbol: 3.1.4 + + es6-symbol@3.1.4: + dependencies: + d: 1.0.2 + ext: 1.7.0 + + es6-weak-map@2.0.3: + dependencies: + d: 1.0.2 + es5-ext: 0.10.64 + es6-iterator: 2.0.3 + es6-symbol: 3.1.4 + + esbuild-android-64@0.14.54: + optional: true + + esbuild-android-arm64@0.14.54: + optional: true + + esbuild-darwin-64@0.14.54: + optional: true + + esbuild-darwin-arm64@0.14.54: + optional: true + + esbuild-freebsd-64@0.14.54: + optional: true + + 
esbuild-freebsd-arm64@0.14.54: + optional: true + + esbuild-linux-32@0.14.54: + optional: true + + esbuild-linux-64@0.14.54: + optional: true + + esbuild-linux-arm64@0.14.54: + optional: true + + esbuild-linux-arm@0.14.54: + optional: true + + esbuild-linux-mips64le@0.14.54: + optional: true + + esbuild-linux-ppc64le@0.14.54: + optional: true + + esbuild-linux-riscv64@0.14.54: + optional: true + + esbuild-linux-s390x@0.14.54: + optional: true + + esbuild-netbsd-64@0.14.54: + optional: true + + esbuild-node-externals@1.13.1(esbuild@0.19.12): + dependencies: + esbuild: 0.19.12 + find-up: 5.0.0 + tslib: 2.6.2 + + esbuild-openbsd-64@0.14.54: + optional: true + + esbuild-register@3.5.0(esbuild@0.19.12): + dependencies: + debug: 4.3.4 + esbuild: 0.19.12 + transitivePeerDependencies: + - supports-color + + esbuild-sunos-64@0.14.54: + optional: true + + esbuild-windows-32@0.14.54: + optional: true + + esbuild-windows-64@0.14.54: + optional: true + + esbuild-windows-arm64@0.14.54: + optional: true + + esbuild@0.14.54: + optionalDependencies: + '@esbuild/linux-loong64': 0.14.54 + esbuild-android-64: 0.14.54 + esbuild-android-arm64: 0.14.54 + esbuild-darwin-64: 0.14.54 + esbuild-darwin-arm64: 0.14.54 + esbuild-freebsd-64: 0.14.54 + esbuild-freebsd-arm64: 0.14.54 + esbuild-linux-32: 0.14.54 + esbuild-linux-64: 0.14.54 + esbuild-linux-arm: 0.14.54 + esbuild-linux-arm64: 0.14.54 + esbuild-linux-mips64le: 0.14.54 + esbuild-linux-ppc64le: 0.14.54 + esbuild-linux-riscv64: 0.14.54 + esbuild-linux-s390x: 0.14.54 + esbuild-netbsd-64: 0.14.54 + esbuild-openbsd-64: 0.14.54 + esbuild-sunos-64: 0.14.54 + esbuild-windows-32: 0.14.54 + esbuild-windows-64: 0.14.54 + esbuild-windows-arm64: 0.14.54 + + esbuild@0.17.19: + optionalDependencies: + '@esbuild/android-arm': 0.17.19 + '@esbuild/android-arm64': 0.17.19 + '@esbuild/android-x64': 0.17.19 + '@esbuild/darwin-arm64': 0.17.19 + '@esbuild/darwin-x64': 0.17.19 + '@esbuild/freebsd-arm64': 0.17.19 + '@esbuild/freebsd-x64': 0.17.19 + 
'@esbuild/linux-arm': 0.17.19 + '@esbuild/linux-arm64': 0.17.19 + '@esbuild/linux-ia32': 0.17.19 + '@esbuild/linux-loong64': 0.17.19 + '@esbuild/linux-mips64el': 0.17.19 + '@esbuild/linux-ppc64': 0.17.19 + '@esbuild/linux-riscv64': 0.17.19 + '@esbuild/linux-s390x': 0.17.19 + '@esbuild/linux-x64': 0.17.19 + '@esbuild/netbsd-x64': 0.17.19 + '@esbuild/openbsd-x64': 0.17.19 + '@esbuild/sunos-x64': 0.17.19 + '@esbuild/win32-arm64': 0.17.19 + '@esbuild/win32-ia32': 0.17.19 + '@esbuild/win32-x64': 0.17.19 + + esbuild@0.18.20: + optionalDependencies: + '@esbuild/android-arm': 0.18.20 + '@esbuild/android-arm64': 0.18.20 + '@esbuild/android-x64': 0.18.20 + '@esbuild/darwin-arm64': 0.18.20 + '@esbuild/darwin-x64': 0.18.20 + '@esbuild/freebsd-arm64': 0.18.20 + '@esbuild/freebsd-x64': 0.18.20 + '@esbuild/linux-arm': 0.18.20 + '@esbuild/linux-arm64': 0.18.20 + '@esbuild/linux-ia32': 0.18.20 + '@esbuild/linux-loong64': 0.18.20 + '@esbuild/linux-mips64el': 0.18.20 + '@esbuild/linux-ppc64': 0.18.20 + '@esbuild/linux-riscv64': 0.18.20 + '@esbuild/linux-s390x': 0.18.20 + '@esbuild/linux-x64': 0.18.20 + '@esbuild/netbsd-x64': 0.18.20 + '@esbuild/openbsd-x64': 0.18.20 + '@esbuild/sunos-x64': 0.18.20 + '@esbuild/win32-arm64': 0.18.20 + '@esbuild/win32-ia32': 0.18.20 + '@esbuild/win32-x64': 0.18.20 + + esbuild@0.19.12: + optionalDependencies: + '@esbuild/aix-ppc64': 0.19.12 + '@esbuild/android-arm': 0.19.12 + '@esbuild/android-arm64': 0.19.12 + '@esbuild/android-x64': 0.19.12 + '@esbuild/darwin-arm64': 0.19.12 + '@esbuild/darwin-x64': 0.19.12 + '@esbuild/freebsd-arm64': 0.19.12 + '@esbuild/freebsd-x64': 0.19.12 + '@esbuild/linux-arm': 0.19.12 + '@esbuild/linux-arm64': 0.19.12 + '@esbuild/linux-ia32': 0.19.12 + '@esbuild/linux-loong64': 0.19.12 + '@esbuild/linux-mips64el': 0.19.12 + '@esbuild/linux-ppc64': 0.19.12 + '@esbuild/linux-riscv64': 0.19.12 + '@esbuild/linux-s390x': 0.19.12 + '@esbuild/linux-x64': 0.19.12 + '@esbuild/netbsd-x64': 0.19.12 + '@esbuild/openbsd-x64': 0.19.12 + 
'@esbuild/sunos-x64': 0.19.12 + '@esbuild/win32-arm64': 0.19.12 + '@esbuild/win32-ia32': 0.19.12 + '@esbuild/win32-x64': 0.19.12 + + esbuild@0.20.2: + optionalDependencies: + '@esbuild/aix-ppc64': 0.20.2 + '@esbuild/android-arm': 0.20.2 + '@esbuild/android-arm64': 0.20.2 + '@esbuild/android-x64': 0.20.2 + '@esbuild/darwin-arm64': 0.20.2 + '@esbuild/darwin-x64': 0.20.2 + '@esbuild/freebsd-arm64': 0.20.2 + '@esbuild/freebsd-x64': 0.20.2 + '@esbuild/linux-arm': 0.20.2 + '@esbuild/linux-arm64': 0.20.2 + '@esbuild/linux-ia32': 0.20.2 + '@esbuild/linux-loong64': 0.20.2 + '@esbuild/linux-mips64el': 0.20.2 + '@esbuild/linux-ppc64': 0.20.2 + '@esbuild/linux-riscv64': 0.20.2 + '@esbuild/linux-s390x': 0.20.2 + '@esbuild/linux-x64': 0.20.2 + '@esbuild/netbsd-x64': 0.20.2 + '@esbuild/openbsd-x64': 0.20.2 + '@esbuild/sunos-x64': 0.20.2 + '@esbuild/win32-arm64': 0.20.2 + '@esbuild/win32-ia32': 0.20.2 + '@esbuild/win32-x64': 0.20.2 + + escalade@3.1.2: {} + + escape-string-regexp@2.0.0: {} + + escape-string-regexp@4.0.0: {} + + escape-string-regexp@5.0.0: {} + + eslint-config-prettier@9.1.0(eslint@8.57.0): + dependencies: + eslint: 8.57.0 + + eslint-plugin-prettier@5.1.3(eslint-config-prettier@9.1.0(eslint@8.57.0))(eslint@8.57.0)(prettier@2.8.8): + dependencies: + eslint: 8.57.0 + prettier: 2.8.8 + prettier-linter-helpers: 1.0.0 + synckit: 0.8.8 + optionalDependencies: + eslint-config-prettier: 9.1.0(eslint@8.57.0) + + eslint-scope@7.2.2: + dependencies: + esrecurse: 4.3.0 + estraverse: 5.3.0 + + eslint-visitor-keys@3.4.3: {} + + eslint@8.57.0: + dependencies: + '@eslint-community/eslint-utils': 4.4.0(eslint@8.57.0) + '@eslint-community/regexpp': 4.10.0 + '@eslint/eslintrc': 2.1.4 + '@eslint/js': 8.57.0 + '@humanwhocodes/config-array': 0.11.14 + '@humanwhocodes/module-importer': 1.0.1 + '@nodelib/fs.walk': 1.2.8 + '@ungap/structured-clone': 1.2.0 + ajv: 6.12.6 + chalk: 4.1.2 + cross-spawn: 7.0.3 + debug: 4.3.4 + doctrine: 3.0.0 + escape-string-regexp: 4.0.0 + eslint-scope: 7.2.2 + 
eslint-visitor-keys: 3.4.3 + espree: 9.6.1 + esquery: 1.5.0 + esutils: 2.0.3 + fast-deep-equal: 3.1.3 + file-entry-cache: 6.0.1 + find-up: 5.0.0 + glob-parent: 6.0.2 + globals: 13.24.0 + graphemer: 1.4.0 + ignore: 5.3.1 + imurmurhash: 0.1.4 + is-glob: 4.0.3 + is-path-inside: 3.0.3 + js-yaml: 4.1.0 + json-stable-stringify-without-jsonify: 1.0.1 + levn: 0.4.1 + lodash.merge: 4.6.2 + minimatch: 3.1.2 + natural-compare: 1.4.0 + optionator: 0.9.4 + strip-ansi: 6.0.1 + text-table: 0.2.0 + transitivePeerDependencies: + - supports-color + + esniff@2.0.1: + dependencies: + d: 1.0.2 + es5-ext: 0.10.64 + event-emitter: 0.3.5 + type: 2.7.2 + + espree@9.6.1: + dependencies: + acorn: 8.11.3 + acorn-jsx: 5.3.2(acorn@8.11.3) + eslint-visitor-keys: 3.4.3 + + esprima@4.0.1: {} + + esquery@1.5.0: + dependencies: + estraverse: 5.3.0 + + esrecurse@4.3.0: + dependencies: + estraverse: 5.3.0 + + estraverse@5.3.0: {} + + estree-walker@0.6.1: {} + + estree-walker@3.0.3: + dependencies: + '@types/estree': 1.0.5 + + esutils@2.0.3: {} + + event-emitter@0.3.5: + dependencies: + d: 1.0.2 + es5-ext: 0.10.64 + + event-stream@3.3.4: + dependencies: + duplexer: 0.1.2 + from: 0.1.7 + map-stream: 0.1.0 + pause-stream: 0.0.11 + split: 0.3.3 + stream-combiner: 0.0.4 + through: 2.3.8 + + execa@5.1.1: + dependencies: + cross-spawn: 7.0.3 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + + execa@8.0.1: + dependencies: + cross-spawn: 7.0.3 + get-stream: 8.0.1 + human-signals: 5.0.0 + is-stream: 3.0.0 + merge-stream: 2.0.0 + npm-run-path: 5.3.0 + onetime: 6.0.0 + signal-exit: 4.1.0 + strip-final-newline: 3.0.0 + + exit-hook@2.2.1: {} + + expand-template@2.0.3: {} + + ext@1.7.0: + dependencies: + type: 2.7.2 + + fast-deep-equal@3.1.3: {} + + fast-diff@1.3.0: {} + + fast-glob@3.3.2: + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 
1.4.1 + micromatch: 4.0.5 + + fast-json-stable-stringify@2.1.0: {} + + fast-levenshtein@2.0.6: {} + + fast-xml-parser@4.2.5: + dependencies: + strnum: 1.0.5 + + fastq@1.17.1: + dependencies: + reusify: 1.0.4 + + fetch-blob@3.2.0: + dependencies: + node-domexception: 1.0.0 + web-streams-polyfill: 3.3.3 + + fflate@0.8.2: {} + + figures@5.0.0: + dependencies: + escape-string-regexp: 5.0.0 + is-unicode-supported: 1.3.0 + + file-entry-cache@6.0.1: + dependencies: + flat-cache: 3.2.0 + + file-uri-to-path@1.0.0: {} + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + + find-up@5.0.0: + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + + find-up@6.3.0: + dependencies: + locate-path: 7.2.0 + path-exists: 5.0.0 + + flat-cache@3.2.0: + dependencies: + flatted: 3.3.1 + keyv: 4.5.4 + rimraf: 3.0.2 + + flatted@3.3.1: {} + + foreground-child@3.1.1: + dependencies: + cross-spawn: 7.0.3 + signal-exit: 4.1.0 + + form-data@4.0.0: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + + formdata-polyfill@4.0.10: + dependencies: + fetch-blob: 3.2.0 + + from@0.1.7: {} + + fs-constants@1.0.0: {} + + fs-extra@11.2.0: + dependencies: + graceful-fs: 4.2.11 + jsonfile: 6.1.0 + universalify: 2.0.1 + + fs.realpath@1.0.0: {} + + fsevents@2.3.3: + optional: true + + function-bind@1.1.2: {} + + fx@34.0.0: {} + + generate-function@2.3.1: + dependencies: + is-property: 1.0.2 + + get-caller-file@2.0.5: {} + + get-func-name@2.0.2: {} + + get-port@6.1.2: {} + + get-source@2.0.12: + dependencies: + data-uri-to-buffer: 2.0.2 + source-map: 0.6.1 + + get-stream@6.0.1: {} + + get-stream@8.0.1: {} + + get-tsconfig@4.7.5: + dependencies: + resolve-pkg-maps: 1.0.0 + + github-from-package@0.0.0: {} + + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + glob-parent@6.0.2: + dependencies: + is-glob: 4.0.3 + + glob-to-regexp@0.4.1: {} + + glob@10.3.15: + dependencies: + foreground-child: 3.1.1 + jackspeak: 2.3.6 + minimatch: 9.0.4 + minipass: 7.1.1 + path-scurry: 
1.11.1 + + glob@7.2.3: + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + + glob@8.1.0: + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 5.1.6 + once: 1.4.0 + + globals@13.24.0: + dependencies: + type-fest: 0.20.2 + + globby@11.1.0: + dependencies: + array-union: 2.1.0 + dir-glob: 3.0.1 + fast-glob: 3.3.2 + ignore: 5.3.1 + merge2: 1.4.1 + slash: 3.0.0 + + globby@13.2.2: + dependencies: + dir-glob: 3.0.1 + fast-glob: 3.3.2 + ignore: 5.3.1 + merge2: 1.4.1 + slash: 4.0.0 + + globrex@0.1.2: {} + + graceful-fs@4.2.11: {} + + graphemer@1.4.0: {} + + hanji@0.0.5: + dependencies: + lodash.throttle: 4.1.1 + sisteransi: 1.0.5 + + has-flag@4.0.0: {} + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + heap@0.2.7: {} + + hono@4.3.9: {} + + human-signals@2.1.0: {} + + human-signals@5.0.0: {} + + iconv-lite@0.6.3: + dependencies: + safer-buffer: 2.1.2 + + ieee754@1.2.1: {} + + ignore-by-default@2.1.0: {} + + ignore@5.3.1: {} + + import-fresh@3.3.0: + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + + imurmurhash@0.1.4: {} + + indent-string@5.0.0: {} + + inflight@1.0.6: + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + + inherits@2.0.4: {} + + ini@1.3.8: {} + + irregular-plurals@3.5.0: {} + + is-binary-path@2.1.0: + dependencies: + binary-extensions: 2.3.0 + + is-core-module@2.13.1: + dependencies: + hasown: 2.0.2 + + is-error@2.2.2: {} + + is-extglob@2.1.1: {} + + is-fullwidth-code-point@3.0.0: {} + + is-fullwidth-code-point@4.0.0: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-number@7.0.0: {} + + is-path-inside@3.0.3: {} + + is-plain-object@5.0.0: {} + + is-promise@2.2.2: {} + + is-promise@4.0.0: {} + + is-property@1.0.2: {} + + is-stream@2.0.1: {} + + is-stream@3.0.0: {} + + is-unicode-supported@1.3.0: {} + + is-what@4.1.16: {} + + isexe@2.0.0: {} + + jackspeak@2.3.6: + dependencies: + '@isaacs/cliui': 8.0.2 + 
optionalDependencies: + '@pkgjs/parseargs': 0.11.0 + + joycon@3.1.1: {} + + js-base64@3.7.7: {} + + js-string-escape@1.0.1: {} + + js-tokens@9.0.0: {} + + js-yaml@3.14.1: + dependencies: + argparse: 1.0.10 + esprima: 4.0.1 + + js-yaml@4.1.0: + dependencies: + argparse: 2.0.1 + + json-buffer@3.0.1: {} + + json-diff@0.9.0: + dependencies: + cli-color: 2.0.4 + difflib: 0.2.4(patch_hash=jq4t3ysdpnbunjeje4v7nrqn2q) + dreamopt: 0.8.0 + + json-diff@1.0.6: + dependencies: + '@ewoudenberg/difflib': 0.1.0 + colors: 1.4.0 + dreamopt: 0.8.0 + + json-schema-traverse@0.4.1: {} + + json-stable-stringify-without-jsonify@1.0.1: {} + + jsonfile@6.1.0: + dependencies: + universalify: 2.0.1 + optionalDependencies: + graceful-fs: 4.2.11 + + keyv@4.5.4: + dependencies: + json-buffer: 3.0.1 + + levn@0.4.1: + dependencies: + prelude-ls: 1.2.1 + type-check: 0.4.0 + + libsql@0.2.0: + dependencies: + '@neon-rs/load': 0.0.4 + detect-libc: 2.0.2 + optionalDependencies: + '@libsql/darwin-arm64': 0.2.0 + '@libsql/darwin-x64': 0.2.0 + '@libsql/linux-arm64-gnu': 0.2.0 + '@libsql/linux-arm64-musl': 0.2.0 + '@libsql/linux-x64-gnu': 0.2.0 + '@libsql/linux-x64-musl': 0.2.0 + '@libsql/win32-x64-msvc': 0.2.0 + optional: true + + lilconfig@3.1.1: {} + + lines-and-columns@1.2.4: {} + + load-json-file@7.0.1: {} + + load-tsconfig@0.2.5: {} + + local-pkg@0.5.0: + dependencies: + mlly: 1.7.0 + pkg-types: 1.1.1 + + locate-path@6.0.0: + dependencies: + p-locate: 5.0.0 + + locate-path@7.2.0: + dependencies: + p-locate: 6.0.0 + + lodash.merge@4.6.2: {} + + lodash.sortby@4.7.0: {} + + lodash.throttle@4.1.1: {} + + lodash@4.17.21: {} + + long@4.0.0: {} + + loupe@2.3.7: + dependencies: + get-func-name: 2.0.2 + + lru-cache@10.2.2: {} + + lru-cache@6.0.0: + dependencies: + yallist: 4.0.0 + + lru-cache@7.18.3: {} + + lru-queue@0.1.0: + dependencies: + es5-ext: 0.10.64 + + magic-string@0.25.9: + dependencies: + sourcemap-codec: 1.4.8 + + magic-string@0.30.10: + dependencies: + '@jridgewell/sourcemap-codec': 1.4.15 + + 
map-age-cleaner@0.1.3: + dependencies: + p-defer: 1.0.0 + + map-stream@0.1.0: {} + + marked-terminal@6.2.0(marked@9.1.6): + dependencies: + ansi-escapes: 6.2.1 + cardinal: 2.1.1 + chalk: 5.3.0 + cli-table3: 0.6.5 + marked: 9.1.6 + node-emoji: 2.1.3 + supports-hyperlinks: 3.0.0 + + marked@9.1.6: {} + + matcher@5.0.0: + dependencies: + escape-string-regexp: 5.0.0 + + md5-hex@3.0.1: + dependencies: + blueimp-md5: 2.19.0 + + mem@9.0.2: + dependencies: + map-age-cleaner: 0.1.3 + mimic-fn: 4.0.0 + + memoizee@0.4.15: + dependencies: + d: 1.0.2 + es5-ext: 0.10.64 + es6-weak-map: 2.0.3 + event-emitter: 0.3.5 + is-promise: 2.2.2 + lru-queue: 0.1.0 + next-tick: 1.1.0 + timers-ext: 0.1.7 + + merge-stream@2.0.0: {} + + merge2@1.4.1: {} + + micromatch@4.0.5: + dependencies: + braces: 3.0.2 + picomatch: 2.3.1 + + mime-db@1.52.0: {} + + mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + + mime@3.0.0: {} + + mimic-fn@2.1.0: {} + + mimic-fn@4.0.0: {} + + mimic-response@3.1.0: {} + + miniflare@3.20240512.0(bufferutil@4.0.8)(utf-8-validate@6.0.3): + dependencies: + '@cspotcode/source-map-support': 0.8.1 + acorn: 8.11.3 + acorn-walk: 8.3.2 + capnp-ts: 0.7.0 + exit-hook: 2.2.1 + glob-to-regexp: 0.4.1 + stoppable: 1.1.0 + undici: 5.28.4 + workerd: 1.20240512.0 + ws: 8.17.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) + youch: 3.3.3 + zod: 3.23.8 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + + minimatch@3.1.2: + dependencies: + brace-expansion: 1.1.11 + + minimatch@5.1.6: + dependencies: + brace-expansion: 2.0.1 + + minimatch@7.4.6: + dependencies: + brace-expansion: 2.0.1 + + minimatch@9.0.4: + dependencies: + brace-expansion: 2.0.1 + + minimist@1.2.8: {} + + minipass@7.1.1: {} + + mkdirp-classic@0.5.3: {} + + mlly@1.7.0: + dependencies: + acorn: 8.11.3 + pathe: 1.1.2 + pkg-types: 1.1.1 + ufo: 1.5.3 + + ms@2.1.2: {} + + ms@2.1.3: {} + + mustache@4.2.0: {} + + mysql2@2.3.3: + dependencies: + denque: 2.1.0 + generate-function: 2.3.1 + iconv-lite: 0.6.3 
+ long: 4.0.0 + lru-cache: 6.0.0 + named-placeholders: 1.1.3 + seq-queue: 0.0.5 + sqlstring: 2.3.3 + + mz@2.7.0: + dependencies: + any-promise: 1.3.0 + object-assign: 4.1.1 + thenify-all: 1.6.0 + + named-placeholders@1.1.3: + dependencies: + lru-cache: 7.18.3 + + nan@2.19.0: + optional: true + + nanoid@3.3.7: {} + + napi-build-utils@1.0.2: {} + + natural-compare@1.4.0: {} + + next-tick@1.1.0: {} + + node-abi@3.62.0: + dependencies: + semver: 7.6.2 + + node-domexception@1.0.0: {} + + node-emoji@2.1.3: + dependencies: + '@sindresorhus/is': 4.6.0 + char-regex: 1.0.2 + emojilib: 2.4.0 + skin-tone: 2.0.0 + + node-fetch@2.7.0: + dependencies: + whatwg-url: 5.0.0 + + node-fetch@3.3.1: + dependencies: + data-uri-to-buffer: 4.0.1 + fetch-blob: 3.2.0 + formdata-polyfill: 4.0.10 + + node-fetch@3.3.2: + dependencies: + data-uri-to-buffer: 4.0.1 + fetch-blob: 3.2.0 + formdata-polyfill: 4.0.10 + + node-forge@1.3.1: {} + + node-gyp-build@4.8.1: {} + + nofilter@3.1.0: {} + + normalize-path@3.0.0: {} + + npm-run-path@4.0.1: + dependencies: + path-key: 3.1.1 + + npm-run-path@5.3.0: + dependencies: + path-key: 4.0.0 + + object-assign@4.1.1: {} + + obuf@1.1.2: {} + + once@1.4.0: + dependencies: + wrappy: 1.0.2 + + onetime@5.1.2: + dependencies: + mimic-fn: 2.1.0 + + onetime@6.0.0: + dependencies: + mimic-fn: 4.0.0 + + optionator@0.9.4: + dependencies: + deep-is: 0.1.4 + fast-levenshtein: 2.0.6 + levn: 0.4.1 + prelude-ls: 1.2.1 + type-check: 0.4.0 + word-wrap: 1.2.5 + + p-defer@1.0.0: {} + + p-event@5.0.1: + dependencies: + p-timeout: 5.1.0 + + p-limit@3.1.0: + dependencies: + yocto-queue: 0.1.0 + + p-limit@4.0.0: + dependencies: + yocto-queue: 1.0.0 + + p-limit@5.0.0: + dependencies: + yocto-queue: 1.0.0 + + p-locate@5.0.0: + dependencies: + p-limit: 3.1.0 + + p-locate@6.0.0: + dependencies: + p-limit: 4.0.0 + + p-map@5.5.0: + dependencies: + aggregate-error: 4.0.1 + + p-timeout@5.1.0: {} + + parent-module@1.0.1: + dependencies: + callsites: 3.1.0 + + parse-ms@3.0.0: {} + + 
path-exists@4.0.0: {} + + path-exists@5.0.0: {} + + path-is-absolute@1.0.1: {} + + path-key@3.1.1: {} + + path-key@4.0.0: {} + + path-parse@1.0.7: {} + + path-scurry@1.11.1: + dependencies: + lru-cache: 10.2.2 + minipass: 7.1.1 + + path-to-regexp@6.2.2: {} + + path-type@4.0.0: {} + + pathe@1.1.2: {} + + pathval@1.1.1: {} + + pause-stream@0.0.11: + dependencies: + through: 2.3.8 + + pg-cloudflare@1.1.1: + optional: true + + pg-connection-string@2.6.4: {} + + pg-int8@1.0.1: {} + + pg-numeric@1.0.2: {} + + pg-pool@3.6.2(pg@8.11.5): + dependencies: + pg: 8.11.5 + + pg-protocol@1.6.1: {} + + pg-types@2.2.0: + dependencies: + pg-int8: 1.0.1 + postgres-array: 2.0.0 + postgres-bytea: 1.0.0 + postgres-date: 1.0.7 + postgres-interval: 1.2.0 + + pg-types@4.0.2: + dependencies: + pg-int8: 1.0.1 + pg-numeric: 1.0.2 + postgres-array: 3.0.2 + postgres-bytea: 3.0.0 + postgres-date: 2.1.0 + postgres-interval: 3.0.0 + postgres-range: 1.1.4 + + pg@8.11.5: + dependencies: + pg-connection-string: 2.6.4 + pg-pool: 3.6.2(pg@8.11.5) + pg-protocol: 1.6.1 + pg-types: 2.2.0 + pgpass: 1.0.5 + optionalDependencies: + pg-cloudflare: 1.1.1 + + pgpass@1.0.5: + dependencies: + split2: 4.2.0 + + picocolors@1.0.1: {} + + picomatch@2.3.1: {} + + pirates@4.0.6: {} + + pkg-conf@4.0.0: + dependencies: + find-up: 6.3.0 + load-json-file: 7.0.1 + + pkg-types@1.1.1: + dependencies: + confbox: 0.1.7 + mlly: 1.7.0 + pathe: 1.1.2 + + plur@5.1.0: + dependencies: + irregular-plurals: 3.5.0 + + pluralize@8.0.0: {} + + postcss-load-config@4.0.2(postcss@8.4.38): + dependencies: + lilconfig: 3.1.1 + yaml: 2.4.2 + optionalDependencies: + postcss: 8.4.38 + + postcss@8.4.38: + dependencies: + nanoid: 3.3.7 + picocolors: 1.0.1 + source-map-js: 1.2.0 + + postgres-array@2.0.0: {} + + postgres-array@3.0.2: {} + + postgres-bytea@1.0.0: {} + + postgres-bytea@3.0.0: + dependencies: + obuf: 1.1.2 + + postgres-date@1.0.7: {} + + postgres-date@2.1.0: {} + + postgres-interval@1.2.0: + dependencies: + xtend: 4.0.2 + + 
postgres-interval@3.0.0: {} + + postgres-range@1.1.4: {} + + postgres@3.4.4: {} + + prebuild-install@7.1.2: + dependencies: + detect-libc: 2.0.3 + expand-template: 2.0.3 + github-from-package: 0.0.0 + minimist: 1.2.8 + mkdirp-classic: 0.5.3 + napi-build-utils: 1.0.2 + node-abi: 3.62.0 + pump: 3.0.0 + rc: 1.2.8 + simple-get: 4.0.1 + tar-fs: 2.1.1 + tunnel-agent: 0.6.0 + + prelude-ls@1.2.1: {} + + prettier-linter-helpers@1.0.0: + dependencies: + fast-diff: 1.3.0 + + prettier@2.8.8: {} + + pretty-format@29.7.0: + dependencies: + '@jest/schemas': 29.6.3 + ansi-styles: 5.2.0 + react-is: 18.3.1 + + pretty-ms@8.0.0: + dependencies: + parse-ms: 3.0.0 + + printable-characters@1.0.42: {} + + ps-tree@1.2.0: + dependencies: + event-stream: 3.3.4 + + pump@3.0.0: + dependencies: + end-of-stream: 1.4.4 + once: 1.4.0 + + punycode@2.3.1: {} + + queue-microtask@1.2.3: {} + + rc@1.2.8: + dependencies: + deep-extend: 0.6.0 + ini: 1.3.8 + minimist: 1.2.8 + strip-json-comments: 2.0.1 + + react-is@18.3.1: {} + + readable-stream@3.6.2: + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + + readdirp@3.6.0: + dependencies: + picomatch: 2.3.1 + + redeyed@2.1.1: + dependencies: + esprima: 4.0.1 + + require-directory@2.1.1: {} + + resolve-cwd@3.0.0: + dependencies: + resolve-from: 5.0.0 + + resolve-from@4.0.0: {} + + resolve-from@5.0.0: {} + + resolve-pkg-maps@1.0.0: {} + + resolve.exports@2.0.2: {} + + resolve@1.22.8: + dependencies: + is-core-module: 2.13.1 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + + reusify@1.0.4: {} + + rimraf@3.0.2: + dependencies: + glob: 7.2.3 + + rollup-plugin-inject@3.0.2: + dependencies: + estree-walker: 0.6.1 + magic-string: 0.25.9 + rollup-pluginutils: 2.8.2 + + rollup-plugin-node-polyfills@0.2.1: + dependencies: + rollup-plugin-inject: 3.0.2 + + rollup-pluginutils@2.8.2: + dependencies: + estree-walker: 0.6.1 + + rollup@4.17.2: + dependencies: + '@types/estree': 1.0.5 + optionalDependencies: + 
'@rollup/rollup-android-arm-eabi': 4.17.2 + '@rollup/rollup-android-arm64': 4.17.2 + '@rollup/rollup-darwin-arm64': 4.17.2 + '@rollup/rollup-darwin-x64': 4.17.2 + '@rollup/rollup-linux-arm-gnueabihf': 4.17.2 + '@rollup/rollup-linux-arm-musleabihf': 4.17.2 + '@rollup/rollup-linux-arm64-gnu': 4.17.2 + '@rollup/rollup-linux-arm64-musl': 4.17.2 + '@rollup/rollup-linux-powerpc64le-gnu': 4.17.2 + '@rollup/rollup-linux-riscv64-gnu': 4.17.2 + '@rollup/rollup-linux-s390x-gnu': 4.17.2 + '@rollup/rollup-linux-x64-gnu': 4.17.2 + '@rollup/rollup-linux-x64-musl': 4.17.2 + '@rollup/rollup-win32-arm64-msvc': 4.17.2 + '@rollup/rollup-win32-ia32-msvc': 4.17.2 + '@rollup/rollup-win32-x64-msvc': 4.17.2 + fsevents: 2.3.3 + + run-parallel@1.2.0: + dependencies: + queue-microtask: 1.2.3 + + safe-buffer@5.2.1: {} + + safer-buffer@2.1.2: {} + + selfsigned@2.4.1: + dependencies: + '@types/node-forge': 1.3.11 + node-forge: 1.3.1 + + semver@7.6.2: {} + + seq-queue@0.0.5: {} + + serialize-error@7.0.1: + dependencies: + type-fest: 0.13.1 + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + siginfo@2.0.0: {} + + signal-exit@3.0.7: {} + + signal-exit@4.1.0: {} + + simple-concat@1.0.1: {} + + simple-get@4.0.1: + dependencies: + decompress-response: 6.0.0 + once: 1.4.0 + simple-concat: 1.0.1 + + sisteransi@1.0.5: {} + + skin-tone@2.0.0: + dependencies: + unicode-emoji-modifier-base: 1.0.0 + + slash@3.0.0: {} + + slash@4.0.0: {} + + slice-ansi@5.0.0: + dependencies: + ansi-styles: 6.2.1 + is-fullwidth-code-point: 4.0.0 + + source-map-js@1.2.0: {} + + source-map-support@0.5.21: + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + + source-map@0.6.1: {} + + source-map@0.8.0-beta.0: + dependencies: + whatwg-url: 7.1.0 + + sourcemap-codec@1.4.8: {} + + split-ca@1.0.1: {} + + split2@4.2.0: {} + + split@0.3.3: + dependencies: + through: 2.3.8 + + sprintf-js@1.0.3: {} + + sqlstring@2.3.3: {} + + ssh2@1.15.0: + dependencies: + asn1: 0.2.6 + bcrypt-pbkdf: 
1.0.2 + optionalDependencies: + cpu-features: 0.0.10 + nan: 2.19.0 + + stack-utils@2.0.6: + dependencies: + escape-string-regexp: 2.0.0 + + stackback@0.0.2: {} + + stacktracey@2.1.8: + dependencies: + as-table: 1.0.55 + get-source: 2.0.12 + + std-env@3.7.0: {} + + stoppable@1.1.0: {} + + stream-combiner@0.0.4: + dependencies: + duplexer: 0.1.2 + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + emoji-regex: 9.2.2 + strip-ansi: 7.1.0 + + string_decoder@1.3.0: + dependencies: + safe-buffer: 5.2.1 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-ansi@7.1.0: + dependencies: + ansi-regex: 6.0.1 + + strip-final-newline@2.0.0: {} + + strip-final-newline@3.0.0: {} + + strip-json-comments@2.0.1: {} + + strip-json-comments@3.1.1: {} + + strip-literal@2.1.0: + dependencies: + js-tokens: 9.0.0 + + strnum@1.0.5: {} + + sucrase@3.35.0: + dependencies: + '@jridgewell/gen-mapping': 0.3.5 + commander: 4.1.1 + glob: 10.3.15 + lines-and-columns: 1.2.4 + mz: 2.7.0 + pirates: 4.0.6 + ts-interface-checker: 0.1.13 + + superjson@2.2.1: + dependencies: + copy-anything: 3.0.5 + + supertap@3.0.1: + dependencies: + indent-string: 5.0.0 + js-yaml: 3.14.1 + serialize-error: 7.0.1 + strip-ansi: 7.1.0 + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + supports-hyperlinks@3.0.0: + dependencies: + has-flag: 4.0.0 + supports-color: 7.2.0 + + supports-preserve-symlinks-flag@1.0.0: {} + + synckit@0.8.8: + dependencies: + '@pkgr/core': 0.1.1 + tslib: 2.6.2 + + tar-fs@2.0.1: + dependencies: + chownr: 1.1.4 + mkdirp-classic: 0.5.3 + pump: 3.0.0 + tar-stream: 2.2.0 + + tar-fs@2.1.1: + dependencies: + chownr: 1.1.4 + mkdirp-classic: 0.5.3 + pump: 3.0.0 + tar-stream: 2.2.0 + + tar-stream@2.2.0: + dependencies: + bl: 4.1.0 + end-of-stream: 1.4.4 + fs-constants: 1.0.0 + inherits: 2.0.4 + readable-stream: 3.6.2 + + temp-dir@3.0.0: {} + + 
text-table@0.2.0: {} + + thenify-all@1.6.0: + dependencies: + thenify: 3.3.1 + + thenify@3.3.1: + dependencies: + any-promise: 1.3.0 + + through@2.3.8: {} + + time-zone@1.0.0: {} + + timers-ext@0.1.7: + dependencies: + es5-ext: 0.10.64 + next-tick: 1.1.0 + + tinybench@2.8.0: {} + + tinypool@0.8.4: {} + + tinyspy@2.2.1: {} + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + tr46@0.0.3: {} + + tr46@1.0.1: + dependencies: + punycode: 2.3.1 + + tree-kill@1.2.2: {} + + ts-api-utils@1.3.0(typescript@5.4.5): + dependencies: + typescript: 5.4.5 + + ts-expose-internals-conditionally@1.0.0-empty.0: {} + + ts-interface-checker@0.1.13: {} + + tsconfck@3.0.3(typescript@5.4.5): + optionalDependencies: + typescript: 5.4.5 + + tslib@1.14.1: {} + + tslib@2.6.2: {} + + tsup@8.0.2(postcss@8.4.38)(typescript@5.4.5): + dependencies: + bundle-require: 4.1.0(esbuild@0.19.12) + cac: 6.7.14 + chokidar: 3.6.0 + debug: 4.3.4 + esbuild: 0.19.12 + execa: 5.1.1 + globby: 11.1.0 + joycon: 3.1.1 + postcss-load-config: 4.0.2(postcss@8.4.38) + resolve-from: 5.0.0 + rollup: 4.17.2 + source-map: 0.8.0-beta.0 + sucrase: 3.35.0 + tree-kill: 1.2.2 + optionalDependencies: + postcss: 8.4.38 + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + - ts-node + + tsx@3.14.0: + dependencies: + esbuild: 0.18.20 + get-tsconfig: 4.7.5 + source-map-support: 0.5.21 + optionalDependencies: + fsevents: 2.3.3 + + tunnel-agent@0.6.0: + dependencies: + safe-buffer: 5.2.1 + + tweetnacl@0.14.5: {} + + type-check@0.4.0: + dependencies: + prelude-ls: 1.2.1 + + type-detect@4.0.8: {} + + type-fest@0.13.1: {} + + type-fest@0.20.2: {} + + type@2.7.2: {} + + typescript@5.3.3: {} + + typescript@5.4.5: {} + + ufo@1.5.3: {} + + undici-types@5.26.5: {} + + undici@5.28.4: + dependencies: + '@fastify/busboy': 2.1.1 + + unicode-emoji-modifier-base@1.0.0: {} + + universalify@2.0.1: {} + + uri-js@4.4.1: + dependencies: + punycode: 2.3.1 + + utf-8-validate@6.0.3: + dependencies: + node-gyp-build: 4.8.1 + + 
util-deprecate@1.0.2: {} + + uuid@9.0.1: {} + + validate-npm-package-name@5.0.1: {} + + vite-node@1.6.0(@types/node@18.19.33): + dependencies: + cac: 6.7.14 + debug: 4.3.4 + pathe: 1.1.2 + picocolors: 1.0.1 + vite: 5.2.11(@types/node@18.19.33) + transitivePeerDependencies: + - '@types/node' + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + + vite-tsconfig-paths@4.3.2(typescript@5.4.5)(vite@5.2.11(@types/node@18.19.33)): + dependencies: + debug: 4.3.4 + globrex: 0.1.2 + tsconfck: 3.0.3(typescript@5.4.5) + optionalDependencies: + vite: 5.2.11(@types/node@18.19.33) + transitivePeerDependencies: + - supports-color + - typescript + + vite@5.2.11(@types/node@18.19.33): + dependencies: + esbuild: 0.20.2 + postcss: 8.4.38 + rollup: 4.17.2 + optionalDependencies: + '@types/node': 18.19.33 + fsevents: 2.3.3 + + vitest@1.6.0(@types/node@18.19.33): + dependencies: + '@vitest/expect': 1.6.0 + '@vitest/runner': 1.6.0 + '@vitest/snapshot': 1.6.0 + '@vitest/spy': 1.6.0 + '@vitest/utils': 1.6.0 + acorn-walk: 8.3.2 + chai: 4.4.1 + debug: 4.3.4 + execa: 8.0.1 + local-pkg: 0.5.0 + magic-string: 0.30.10 + pathe: 1.1.2 + picocolors: 1.0.1 + std-env: 3.7.0 + strip-literal: 2.1.0 + tinybench: 2.8.0 + tinypool: 0.8.4 + vite: 5.2.11(@types/node@18.19.33) + vite-node: 1.6.0(@types/node@18.19.33) + why-is-node-running: 2.2.2 + optionalDependencies: + '@types/node': 18.19.33 + transitivePeerDependencies: + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + + web-streams-polyfill@3.3.3: {} + + webidl-conversions@3.0.1: {} + + webidl-conversions@4.0.2: {} + + webpod@0.0.2: {} + + well-known-symbols@2.0.0: {} + + whatwg-url@5.0.0: + dependencies: + tr46: 0.0.3 + webidl-conversions: 3.0.1 + + whatwg-url@7.1.0: + dependencies: + lodash.sortby: 4.7.0 + tr46: 1.0.1 + webidl-conversions: 4.0.2 + + which@2.0.2: + dependencies: + isexe: 2.0.0 + + which@3.0.1: + dependencies: + isexe: 2.0.0 + + why-is-node-running@2.2.2: + 
dependencies: + siginfo: 2.0.0 + stackback: 0.0.2 + + word-wrap@1.2.5: {} + + wordwrap@1.0.0: {} + + workerd@1.20240512.0: + optionalDependencies: + '@cloudflare/workerd-darwin-64': 1.20240512.0 + '@cloudflare/workerd-darwin-arm64': 1.20240512.0 + '@cloudflare/workerd-linux-64': 1.20240512.0 + '@cloudflare/workerd-linux-arm64': 1.20240512.0 + '@cloudflare/workerd-windows-64': 1.20240512.0 + + wrangler@3.57.0(@cloudflare/workers-types@4.20240512.0)(bufferutil@4.0.8)(utf-8-validate@6.0.3): + dependencies: + '@cloudflare/kv-asset-handler': 0.3.2 + '@esbuild-plugins/node-globals-polyfill': 0.2.3(esbuild@0.17.19) + '@esbuild-plugins/node-modules-polyfill': 0.2.2(esbuild@0.17.19) + blake3-wasm: 2.1.5 + chokidar: 3.6.0 + esbuild: 0.17.19 + miniflare: 3.20240512.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) + nanoid: 3.3.7 + path-to-regexp: 6.2.2 + resolve: 1.22.8 + resolve.exports: 2.0.2 + selfsigned: 2.4.1 + source-map: 0.6.1 + xxhash-wasm: 1.0.2 + optionalDependencies: + '@cloudflare/workers-types': 4.20240512.0 + fsevents: 2.3.3 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.1 + string-width: 5.1.2 + strip-ansi: 7.1.0 + + wrappy@1.0.2: {} + + write-file-atomic@5.0.1: + dependencies: + imurmurhash: 0.1.4 + signal-exit: 4.1.0 + + ws@8.14.2(bufferutil@4.0.8)(utf-8-validate@6.0.3): + optionalDependencies: + bufferutil: 4.0.8 + utf-8-validate: 6.0.3 + + ws@8.17.0(bufferutil@4.0.8)(utf-8-validate@6.0.3): + optionalDependencies: + bufferutil: 4.0.8 + utf-8-validate: 6.0.3 + + xtend@4.0.2: {} + + xxhash-wasm@1.0.2: {} + + y18n@5.0.8: {} + + yallist@4.0.0: {} + + yaml@2.4.2: {} + + yargs-parser@21.1.1: {} + + yargs@17.7.2: + dependencies: + cliui: 8.0.1 + escalade: 3.1.2 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + + 
yocto-queue@0.1.0: {} + + yocto-queue@1.0.0: {} + + youch@3.3.3: + dependencies: + cookie: 0.5.0 + mustache: 4.2.0 + stacktracey: 2.1.8 + + zod@3.23.8: {} + + zx@7.2.3: + dependencies: + '@types/fs-extra': 11.0.4 + '@types/minimist': 1.2.5 + '@types/node': 18.19.33 + '@types/ps-tree': 1.1.6 + '@types/which': 3.0.3 + chalk: 5.3.0 + fs-extra: 11.2.0 + fx: 34.0.0 + globby: 13.2.2 + minimist: 1.2.8 + node-fetch: 3.3.1 + ps-tree: 1.2.0 + webpod: 0.0.2 + which: 3.0.1 + yaml: 2.4.2 diff --git a/drizzle-kit/schema.ts b/drizzle-kit/schema.ts new file mode 100644 index 000000000..e69de29bb diff --git a/drizzle-kit/src/@types/utils.ts b/drizzle-kit/src/@types/utils.ts new file mode 100644 index 000000000..3f14151a4 --- /dev/null +++ b/drizzle-kit/src/@types/utils.ts @@ -0,0 +1,51 @@ +declare global { + interface String { + trimChar(char: string): string; + squashSpaces(): string; + capitalise(): string; + camelCase(): string; + concatIf(it: string, condition: boolean): string; + } + + interface Array { + random(): T; + } +} +import camelcase from 'camelcase'; + +String.prototype.trimChar = function(char: string) { + let start = 0; + let end = this.length; + + while (start < end && this[start] === char) ++start; + while (end > start && this[end - 1] === char) --end; + + // this.toString() due to ava deep equal issue with String { "value" } + return start > 0 || end < this.length + ? this.substring(start, end) + : this.toString(); +}; + +String.prototype.squashSpaces = function() { + return this.replace(/ +/g, ' ').trim(); +}; + +String.prototype.camelCase = function() { + return camelcase(String(this)); +}; + +String.prototype.capitalise = function() { + return this && this.length > 0 + ? `${this[0].toUpperCase()}${this.slice(1)}` + : String(this); +}; + +String.prototype.concatIf = function(it: string, condition: boolean) { + return condition ? 
`${this}${it}` : String(this); +}; + +Array.prototype.random = function() { + return this[~~(Math.random() * this.length)]; +}; + +export {}; diff --git a/drizzle-kit/src/api.ts b/drizzle-kit/src/api.ts new file mode 100644 index 000000000..3922da088 --- /dev/null +++ b/drizzle-kit/src/api.ts @@ -0,0 +1,460 @@ +import { randomUUID } from 'crypto'; +import { LibSQLDatabase } from 'drizzle-orm/libsql'; +import type { MySql2Database } from 'drizzle-orm/mysql2'; +import { PgDatabase } from 'drizzle-orm/pg-core'; +import { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; +import { + columnsResolver, + enumsResolver, + schemasResolver, + sequencesResolver, + tablesResolver, +} from './cli/commands/migrate'; +import { pgPushIntrospect } from './cli/commands/pgIntrospect'; +import { pgSuggestions } from './cli/commands/pgPushUtils'; +import { updateUpToV6 as upPgV6, updateUpToV7 as upPgV7 } from './cli/commands/pgUp'; +import { sqlitePushIntrospect } from './cli/commands/sqliteIntrospect'; +import { logSuggestionsAndReturn } from './cli/commands/sqlitePushUtils'; +import { originUUID } from './global'; +import { fillPgSnapshot } from './migrationPreparator'; +import { MySqlSchema as MySQLSchemaKit, mysqlSchema, squashMysqlScheme } from './serializer/mysqlSchema'; +import { generateMySqlSnapshot } from './serializer/mysqlSerializer'; +import { prepareFromExports } from './serializer/pgImports'; +import { PgSchema as PgSchemaKit, pgSchema, squashPgScheme } from './serializer/pgSchema'; +import { generatePgSnapshot } from './serializer/pgSerializer'; +import { + SingleStoreSchema as SingleStoreSchemaKit, + singlestoreSchema, + squashSingleStoreScheme, +} from './serializer/singlestoreSchema'; +import { generateSingleStoreSnapshot } from './serializer/singlestoreSerializer'; +import { SQLiteSchema as SQLiteSchemaKit, sqliteSchema, squashSqliteScheme } from './serializer/sqliteSchema'; +import { generateSqliteSnapshot } from './serializer/sqliteSerializer'; +import 
type { DB, SQLiteDB } from './utils'; +export type DrizzleSnapshotJSON = PgSchemaKit; +export type DrizzleSQLiteSnapshotJSON = SQLiteSchemaKit; +export type DrizzleMySQLSnapshotJSON = MySQLSchemaKit; +export type DrizzleSingleStoreSnapshotJSON = SingleStoreSchemaKit; + +export const generateDrizzleJson = ( + imports: Record, + prevId?: string, + schemaFilters?: string[], +): PgSchemaKit => { + const prepared = prepareFromExports(imports); + + const id = randomUUID(); + + const snapshot = generatePgSnapshot( + prepared.tables, + prepared.enums, + prepared.schemas, + prepared.sequences, + schemaFilters, + ); + + return fillPgSnapshot({ + serialized: snapshot, + id, + idPrev: prevId ?? originUUID, + }); +}; + +export const generateMigration = async ( + prev: DrizzleSnapshotJSON, + cur: DrizzleSnapshotJSON, +) => { + const { applyPgSnapshotsDiff } = await import('./snapshotsDiffer'); + + const validatedPrev = pgSchema.parse(prev); + const validatedCur = pgSchema.parse(cur); + + const squashedPrev = squashPgScheme(validatedPrev); + const squashedCur = squashPgScheme(validatedCur); + + const { sqlStatements, _meta } = await applyPgSnapshotsDiff( + squashedPrev, + squashedCur, + schemasResolver, + enumsResolver, + sequencesResolver, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + ); + + return sqlStatements; +}; + +export const pushSchema = async ( + imports: Record, + drizzleInstance: PgDatabase, + schemaFilters?: string[], +) => { + const { applyPgSnapshotsDiff } = await import('./snapshotsDiffer'); + const { sql } = await import('drizzle-orm'); + + const db: DB = { + query: async (query: string, params?: any[]) => { + const res = await drizzleInstance.execute(sql.raw(query)); + return res.rows; + }, + }; + + const cur = generateDrizzleJson(imports); + const { schema: prev } = await pgPushIntrospect( + db, + [], + schemaFilters ?? 
['public'], + ); + + const validatedPrev = pgSchema.parse(prev); + const validatedCur = pgSchema.parse(cur); + + const squashedPrev = squashPgScheme(validatedPrev, 'push'); + const squashedCur = squashPgScheme(validatedCur, 'push'); + + const { statements } = await applyPgSnapshotsDiff( + squashedPrev, + squashedCur, + schemasResolver, + enumsResolver, + sequencesResolver, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + 'push', + ); + + const { shouldAskForApprove, statementsToExecute, infoToPrint } = await pgSuggestions(db, statements); + + return { + hasDataLoss: shouldAskForApprove, + warnings: infoToPrint, + statementsToExecute, + apply: async () => { + for (const dStmnt of statementsToExecute) { + await db.query(dStmnt); + } + }, + }; +}; + +// SQLite + +export const generateSQLiteDrizzleJson = async ( + imports: Record, + prevId?: string, +): Promise => { + const { prepareFromExports } = await import('./serializer/sqliteImports'); + + const prepared = prepareFromExports(imports); + + const id = randomUUID(); + + const snapshot = generateSqliteSnapshot(prepared.tables); + + return { + ...snapshot, + id, + prevId: prevId ?? 
originUUID, + }; +}; + +export const generateSQLiteMigration = async ( + prev: DrizzleSQLiteSnapshotJSON, + cur: DrizzleSQLiteSnapshotJSON, +) => { + const { applySqliteSnapshotsDiff } = await import('./snapshotsDiffer'); + + const validatedPrev = sqliteSchema.parse(prev); + const validatedCur = sqliteSchema.parse(cur); + + const squashedPrev = squashSqliteScheme(validatedPrev); + const squashedCur = squashSqliteScheme(validatedCur); + + const { sqlStatements } = await applySqliteSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + ); + + return sqlStatements; +}; + +export const pushSQLiteSchema = async ( + imports: Record, + drizzleInstance: LibSQLDatabase, +) => { + const { applySqliteSnapshotsDiff } = await import('./snapshotsDiffer'); + const { sql } = await import('drizzle-orm'); + + const db: SQLiteDB = { + query: async (query: string, params?: any[]) => { + const res = drizzleInstance.all(sql.raw(query)); + return res; + }, + run: async (query: string) => { + return Promise.resolve(drizzleInstance.run(sql.raw(query))).then( + () => {}, + ); + }, + }; + + const cur = await generateSQLiteDrizzleJson(imports); + const { schema: prev } = await sqlitePushIntrospect(db, []); + + const validatedPrev = sqliteSchema.parse(prev); + const validatedCur = sqliteSchema.parse(cur); + + const squashedPrev = squashSqliteScheme(validatedPrev, 'push'); + const squashedCur = squashSqliteScheme(validatedCur, 'push'); + + const { statements, _meta } = await applySqliteSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + 'push', + ); + + const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn( + db, + statements, + squashedPrev, + squashedCur, + _meta!, + ); + + return { + hasDataLoss: shouldAskForApprove, + warnings: infoToPrint, + statementsToExecute, + apply: async () => { + for (const dStmnt of 
statementsToExecute) { + await db.query(dStmnt); + } + }, + }; +}; + +// MySQL + +export const generateMySQLDrizzleJson = async ( + imports: Record, + prevId?: string, +): Promise => { + const { prepareFromExports } = await import('./serializer/mysqlImports'); + + const prepared = prepareFromExports(imports); + + const id = randomUUID(); + + const snapshot = generateMySqlSnapshot(prepared.tables); + + return { + ...snapshot, + id, + prevId: prevId ?? originUUID, + }; +}; + +export const generateMySQLMigration = async ( + prev: DrizzleMySQLSnapshotJSON, + cur: DrizzleMySQLSnapshotJSON, +) => { + const { applyMysqlSnapshotsDiff } = await import('./snapshotsDiffer'); + + const validatedPrev = mysqlSchema.parse(prev); + const validatedCur = mysqlSchema.parse(cur); + + const squashedPrev = squashMysqlScheme(validatedPrev); + const squashedCur = squashMysqlScheme(validatedCur); + + const { sqlStatements } = await applyMysqlSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + ); + + return sqlStatements; +}; + +export const pushMySQLSchema = async ( + imports: Record, + drizzleInstance: MySql2Database, + databaseName: string, +) => { + const { applyMysqlSnapshotsDiff } = await import('./snapshotsDiffer'); + const { logSuggestionsAndReturn } = await import( + './cli/commands/mysqlPushUtils' + ); + const { mysqlPushIntrospect } = await import( + './cli/commands/mysqlIntrospect' + ); + const { sql } = await import('drizzle-orm'); + + const db: DB = { + query: async (query: string, params?: any[]) => { + const res = await drizzleInstance.execute(sql.raw(query)); + return res[0] as unknown as any[]; + }, + }; + const cur = await generateMySQLDrizzleJson(imports); + const { schema: prev } = await mysqlPushIntrospect(db, databaseName, []); + + const validatedPrev = mysqlSchema.parse(prev); + const validatedCur = mysqlSchema.parse(cur); + + const squashedPrev = squashMysqlScheme(validatedPrev); + const squashedCur = 
squashMysqlScheme(validatedCur); + + const { statements } = await applyMysqlSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + 'push', + ); + + const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn( + db, + statements, + validatedCur, + ); + + return { + hasDataLoss: shouldAskForApprove, + warnings: infoToPrint, + statementsToExecute, + apply: async () => { + for (const dStmnt of statementsToExecute) { + await db.query(dStmnt); + } + }, + }; +}; + +// SingleStore + +export const generateSingleStoreDrizzleJson = async ( + imports: Record, + prevId?: string, +): Promise => { + const { prepareFromExports } = await import('./serializer/singlestoreImports'); + + const prepared = prepareFromExports(imports); + + const id = randomUUID(); + + const snapshot = generateSingleStoreSnapshot(prepared.tables); + + return { + ...snapshot, + id, + prevId: prevId ?? originUUID, + }; +}; + +export const generateSingleStoreMigration = async ( + prev: DrizzleSingleStoreSnapshotJSON, + cur: DrizzleSingleStoreSnapshotJSON, +) => { + const { applySingleStoreSnapshotsDiff } = await import('./snapshotsDiffer'); + + const validatedPrev = singlestoreSchema.parse(prev); + const validatedCur = singlestoreSchema.parse(cur); + + const squashedPrev = squashSingleStoreScheme(validatedPrev); + const squashedCur = squashSingleStoreScheme(validatedCur); + + const { sqlStatements } = await applySingleStoreSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + ); + + return sqlStatements; +}; + +export const pushSingleStoreSchema = async ( + imports: Record, + drizzleInstance: SingleStoreDriverDatabase, + databaseName: string, +) => { + const { applySingleStoreSnapshotsDiff } = await import('./snapshotsDiffer'); + const { logSuggestionsAndReturn } = await import( + './cli/commands/singlestorePushUtils' + ); + const { 
singlestorePushIntrospect } = await import( + './cli/commands/singlestoreIntrospect' + ); + const { sql } = await import('drizzle-orm'); + + const db: DB = { + query: async (query: string, params?: any[]) => { + const res = await drizzleInstance.execute(sql.raw(query)); + return res[0] as unknown as any[]; + }, + }; + const cur = await generateSingleStoreDrizzleJson(imports); + const { schema: prev } = await singlestorePushIntrospect(db, databaseName, []); + + const validatedPrev = singlestoreSchema.parse(prev); + const validatedCur = singlestoreSchema.parse(cur); + + const squashedPrev = squashSingleStoreScheme(validatedPrev); + const squashedCur = squashSingleStoreScheme(validatedCur); + + const { statements } = await applySingleStoreSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + 'push', + ); + + const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn( + db, + statements, + validatedCur, + ); + + return { + hasDataLoss: shouldAskForApprove, + warnings: infoToPrint, + statementsToExecute, + apply: async () => { + for (const dStmnt of statementsToExecute) { + await db.query(dStmnt); + } + }, + }; +}; + +export const upPgSnapshot = (snapshot: Record) => { + if (snapshot.version === '5') { + return upPgV7(upPgV6(snapshot)); + } + if (snapshot.version === '6') { + return upPgV7(snapshot); + } + return snapshot; +}; diff --git a/drizzle-kit/src/cli/commands/_es5.ts b/drizzle-kit/src/cli/commands/_es5.ts new file mode 100644 index 000000000..51838ffe5 --- /dev/null +++ b/drizzle-kit/src/cli/commands/_es5.ts @@ -0,0 +1,2 @@ +const _ = ''; +export default _; diff --git a/drizzle-kit/src/cli/commands/check.ts b/drizzle-kit/src/cli/commands/check.ts new file mode 100644 index 000000000..092057372 --- /dev/null +++ b/drizzle-kit/src/cli/commands/check.ts @@ -0,0 +1,52 @@ +import { Dialect } from '../../schemaValidator'; +import { prepareOutFolder, validateWithReport 
} from '../../utils'; + +export const checkHandler = (out: string, dialect: Dialect) => { + const { snapshots } = prepareOutFolder(out, dialect); + const report = validateWithReport(snapshots, dialect); + + if (report.nonLatest.length > 0) { + console.log( + report.nonLatest + .map((it) => { + return `${it} is not of the latest version, please run "drizzle-kit up"`; + }) + .join('\n'), + ); + process.exit(1); + } + + if (report.malformed.length) { + const message = report.malformed + .map((it) => { + return `${it} data is malformed`; + }) + .join('\n'); + console.log(message); + } + + const collisionEntries = Object.entries(report.idsMap).filter( + (it) => it[1].snapshots.length > 1, + ); + + const message = collisionEntries + .map((it) => { + const data = it[1]; + return `[${ + data.snapshots.join( + ', ', + ) + }] are pointing to a parent snapshot: ${data.parent}/snapshot.json which is a collision.`; + }) + .join('\n'); + + if (message) { + console.log(message); + } + + const abort = report.malformed.length!! 
|| collisionEntries.length > 0; + + if (abort) { + process.exit(1); + } +}; diff --git a/drizzle-kit/src/cli/commands/drop.ts b/drizzle-kit/src/cli/commands/drop.ts new file mode 100644 index 000000000..183e9459d --- /dev/null +++ b/drizzle-kit/src/cli/commands/drop.ts @@ -0,0 +1,60 @@ +import chalk from 'chalk'; +import { readFileSync, rmSync, writeFileSync } from 'fs'; +import fs from 'fs'; +import { render } from 'hanji'; +import { join } from 'path'; +import { Journal } from '../../utils'; +import { DropMigrationView } from '../views'; +import { embeddedMigrations } from './migrate'; + +export const dropMigration = async ({ + out, + bundle, +}: { + out: string; + bundle: boolean; +}) => { + const metaFilePath = join(out, 'meta', '_journal.json'); + const journal = JSON.parse(readFileSync(metaFilePath, 'utf-8')) as Journal; + + if (journal.entries.length === 0) { + console.log( + `[${chalk.blue('i')}] no migration entries found in ${metaFilePath}`, + ); + return; + } + + const result = await render(new DropMigrationView(journal.entries)); + if (result.status === 'aborted') return; + + delete journal.entries[journal.entries.indexOf(result.data!)]; + + const resultJournal: Journal = { + ...journal, + entries: journal.entries.filter(Boolean), + }; + const sqlFilePath = join(out, `${result.data.tag}.sql`); + const snapshotFilePath = join( + out, + 'meta', + `${result.data.tag.split('_')[0]}_snapshot.json`, + ); + rmSync(sqlFilePath); + rmSync(snapshotFilePath); + writeFileSync(metaFilePath, JSON.stringify(resultJournal, null, 2)); + + if (bundle) { + fs.writeFileSync( + join(out, `migrations.js`), + embeddedMigrations(resultJournal), + ); + } + + console.log( + `[${chalk.green('✓')}] ${ + chalk.bold( + result.data.tag, + ) + } migration successfully dropped`, + ); +}; diff --git a/drizzle-kit/src/cli/commands/introspect.ts b/drizzle-kit/src/cli/commands/introspect.ts new file mode 100644 index 000000000..3558bf83c --- /dev/null +++ 
b/drizzle-kit/src/cli/commands/introspect.ts @@ -0,0 +1,633 @@ +import chalk from 'chalk'; +import { writeFileSync } from 'fs'; +import { render, renderWithTask } from 'hanji'; +import { Minimatch } from 'minimatch'; +import { join } from 'path'; +import { plural, singular } from 'pluralize'; +import { drySingleStore, SingleStoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema'; +import { assertUnreachable, originUUID } from '../../global'; +import { schemaToTypeScript as mysqlSchemaToTypeScript } from '../../introspect-mysql'; +import { paramNameFor, schemaToTypeScript as postgresSchemaToTypeScript } from '../../introspect-pg'; +import { schemaToTypeScript as singlestoreSchemaToTypeScript } from '../../introspect-singlestore'; +import { schemaToTypeScript as sqliteSchemaToTypeScript } from '../../introspect-sqlite'; +import { dryMySql, MySqlSchema, squashMysqlScheme } from '../../serializer/mysqlSchema'; +import { fromDatabase as fromMysqlDatabase } from '../../serializer/mysqlSerializer'; +import { dryPg, type PgSchema, squashPgScheme } from '../../serializer/pgSchema'; +import { fromDatabase as fromPostgresDatabase } from '../../serializer/pgSerializer'; +import { fromDatabase as fromSingleStoreDatabase } from '../../serializer/singlestoreSerializer'; +import { drySQLite, type SQLiteSchema, squashSqliteScheme } from '../../serializer/sqliteSchema'; +import { fromDatabase as fromSqliteDatabase } from '../../serializer/sqliteSerializer'; +import { + applyMysqlSnapshotsDiff, + applyPgSnapshotsDiff, + applySingleStoreSnapshotsDiff, + applySqliteSnapshotsDiff, +} from '../../snapshotsDiffer'; +import { prepareOutFolder } from '../../utils'; +import type { Casing, Prefix } from '../validations/common'; +import type { MysqlCredentials } from '../validations/mysql'; +import type { PostgresCredentials } from '../validations/postgres'; +import { SingleStoreCredentials } from '../validations/singlestore'; +import type { SqliteCredentials } from 
'../validations/sqlite'; +import { IntrospectProgress } from '../views'; +import { + columnsResolver, + enumsResolver, + schemasResolver, + sequencesResolver, + tablesResolver, + writeResult, +} from './migrate'; + +export const introspectPostgres = async ( + casing: Casing, + out: string, + breakpoints: boolean, + credentials: PostgresCredentials, + tablesFilter: string[], + schemasFilter: string[], + prefix: Prefix, +) => { + const { preparePostgresDB } = await import('../connections'); + const db = await preparePostgresDB(credentials); + + const matchers = tablesFilter.map((it) => { + return new Minimatch(it); + }); + + const filter = (tableName: string) => { + if (matchers.length === 0) return true; + + let flags: boolean[] = []; + + for (let matcher of matchers) { + if (matcher.negate) { + if (!matcher.match(tableName)) { + flags.push(false); + } + } + + if (matcher.match(tableName)) { + flags.push(true); + } + } + + if (flags.length > 0) { + return flags.every(Boolean); + } + return false; + }; + + const progress = new IntrospectProgress(true); + const res = await renderWithTask( + progress, + fromPostgresDatabase(db, filter, schemasFilter, (stage, count, status) => { + progress.update(stage, count, status); + }), + ); + + const schema = { id: originUUID, prevId: '', ...res } as PgSchema; + const ts = postgresSchemaToTypeScript(schema, casing); + const relationsTs = relationsToTypeScript(schema, casing); + const { internal, ...schemaWithoutInternals } = schema; + + const schemaFile = join(out, 'schema.ts'); + writeFileSync(schemaFile, ts.file); + const relationsFile = join(out, 'relations.ts'); + writeFileSync(relationsFile, relationsTs.file); + console.log(); + + const { snapshots, journal } = prepareOutFolder(out, 'postgresql'); + + if (snapshots.length === 0) { + const { sqlStatements, _meta } = await applyPgSnapshotsDiff( + squashPgScheme(dryPg), + squashPgScheme(schema), + schemasResolver, + enumsResolver, + sequencesResolver, + tablesResolver, + 
columnsResolver, + dryPg, + schema, + ); + + writeResult({ + cur: schema, + sqlStatements, + journal, + _meta, + outFolder: out, + breakpoints, + type: 'introspect', + prefixMode: prefix, + }); + } else { + render( + `[${ + chalk.blue( + 'i', + ) + }] No SQL generated, you already have migrations in project`, + ); + } + + render( + `[${ + chalk.green( + '✓', + ) + }] You schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, + ); + render( + `[${ + chalk.green( + '✓', + ) + }] You relations file is ready ➜ ${ + chalk.bold.underline.blue( + relationsFile, + ) + } 🚀`, + ); + process.exit(0); +}; + +export const introspectMysql = async ( + casing: Casing, + out: string, + breakpoints: boolean, + credentials: MysqlCredentials, + tablesFilter: string[], + prefix: Prefix, +) => { + const { connectToMySQL } = await import('../connections'); + const { db, database } = await connectToMySQL(credentials); + + const matchers = tablesFilter.map((it) => { + return new Minimatch(it); + }); + + const filter = (tableName: string) => { + if (matchers.length === 0) return true; + + let flags: boolean[] = []; + + for (let matcher of matchers) { + if (matcher.negate) { + if (!matcher.match(tableName)) { + flags.push(false); + } + } + + if (matcher.match(tableName)) { + flags.push(true); + } + } + + if (flags.length > 0) { + return flags.every(Boolean); + } + return false; + }; + + const progress = new IntrospectProgress(); + const res = await renderWithTask( + progress, + fromMysqlDatabase(db, database, filter, (stage, count, status) => { + progress.update(stage, count, status); + }), + ); + + const schema = { id: originUUID, prevId: '', ...res } as MySqlSchema; + const ts = mysqlSchemaToTypeScript(schema, casing); + const relationsTs = relationsToTypeScript(schema, casing); + + const schemaFile = join(out, 'schema.ts'); + writeFileSync(schemaFile, ts.file); + const relationsFile = join(out, 'relations.ts'); + writeFileSync(relationsFile, relationsTs.file); + 
console.log(); + + const { snapshots, journal } = prepareOutFolder(out, 'mysql'); + + if (snapshots.length === 0) { + const { sqlStatements, _meta } = await applyMysqlSnapshotsDiff( + squashMysqlScheme(dryMySql), + squashMysqlScheme(schema), + tablesResolver, + columnsResolver, + dryMySql, + schema, + ); + + writeResult({ + cur: schema, + sqlStatements, + journal, + _meta, + outFolder: out, + breakpoints, + type: 'introspect', + prefixMode: prefix, + }); + } else { + render( + `[${ + chalk.blue( + 'i', + ) + }] No SQL generated, you already have migrations in project`, + ); + } + + render( + `[${ + chalk.green( + '✓', + ) + }] You schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, + ); + render( + `[${ + chalk.green( + '✓', + ) + }] You relations file is ready ➜ ${ + chalk.bold.underline.blue( + relationsFile, + ) + } 🚀`, + ); + process.exit(0); +}; + +export const introspectSingleStore = async ( + casing: Casing, + out: string, + breakpoints: boolean, + credentials: SingleStoreCredentials, + tablesFilter: string[], + prefix: Prefix, +) => { + const { connectToSingleStore } = await import('../connections'); + const { db, database } = await connectToSingleStore(credentials); + + const matchers = tablesFilter.map((it) => { + return new Minimatch(it); + }); + + const filter = (tableName: string) => { + if (matchers.length === 0) return true; + + let flags: boolean[] = []; + + for (let matcher of matchers) { + if (matcher.negate) { + if (!matcher.match(tableName)) { + flags.push(false); + } + } + + if (matcher.match(tableName)) { + flags.push(true); + } + } + + if (flags.length > 0) { + return flags.every(Boolean); + } + return false; + }; + + const progress = new IntrospectProgress(); + const res = await renderWithTask( + progress, + fromSingleStoreDatabase(db, database, filter, (stage, count, status) => { + progress.update(stage, count, status); + }), + ); + + const schema = { id: originUUID, prevId: '', ...res } as SingleStoreSchema; + const ts = 
singlestoreSchemaToTypeScript(schema, casing); + const { internal, ...schemaWithoutInternals } = schema; + + const schemaFile = join(out, 'schema.ts'); + writeFileSync(schemaFile, ts.file); + console.log(); + + const { snapshots, journal } = prepareOutFolder(out, 'postgresql'); + + if (snapshots.length === 0) { + const { sqlStatements, _meta } = await applySingleStoreSnapshotsDiff( + squashSingleStoreScheme(drySingleStore), + squashSingleStoreScheme(schema), + tablesResolver, + columnsResolver, + drySingleStore, + schema, + ); + + writeResult({ + cur: schema, + sqlStatements, + journal, + _meta, + outFolder: out, + breakpoints, + type: 'introspect', + prefixMode: prefix, + }); + } else { + render( + `[${ + chalk.blue( + 'i', + ) + }] No SQL generated, you already have migrations in project`, + ); + } + + render( + `[${ + chalk.green( + '✓', + ) + }] You schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, + ); + process.exit(0); +}; + +export const introspectSqlite = async ( + casing: Casing, + out: string, + breakpoints: boolean, + credentials: SqliteCredentials, + tablesFilter: string[], + prefix: Prefix, +) => { + const { connectToSQLite } = await import('../connections'); + const db = await connectToSQLite(credentials); + + const matchers = tablesFilter.map((it) => { + return new Minimatch(it); + }); + + const filter = (tableName: string) => { + if (matchers.length === 0) return true; + + let flags: boolean[] = []; + + for (let matcher of matchers) { + if (matcher.negate) { + if (!matcher.match(tableName)) { + flags.push(false); + } + } + + if (matcher.match(tableName)) { + flags.push(true); + } + } + + if (flags.length > 0) { + return flags.every(Boolean); + } + return false; + }; + + const progress = new IntrospectProgress(); + const res = await renderWithTask( + progress, + fromSqliteDatabase(db, filter, (stage, count, status) => { + progress.update(stage, count, status); + }), + ); + + const schema = { id: originUUID, prevId: '', ...res } as 
SQLiteSchema; + const ts = sqliteSchemaToTypeScript(schema, casing); + const relationsTs = relationsToTypeScript(schema, casing); + + // check orm and orm-pg api version + + const schemaFile = join(out, 'schema.ts'); + writeFileSync(schemaFile, ts.file); + const relationsFile = join(out, 'relations.ts'); + writeFileSync(relationsFile, relationsTs.file); + console.log(); + + const { snapshots, journal } = prepareOutFolder(out, 'sqlite'); + + if (snapshots.length === 0) { + const { sqlStatements, _meta } = await applySqliteSnapshotsDiff( + squashSqliteScheme(drySQLite), + squashSqliteScheme(schema), + tablesResolver, + columnsResolver, + drySQLite, + schema, + ); + + writeResult({ + cur: schema, + sqlStatements, + journal, + _meta, + outFolder: out, + breakpoints, + type: 'introspect', + prefixMode: prefix, + }); + } else { + render( + `[${ + chalk.blue( + 'i', + ) + }] No SQL generated, you already have migrations in project`, + ); + } + + render( + `[${ + chalk.green( + '✓', + ) + }] You schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, + ); + render( + `[${ + chalk.green( + '✓', + ) + }] You relations file is ready ➜ ${ + chalk.bold.underline.blue( + relationsFile, + ) + } 🚀`, + ); + process.exit(0); +}; + +const withCasing = (value: string, casing: Casing) => { + if (casing === 'preserve') { + return value; + } + if (casing === 'camel') { + return value.camelCase(); + } + + assertUnreachable(casing); +}; + +export const relationsToTypeScript = ( + schema: { + tables: Record< + string, + { + schema?: string; + foreignKeys: Record< + string, + { + name: string; + tableFrom: string; + columnsFrom: string[]; + tableTo: string; + schemaTo?: string; + columnsTo: string[]; + onUpdate?: string | undefined; + onDelete?: string | undefined; + } + >; + } + >; + }, + casing: Casing, +) => { + const imports: string[] = []; + const tableRelations: Record< + string, + { + name: string; + type: 'one' | 'many'; + tableFrom: string; + schemaFrom?: string; + 
columnFrom: string; + tableTo: string; + schemaTo?: string; + columnTo: string; + relationName?: string; + }[] + > = {}; + + Object.values(schema.tables).forEach((table) => { + Object.values(table.foreignKeys).forEach((fk) => { + const tableNameFrom = paramNameFor(fk.tableFrom, table.schema); + const tableNameTo = paramNameFor(fk.tableTo, fk.schemaTo); + const tableFrom = withCasing(tableNameFrom, casing); + const tableTo = withCasing(tableNameTo, casing); + const columnFrom = withCasing(fk.columnsFrom[0], casing); + const columnTo = withCasing(fk.columnsTo[0], casing); + + imports.push(tableTo, tableFrom); + + // const keyFrom = `${schemaFrom}.${tableFrom}`; + const keyFrom = tableFrom; + + if (!tableRelations[keyFrom]) { + tableRelations[keyFrom] = []; + } + + tableRelations[keyFrom].push({ + name: singular(tableTo), + type: 'one', + tableFrom, + columnFrom, + tableTo, + columnTo, + }); + + // const keyTo = `${schemaTo}.${tableTo}`; + const keyTo = tableTo; + + if (!tableRelations[keyTo]) { + tableRelations[keyTo] = []; + } + + tableRelations[keyTo].push({ + name: plural(tableFrom), + type: 'many', + tableFrom: tableTo, + columnFrom: columnTo, + tableTo: tableFrom, + columnTo: columnFrom, + }); + }); + }); + + const uniqueImports = [...new Set(imports)]; + + const importsTs = `import { relations } from "drizzle-orm/relations";\nimport { ${ + uniqueImports.join( + ', ', + ) + } } from "./schema";\n\n`; + + const relationStatements = Object.entries(tableRelations).map( + ([table, relations]) => { + const hasOne = relations.some((it) => it.type === 'one'); + const hasMany = relations.some((it) => it.type === 'many'); + + // * change relation names if they are duplicated or if there are multiple relations between two tables + const preparedRelations = relations.map( + (relation, relationIndex, originArray) => { + let name = relation.name; + let relationName; + const hasMultipleRelations = originArray.some( + (it, originIndex) => relationIndex !== originIndex && 
it.tableTo === relation.tableTo, + ); + if (hasMultipleRelations) { + relationName = relation.type === 'one' + ? `${relation.tableFrom}_${relation.columnFrom}_${relation.tableTo}_${relation.columnTo}` + : `${relation.tableTo}_${relation.columnTo}_${relation.tableFrom}_${relation.columnFrom}`; + } + const hasDuplicatedRelation = originArray.some( + (it, originIndex) => relationIndex !== originIndex && it.name === relation.name, + ); + if (hasDuplicatedRelation) { + name = `${relation.name}_${relation.type === 'one' ? relation.columnFrom : relation.columnTo}`; + } + return { + ...relation, + name, + relationName, + }; + }, + ); + + const fields = preparedRelations.map((relation) => { + if (relation.type === 'one') { + return `\t${relation.name}: one(${relation.tableTo}, {\n\t\tfields: [${relation.tableFrom}.${relation.columnFrom}],\n\t\treferences: [${relation.tableTo}.${relation.columnTo}]${ + relation.relationName + ? `,\n\t\trelationName: "${relation.relationName}"` + : '' + }\n\t}),`; + } else { + return `\t${relation.name}: many(${relation.tableTo}${ + relation.relationName + ? `, {\n\t\trelationName: "${relation.relationName}"\n\t}` + : '' + }),`; + } + }); + + return `export const ${table}Relations = relations(${table}, ({${hasOne ? 'one' : ''}${ + hasOne && hasMany ? ', ' : '' + }${hasMany ? 
'many' : ''}}) => ({\n${fields.join('\n')}\n}));`; + }, + ); + + return { + file: importsTs + relationStatements.join('\n\n'), + }; +}; diff --git a/drizzle-kit/src/cli/commands/migrate.ts b/drizzle-kit/src/cli/commands/migrate.ts new file mode 100644 index 000000000..726c8ed4b --- /dev/null +++ b/drizzle-kit/src/cli/commands/migrate.ts @@ -0,0 +1,999 @@ +import fs from 'fs'; +import { + prepareMySqlDbPushSnapshot, + prepareMySqlMigrationSnapshot, + preparePgDbPushSnapshot, + preparePgMigrationSnapshot, + prepareSingleStoreDbPushSnapshot, + prepareSingleStoreMigrationSnapshot, + prepareSQLiteDbPushSnapshot, + prepareSqliteMigrationSnapshot, +} from '../../migrationPreparator'; + +import chalk from 'chalk'; +import { render } from 'hanji'; +import path, { join } from 'path'; +import { SingleStoreSchema, singlestoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema'; +import { TypeOf } from 'zod'; +import type { CommonSchema } from '../../schemaValidator'; +import { MySqlSchema, mysqlSchema, squashMysqlScheme } from '../../serializer/mysqlSchema'; +import { PgSchema, pgSchema, squashPgScheme } from '../../serializer/pgSchema'; +import { SQLiteSchema, sqliteSchema, squashSqliteScheme } from '../../serializer/sqliteSchema'; +import { + applyMysqlSnapshotsDiff, + applyPgSnapshotsDiff, + applySingleStoreSnapshotsDiff, + applySqliteSnapshotsDiff, + Column, + ColumnsResolverInput, + ColumnsResolverOutput, + Enum, + ResolverInput, + ResolverOutput, + ResolverOutputWithMoved, + Sequence, + Table, +} from '../../snapshotsDiffer'; +import { assertV1OutFolder, Journal, prepareMigrationFolder } from '../../utils'; +import { prepareMigrationMetadata } from '../../utils/words'; +import { Prefix } from '../validations/common'; +import { withStyle } from '../validations/outputs'; +import { + isRenamePromptItem, + RenamePropmtItem, + ResolveColumnSelect, + ResolveSchemasSelect, + ResolveSelect, + schema, +} from '../views'; +import { GenerateConfig } from 
'./utils'; + +export type Named = { + name: string; +}; + +export type NamedWithSchema = { + name: string; + schema: string; +}; + +export const schemasResolver = async ( + input: ResolverInput, +): Promise> => { + try { + const { created, deleted, renamed } = await promptSchemasConflict( + input.created, + input.deleted, + ); + + return { created: created, deleted: deleted, renamed: renamed }; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const tablesResolver = async ( + input: ResolverInput
, +): Promise> => { + try { + const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( + input.created, + input.deleted, + 'table', + ); + + return { + created: created, + deleted: deleted, + moved: moved, + renamed: renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const sequencesResolver = async ( + input: ResolverInput, +): Promise> => { + try { + const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( + input.created, + input.deleted, + 'sequence', + ); + + return { + created: created, + deleted: deleted, + moved: moved, + renamed: renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const enumsResolver = async ( + input: ResolverInput, +): Promise> => { + try { + const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( + input.created, + input.deleted, + 'enum', + ); + + return { + created: created, + deleted: deleted, + moved: moved, + renamed: renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const columnsResolver = async ( + input: ColumnsResolverInput, +): Promise> => { + const result = await promptColumnsConflicts( + input.tableName, + input.created, + input.deleted, + ); + return { + tableName: input.tableName, + schema: input.schema, + created: result.created, + deleted: result.deleted, + renamed: result.renamed, + }; +}; + +export const prepareAndMigratePg = async (config: GenerateConfig) => { + const outFolder = config.out; + const schemaPath = config.schema; + + try { + assertV1OutFolder(outFolder); + + const { snapshots, journal } = prepareMigrationFolder( + outFolder, + 'postgresql', + ); + + const { prev, cur, custom } = await preparePgMigrationSnapshot( + snapshots, + schemaPath, + ); + + const validatedPrev = pgSchema.parse(prev); + const validatedCur = pgSchema.parse(cur); + + if (config.custom) { + writeResult({ + cur: custom, + sqlStatements: [], + journal, + outFolder, + name: 
config.name, + breakpoints: config.breakpoints, + type: 'custom', + prefixMode: config.prefix, + }); + return; + } + + const squashedPrev = squashPgScheme(validatedPrev); + const squashedCur = squashPgScheme(validatedCur); + + const { sqlStatements, _meta } = await applyPgSnapshotsDiff( + squashedPrev, + squashedCur, + schemasResolver, + enumsResolver, + sequencesResolver, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + ); + + writeResult({ + cur, + sqlStatements, + journal, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + prefixMode: config.prefix, + }); + } catch (e) { + console.error(e); + } +}; + +export const preparePgPush = async ( + schemaPath: string | string[], + snapshot: PgSchema, + schemaFilter: string[], +) => { + const { prev, cur } = await preparePgDbPushSnapshot( + snapshot, + schemaPath, + schemaFilter, + ); + + const validatedPrev = pgSchema.parse(prev); + const validatedCur = pgSchema.parse(cur); + + const squashedPrev = squashPgScheme(validatedPrev, 'push'); + const squashedCur = squashPgScheme(validatedCur, 'push'); + + const { sqlStatements, statements, _meta } = await applyPgSnapshotsDiff( + squashedPrev, + squashedCur, + schemasResolver, + enumsResolver, + sequencesResolver, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + 'push', + ); + + return { sqlStatements, statements, squashedPrev, squashedCur }; +}; + +// Not needed for now +function mysqlSchemaSuggestions( + curSchema: TypeOf, + prevSchema: TypeOf, +) { + const suggestions: string[] = []; + const usedSuggestions: string[] = []; + const suggestionTypes = { + serial: withStyle.errorWarning( + `We deprecated the use of 'serial' for MySQL starting from version 0.20.0. In MySQL, 'serial' is simply an alias for 'bigint unsigned not null auto_increment unique,' which creates all constraints and indexes for you. 
This may make the process less explicit for both users and drizzle-kit push commands`, + ), + }; + + for (const table of Object.values(curSchema.tables)) { + for (const column of Object.values(table.columns)) { + if (column.type === 'serial') { + if (!usedSuggestions.includes('serial')) { + suggestions.push(suggestionTypes['serial']); + } + + const uniqueForSerial = Object.values( + prevSchema.tables[table.name].uniqueConstraints, + ).find((it) => it.columns[0] === column.name); + + suggestions.push( + `\n` + + withStyle.suggestion( + `We are suggesting to change ${ + chalk.blue( + column.name, + ) + } column in ${ + chalk.blueBright( + table.name, + ) + } table from serial to bigint unsigned\n\n${ + chalk.blueBright( + `bigint("${column.name}", { mode: "number", unsigned: true }).notNull().autoincrement().unique(${ + uniqueForSerial?.name ? `"${uniqueForSerial?.name}"` : '' + })`, + ) + }`, + ), + ); + } + } + } + + return suggestions; +} + +// Intersect with prepareAnMigrate +export const prepareMySQLPush = async ( + schemaPath: string | string[], + snapshot: MySqlSchema, +) => { + try { + const { prev, cur } = await prepareMySqlDbPushSnapshot( + snapshot, + schemaPath, + ); + + const validatedPrev = mysqlSchema.parse(prev); + const validatedCur = mysqlSchema.parse(cur); + + const squashedPrev = squashMysqlScheme(validatedPrev); + const squashedCur = squashMysqlScheme(validatedCur); + + const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + 'push', + ); + + return { sqlStatements, statements, validatedCur, validatedPrev }; + } catch (e) { + console.error(e); + process.exit(1); + } +}; + +export const prepareAndMigrateMysql = async (config: GenerateConfig) => { + const outFolder = config.out; + const schemaPath = config.schema; + + try { + // TODO: remove + assertV1OutFolder(outFolder); + + const { snapshots, journal } = 
prepareMigrationFolder(outFolder, 'mysql'); + const { prev, cur, custom } = await prepareMySqlMigrationSnapshot( + snapshots, + schemaPath, + ); + + const validatedPrev = mysqlSchema.parse(prev); + const validatedCur = mysqlSchema.parse(cur); + + if (config.custom) { + writeResult({ + cur: custom, + sqlStatements: [], + journal, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + type: 'custom', + prefixMode: config.prefix, + }); + return; + } + + const squashedPrev = squashMysqlScheme(validatedPrev); + const squashedCur = squashMysqlScheme(validatedCur); + + const { sqlStatements, statements, _meta } = await applyMysqlSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + ); + + writeResult({ + cur, + sqlStatements, + journal, + _meta, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + prefixMode: config.prefix, + }); + } catch (e) { + console.error(e); + } +}; + +// Not needed for now +function singleStoreSchemaSuggestions( + curSchema: TypeOf, + prevSchema: TypeOf, +) { + const suggestions: string[] = []; + const usedSuggestions: string[] = []; + const suggestionTypes = { + // TODO: Check if SingleStore has serial type + serial: withStyle.errorWarning( + `We deprecated the use of 'serial' for SingleStore starting from version 0.20.0. In SingleStore, 'serial' is simply an alias for 'bigint unsigned not null auto_increment unique,' which creates all constraints and indexes for you. 
This may make the process less explicit for both users and drizzle-kit push commands`, + ), + }; + + for (const table of Object.values(curSchema.tables)) { + for (const column of Object.values(table.columns)) { + if (column.type === 'serial') { + if (!usedSuggestions.includes('serial')) { + suggestions.push(suggestionTypes['serial']); + } + + const uniqueForSerial = Object.values( + prevSchema.tables[table.name].uniqueConstraints, + ).find((it) => it.columns[0] === column.name); + + suggestions.push( + `\n` + + withStyle.suggestion( + `We are suggesting to change ${ + chalk.blue( + column.name, + ) + } column in ${ + chalk.blueBright( + table.name, + ) + } table from serial to bigint unsigned\n\n${ + chalk.blueBright( + `bigint("${column.name}", { mode: "number", unsigned: true }).notNull().autoincrement().unique(${ + uniqueForSerial?.name ? `"${uniqueForSerial?.name}"` : '' + })`, + ) + }`, + ), + ); + } + } + } + + return suggestions; +} + +// Intersect with prepareAnMigrate +export const prepareSingleStorePush = async ( + schemaPath: string | string[], + snapshot: SingleStoreSchema, +) => { + try { + const { prev, cur } = await prepareSingleStoreDbPushSnapshot( + snapshot, + schemaPath, + ); + + const validatedPrev = singlestoreSchema.parse(prev); + const validatedCur = singlestoreSchema.parse(cur); + + const squashedPrev = squashSingleStoreScheme(validatedPrev); + const squashedCur = squashSingleStoreScheme(validatedCur); + + const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + 'push', + ); + + return { sqlStatements, statements, validatedCur, validatedPrev }; + } catch (e) { + console.error(e); + process.exit(1); + } +}; + +export const prepareAndMigrateSingleStore = async (config: GenerateConfig) => { + const outFolder = config.out; + const schemaPath = config.schema; + + try { + // TODO: remove + assertV1OutFolder(outFolder); + + const 
{ snapshots, journal } = prepareMigrationFolder(outFolder, 'singlestore'); + const { prev, cur, custom } = await prepareSingleStoreMigrationSnapshot( + snapshots, + schemaPath, + ); + + const validatedPrev = singlestoreSchema.parse(prev); + const validatedCur = singlestoreSchema.parse(cur); + + if (config.custom) { + writeResult({ + cur: custom, + sqlStatements: [], + journal, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + type: 'custom', + prefixMode: config.prefix, + }); + return; + } + + const squashedPrev = squashSingleStoreScheme(validatedPrev); + const squashedCur = squashSingleStoreScheme(validatedCur); + + const { sqlStatements, statements, _meta } = await applySingleStoreSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + ); + + writeResult({ + cur, + sqlStatements, + journal, + _meta, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + prefixMode: config.prefix, + }); + } catch (e) { + console.error(e); + } +}; + +export const prepareAndMigrateSqlite = async (config: GenerateConfig) => { + const outFolder = config.out; + const schemaPath = config.schema; + + try { + assertV1OutFolder(outFolder); + + const { snapshots, journal } = prepareMigrationFolder(outFolder, 'sqlite'); + const { prev, cur, custom } = await prepareSqliteMigrationSnapshot( + snapshots, + schemaPath, + ); + + const validatedPrev = sqliteSchema.parse(prev); + const validatedCur = sqliteSchema.parse(cur); + + if (config.custom) { + writeResult({ + cur: custom, + sqlStatements: [], + journal, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + bundle: config.bundle, + type: 'custom', + prefixMode: config.prefix, + }); + return; + } + + const squashedPrev = squashSqliteScheme(validatedPrev); + const squashedCur = squashSqliteScheme(validatedCur); + + const { sqlStatements, _meta } = await applySqliteSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + 
columnsResolver, + validatedPrev, + validatedCur, + ); + + writeResult({ + cur, + sqlStatements, + journal, + _meta, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + bundle: config.bundle, + prefixMode: config.prefix, + }); + } catch (e) { + console.error(e); + } +}; + +export const prepareSQLitePush = async ( + schemaPath: string | string[], + snapshot: SQLiteSchema, +) => { + const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath); + + const validatedPrev = sqliteSchema.parse(prev); + const validatedCur = sqliteSchema.parse(cur); + + const squashedPrev = squashSqliteScheme(validatedPrev, 'push'); + const squashedCur = squashSqliteScheme(validatedCur, 'push'); + + const { sqlStatements, statements, _meta } = await applySqliteSnapshotsDiff( + squashedPrev, + squashedCur, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + 'push', + ); + + return { + sqlStatements, + statements, + squashedPrev, + squashedCur, + meta: _meta, + }; +}; + +const freeeeeeze = (obj: any) => { + Object.freeze(obj); + for (let key in obj) { + if (obj.hasOwnProperty(key) && typeof obj[key] === 'object') { + freeeeeeze(obj[key]); + } + } +}; + +export const promptColumnsConflicts = async ( + tableName: string, + newColumns: T[], + missingColumns: T[], +) => { + if (newColumns.length === 0 || missingColumns.length === 0) { + return { created: newColumns, renamed: [], deleted: missingColumns }; + } + const result: { created: T[]; renamed: { from: T; to: T }[]; deleted: T[] } = { + created: [], + renamed: [], + deleted: [], + }; + + let index = 0; + let leftMissing = [...missingColumns]; + + do { + const created = newColumns[index]; + + const renames: RenamePropmtItem[] = leftMissing.map((it) => { + return { from: it, to: created }; + }); + + const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; + + const { status, data } = await render( + new ResolveColumnSelect(tableName, created, promptData), + ); + if (status === 
'aborted') { + console.error('ERROR'); + process.exit(1); + } + + if (isRenamePromptItem(data)) { + console.log( + `${chalk.yellow('~')} ${data.from.name} › ${data.to.name} ${ + chalk.gray( + 'column will be renamed', + ) + }`, + ); + result.renamed.push(data); + // this will make [item1, undefined, item2] + delete leftMissing[leftMissing.indexOf(data.from)]; + // this will make [item1, item2] + leftMissing = leftMissing.filter(Boolean); + } else { + console.log( + `${chalk.green('+')} ${data.name} ${ + chalk.gray( + 'column will be created', + ) + }`, + ); + result.created.push(created); + } + index += 1; + } while (index < newColumns.length); + console.log( + chalk.gray(`--- all columns conflicts in ${tableName} table resolved ---\n`), + ); + + result.deleted.push(...leftMissing); + return result; +}; + +export const promptNamedWithSchemasConflict = async ( + newItems: T[], + missingItems: T[], + entity: 'table' | 'enum' | 'sequence', +): Promise<{ + created: T[]; + renamed: { from: T; to: T }[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + deleted: T[]; +}> => { + if (missingItems.length === 0 || newItems.length === 0) { + return { + created: newItems, + renamed: [], + moved: [], + deleted: missingItems, + }; + } + + const result: { + created: T[]; + renamed: { from: T; to: T }[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + deleted: T[]; + } = { created: [], renamed: [], moved: [], deleted: [] }; + let index = 0; + let leftMissing = [...missingItems]; + do { + const created = newItems[index]; + const renames: RenamePropmtItem[] = leftMissing.map((it) => { + return { from: it, to: created }; + }); + + const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; + + const { status, data } = await render( + new ResolveSelect(created, promptData, entity), + ); + if (status === 'aborted') { + console.error('ERROR'); + process.exit(1); + } + + if (isRenamePromptItem(data)) { + const schemaFromPrefix = 
!data.from.schema || data.from.schema === 'public' + ? '' + : `${data.from.schema}.`; + const schemaToPrefix = !data.to.schema || data.to.schema === 'public' + ? '' + : `${data.to.schema}.`; + + console.log( + `${chalk.yellow('~')} ${schemaFromPrefix}${data.from.name} › ${schemaToPrefix}${data.to.name} ${ + chalk.gray( + `${entity} will be renamed/moved`, + ) + }`, + ); + + if (data.from.name !== data.to.name) { + result.renamed.push(data); + } + + if (data.from.schema !== data.to.schema) { + result.moved.push({ + name: data.from.name, + schemaFrom: data.from.schema || 'public', + schemaTo: data.to.schema || 'public', + }); + } + + delete leftMissing[leftMissing.indexOf(data.from)]; + leftMissing = leftMissing.filter(Boolean); + } else { + console.log( + `${chalk.green('+')} ${data.name} ${ + chalk.gray( + `${entity} will be created`, + ) + }`, + ); + result.created.push(created); + } + index += 1; + } while (index < newItems.length); + console.log(chalk.gray(`--- all ${entity} conflicts resolved ---\n`)); + result.deleted.push(...leftMissing); + return result; +}; + +export const promptSchemasConflict = async ( + newSchemas: T[], + missingSchemas: T[], +): Promise<{ created: T[]; renamed: { from: T; to: T }[]; deleted: T[] }> => { + if (missingSchemas.length === 0 || newSchemas.length === 0) { + return { created: newSchemas, renamed: [], deleted: missingSchemas }; + } + + const result: { created: T[]; renamed: { from: T; to: T }[]; deleted: T[] } = { + created: [], + renamed: [], + deleted: [], + }; + let index = 0; + let leftMissing = [...missingSchemas]; + do { + const created = newSchemas[index]; + const renames: RenamePropmtItem[] = leftMissing.map((it) => { + return { from: it, to: created }; + }); + + const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; + + const { status, data } = await render( + new ResolveSchemasSelect(created, promptData), + ); + if (status === 'aborted') { + console.error('ERROR'); + process.exit(1); + } + + if 
(isRenamePromptItem(data)) { + console.log( + `${chalk.yellow('~')} ${data.from.name} › ${data.to.name} ${ + chalk.gray( + 'schema will be renamed', + ) + }`, + ); + result.renamed.push(data); + delete leftMissing[leftMissing.indexOf(data.from)]; + leftMissing = leftMissing.filter(Boolean); + } else { + console.log( + `${chalk.green('+')} ${data.name} ${ + chalk.gray( + 'schema will be created', + ) + }`, + ); + result.created.push(created); + } + index += 1; + } while (index < newSchemas.length); + console.log(chalk.gray('--- all schemas conflicts resolved ---\n')); + result.deleted.push(...leftMissing); + return result; +}; + +export const BREAKPOINT = '--> statement-breakpoint\n'; + +export const writeResult = ({ + cur, + sqlStatements, + journal, + _meta = { + columns: {}, + schemas: {}, + tables: {}, + }, + outFolder, + breakpoints, + name, + bundle = false, + type = 'none', + prefixMode, +}: { + cur: CommonSchema; + sqlStatements: string[]; + journal: Journal; + _meta?: any; + outFolder: string; + breakpoints: boolean; + prefixMode: Prefix; + name?: string; + bundle?: boolean; + type?: 'introspect' | 'custom' | 'none'; +}) => { + if (type === 'none') { + console.log(schema(cur)); + + if (sqlStatements.length === 0) { + console.log('No schema changes, nothing to migrate 😴'); + return; + } + } + + // append entry to _migrations.json + // append entry to _journal.json->entries + // dialect in _journal.json + // append sql file to out folder + // append snapshot file to meta folder + const lastEntryInJournal = journal.entries[journal.entries.length - 1]; + const idx = typeof lastEntryInJournal === 'undefined' ? 
0 : lastEntryInJournal.idx + 1; + + const { prefix, tag } = prepareMigrationMetadata(idx, prefixMode, name); + + const toSave = JSON.parse(JSON.stringify(cur)); + toSave['_meta'] = _meta; + + // todo: save results to a new migration folder + const metaFolderPath = join(outFolder, 'meta'); + const metaJournal = join(metaFolderPath, '_journal.json'); + + fs.writeFileSync( + join(metaFolderPath, `${prefix}_snapshot.json`), + JSON.stringify(toSave, null, 2), + ); + + const sqlDelimiter = breakpoints ? BREAKPOINT : '\n'; + let sql = sqlStatements.join(sqlDelimiter); + + if (type === 'introspect') { + sql = + `-- Current sql file was generated after introspecting the database\n-- If you want to run this migration please uncomment this code before executing migrations\n/*\n${sql}\n*/`; + } + + if (type === 'custom') { + console.log('Prepared empty file for your custom SQL migration!'); + sql = '-- Custom SQL migration file, put you code below! --'; + } + + journal.entries.push({ + idx, + version: cur.version, + when: +new Date(), + tag, + breakpoints: breakpoints, + }); + + fs.writeFileSync(metaJournal, JSON.stringify(journal, null, 2)); + + fs.writeFileSync(`${outFolder}/${tag}.sql`, sql); + + // js file with .sql imports for React Native / Expo + if (bundle) { + const js = embeddedMigrations(journal); + fs.writeFileSync(`${outFolder}/migrations.js`, js); + } + + render( + `[${ + chalk.green( + '✓', + ) + }] Your SQL migration file ➜ ${ + chalk.bold.underline.blue( + path.join(`${outFolder}/${tag}.sql`), + ) + } 🚀`, + ); +}; + +export const embeddedMigrations = (journal: Journal) => { + let content = + '// This file is required for Expo/React Native SQLite migrations - https://orm.drizzle.team/quick-sqlite/expo\n\n'; + content += "import journal from './meta/_journal.json';\n"; + journal.entries.forEach((entry) => { + content += `import m${entry.idx.toString().padStart(4, '0')} from './${entry.tag}.sql';\n`; + }); + + content += ` + export default { + journal, + 
migrations: { + ${ + journal.entries + .map((it) => `m${it.idx.toString().padStart(4, '0')}`) + .join(',\n') + } + } + } + `; + return content; +}; + +export const prepareSnapshotFolderName = () => { + const now = new Date(); + return `${now.getFullYear()}${two(now.getUTCMonth() + 1)}${ + two( + now.getUTCDate(), + ) + }${two(now.getUTCHours())}${two(now.getUTCMinutes())}${ + two( + now.getUTCSeconds(), + ) + }`; +}; + +const two = (input: number): string => { + return input.toString().padStart(2, '0'); +}; diff --git a/drizzle-kit/src/cli/commands/mysqlIntrospect.ts b/drizzle-kit/src/cli/commands/mysqlIntrospect.ts new file mode 100644 index 000000000..f0132be64 --- /dev/null +++ b/drizzle-kit/src/cli/commands/mysqlIntrospect.ts @@ -0,0 +1,53 @@ +import { renderWithTask } from 'hanji'; +import { Minimatch } from 'minimatch'; +import { originUUID } from '../../global'; +import type { MySqlSchema } from '../../serializer/mysqlSchema'; +import { fromDatabase } from '../../serializer/mysqlSerializer'; +import type { DB } from '../../utils'; +import { ProgressView } from '../views'; + +export const mysqlPushIntrospect = async ( + db: DB, + databaseName: string, + filters: string[], +) => { + const matchers = filters.map((it) => { + return new Minimatch(it); + }); + + const filter = (tableName: string) => { + if (matchers.length === 0) return true; + + let flags: boolean[] = []; + + for (let matcher of matchers) { + if (matcher.negate) { + if (!matcher.match(tableName)) { + flags.push(false); + } + } + + if (matcher.match(tableName)) { + flags.push(true); + } + } + + if (flags.length > 0) { + return flags.every(Boolean); + } + return false; + }; + + const progress = new ProgressView( + 'Pulling schema from database...', + 'Pulling schema from database...', + ); + const res = await renderWithTask( + progress, + fromDatabase(db, databaseName, filter), + ); + + const schema = { id: originUUID, prevId: '', ...res } as MySqlSchema; + const { internal, 
...schemaWithoutInternals } = schema; + return { schema: schemaWithoutInternals }; +}; diff --git a/drizzle-kit/src/cli/commands/mysqlPushUtils.ts b/drizzle-kit/src/cli/commands/mysqlPushUtils.ts new file mode 100644 index 000000000..db1134e63 --- /dev/null +++ b/drizzle-kit/src/cli/commands/mysqlPushUtils.ts @@ -0,0 +1,352 @@ +import chalk from 'chalk'; +import { render } from 'hanji'; +import { TypeOf } from 'zod'; +import { JsonAlterColumnTypeStatement, JsonStatement } from '../../jsonStatements'; +import { mysqlSchema, MySqlSquasher } from '../../serializer/mysqlSchema'; +import type { DB } from '../../utils'; +import { Select } from '../selector-ui'; +import { withStyle } from '../validations/outputs'; + +export const filterStatements = ( + statements: JsonStatement[], + currentSchema: TypeOf, + prevSchema: TypeOf, +) => { + return statements.filter((statement) => { + if (statement.type === 'alter_table_alter_column_set_type') { + // Don't need to handle it on migrations step and introspection + // but for both it should be skipped + if ( + statement.oldDataType.startsWith('tinyint') + && statement.newDataType.startsWith('boolean') + ) { + return false; + } + + if ( + statement.oldDataType.startsWith('bigint unsigned') + && statement.newDataType.startsWith('serial') + ) { + return false; + } + + if ( + statement.oldDataType.startsWith('serial') + && statement.newDataType.startsWith('bigint unsigned') + ) { + return false; + } + } else if (statement.type === 'alter_table_alter_column_set_default') { + if ( + statement.newDefaultValue === false + && statement.oldDefaultValue === 0 + && statement.newDataType === 'boolean' + ) { + return false; + } + if ( + statement.newDefaultValue === true + && statement.oldDefaultValue === 1 + && statement.newDataType === 'boolean' + ) { + return false; + } + } else if (statement.type === 'delete_unique_constraint') { + const unsquashed = MySqlSquasher.unsquashUnique(statement.data); + // only if constraint was removed from a 
serial column, than treat it as removed + // const serialStatement = statements.find( + // (it) => it.type === "alter_table_alter_column_set_type" + // ) as JsonAlterColumnTypeStatement; + // if ( + // serialStatement?.oldDataType.startsWith("bigint unsigned") && + // serialStatement?.newDataType.startsWith("serial") && + // serialStatement.columnName === + // MySqlSquasher.unsquashUnique(statement.data).columns[0] + // ) { + // return false; + // } + // Check if uniqueindex was only on this column, that is serial + + // if now serial and was not serial and was unique index + if ( + unsquashed.columns.length === 1 + && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] + .type === 'serial' + && prevSchema.tables[statement.tableName].columns[unsquashed.columns[0]] + .type === 'serial' + && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] + .name === unsquashed.columns[0] + ) { + return false; + } + } else if (statement.type === 'alter_table_alter_column_drop_notnull') { + // only if constraint was removed from a serial column, than treat it as removed + const serialStatement = statements.find( + (it) => it.type === 'alter_table_alter_column_set_type', + ) as JsonAlterColumnTypeStatement; + if ( + serialStatement?.oldDataType.startsWith('bigint unsigned') + && serialStatement?.newDataType.startsWith('serial') + && serialStatement.columnName === statement.columnName + && serialStatement.tableName === statement.tableName + ) { + return false; + } + if (statement.newDataType === 'serial' && !statement.columnNotNull) { + return false; + } + if (statement.columnAutoIncrement) { + return false; + } + } + + return true; + }); +}; + +export const logSuggestionsAndReturn = async ( + db: DB, + statements: JsonStatement[], + json2: TypeOf, +) => { + let shouldAskForApprove = false; + const statementsToExecute: string[] = []; + const infoToPrint: string[] = []; + + const tablesToRemove: string[] = []; + const columnsToRemove: 
string[] = []; + const schemasToRemove: string[] = []; + const tablesToTruncate: string[] = []; + + for (const statement of statements) { + if (statement.type === 'drop_table') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + statement.tableName, + ) + } table with ${count} items`, + ); + tablesToRemove.push(statement.tableName); + shouldAskForApprove = true; + } + } else if (statement.type === 'alter_table_drop_column') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + statement.columnName, + ) + } column in ${statement.tableName} table with ${count} items`, + ); + columnsToRemove.push(`${statement.tableName}_${statement.columnName}`); + shouldAskForApprove = true; + } + } else if (statement.type === 'drop_schema') { + const res = await db.query( + `select count(*) as count from information_schema.tables where table_schema = \`${statement.name}\`;`, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + statement.name, + ) + } schema with ${count} tables`, + ); + schemasToRemove.push(statement.name); + shouldAskForApprove = true; + } + } else if (statement.type === 'alter_table_alter_column_set_type') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to change ${ + chalk.underline( + statement.columnName, + ) + } column type from ${ + chalk.underline( + statement.oldDataType, + ) + } to ${chalk.underline(statement.newDataType)} with ${count} items`, + ); + statementsToExecute.push(`truncate 
table ${statement.tableName};`); + tablesToTruncate.push(statement.tableName); + shouldAskForApprove = true; + } + } else if (statement.type === 'alter_table_alter_column_drop_default') { + if (statement.columnNotNull) { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to remove default value from ${ + chalk.underline( + statement.columnName, + ) + } not-null column with ${count} items`, + ); + + tablesToTruncate.push(statement.tableName); + statementsToExecute.push(`truncate table ${statement.tableName};`); + + shouldAskForApprove = true; + } + } + // shouldAskForApprove = true; + } else if (statement.type === 'alter_table_alter_column_set_notnull') { + if (typeof statement.columnDefault === 'undefined') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to set not-null constraint to ${ + chalk.underline( + statement.columnName, + ) + } column without default, which contains ${count} items`, + ); + + tablesToTruncate.push(statement.tableName); + statementsToExecute.push(`truncate table ${statement.tableName};`); + + shouldAskForApprove = true; + } + } + } else if (statement.type === 'alter_table_alter_column_drop_pk') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + + // if drop pk and json2 has autoincrement in table -> exit process with error + if ( + Object.values(json2.tables[statement.tableName].columns).filter( + (column) => column.autoincrement, + ).length > 0 + ) { + console.log( + `${ + withStyle.errorWarning( + `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. 
As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`, + ) + }`, + ); + process.exit(1); + } + + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to change ${ + chalk.underline( + statement.tableName, + ) + } primary key. This statements may fail and you table may left without primary key`, + ); + + tablesToTruncate.push(statement.tableName); + shouldAskForApprove = true; + } + } else if (statement.type === 'delete_composite_pk') { + // if drop pk and json2 has autoincrement in table -> exit process with error + if ( + Object.values(json2.tables[statement.tableName].columns).filter( + (column) => column.autoincrement, + ).length > 0 + ) { + console.log( + `${ + withStyle.errorWarning( + `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. 
Make sure to remove autoincrement from ${statement.tableName} table`, + ) + }`, + ); + process.exit(1); + } + } else if (statement.type === 'alter_table_add_column') { + if ( + statement.column.notNull + && typeof statement.column.default === 'undefined' + ) { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to add not-null ${ + chalk.underline( + statement.column.name, + ) + } column without default value, which contains ${count} items`, + ); + + tablesToTruncate.push(statement.tableName); + statementsToExecute.push(`truncate table ${statement.tableName};`); + + shouldAskForApprove = true; + } + } + } else if (statement.type === 'create_unique_constraint') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + const unsquashedUnique = MySqlSquasher.unsquashUnique(statement.data); + console.log( + `· You're about to add ${ + chalk.underline( + unsquashedUnique.name, + ) + } unique constraint to the table, which contains ${count} items. If this statement fails, you will receive an error from the database. 
Do you want to truncate ${ + chalk.underline( + statement.tableName, + ) + } table?\n`, + ); + const { status, data } = await render( + new Select([ + 'No, add the constraint without truncating the table', + `Yes, truncate the table`, + ]), + ); + if (data?.index === 1) { + tablesToTruncate.push(statement.tableName); + statementsToExecute.push(`truncate table ${statement.tableName};`); + shouldAskForApprove = true; + } + } + } + } + + return { + statementsToExecute, + shouldAskForApprove, + infoToPrint, + columnsToRemove: [...new Set(columnsToRemove)], + schemasToRemove: [...new Set(schemasToRemove)], + tablesToTruncate: [...new Set(tablesToTruncate)], + tablesToRemove: [...new Set(tablesToRemove)], + }; +}; diff --git a/drizzle-kit/src/cli/commands/mysqlUp.ts b/drizzle-kit/src/cli/commands/mysqlUp.ts new file mode 100644 index 000000000..6c7d2ebe5 --- /dev/null +++ b/drizzle-kit/src/cli/commands/mysqlUp.ts @@ -0,0 +1,101 @@ +import chalk from 'chalk'; +import fs, { writeFileSync } from 'fs'; +import path from 'path'; +import { Column, MySqlSchema, MySqlSchemaV4, MySqlSchemaV5, mysqlSchemaV5, Table } from '../../serializer/mysqlSchema'; +import { prepareOutFolder, validateWithReport } from '../../utils'; + +export const upMysqlHandler = (out: string) => {}; + +export const upMySqlHandlerV4toV5 = (obj: MySqlSchemaV4): MySqlSchemaV5 => { + const mappedTables: Record = {}; + + for (const [key, table] of Object.entries(obj.tables)) { + const mappedColumns: Record = {}; + for (const [ckey, column] of Object.entries(table.columns)) { + let newDefault: any = column.default; + let newType: string = column.type; + let newAutoIncrement: boolean | undefined = column.autoincrement; + + if (column.type.toLowerCase().startsWith('datetime')) { + if (typeof column.default !== 'undefined') { + if (column.default.startsWith("'") && column.default.endsWith("'")) { + newDefault = `'${ + column.default + .substring(1, column.default.length - 1) + .replace('T', ' ') + .slice(0, 23) + 
}'`; + } else { + newDefault = column.default.replace('T', ' ').slice(0, 23); + } + } + + newType = column.type.toLowerCase().replace('datetime (', 'datetime('); + } else if (column.type.toLowerCase() === 'date') { + if (typeof column.default !== 'undefined') { + if (column.default.startsWith("'") && column.default.endsWith("'")) { + newDefault = `'${ + column.default + .substring(1, column.default.length - 1) + .split('T')[0] + }'`; + } else { + newDefault = column.default.split('T')[0]; + } + } + newType = column.type.toLowerCase().replace('date (', 'date('); + } else if (column.type.toLowerCase().startsWith('timestamp')) { + if (typeof column.default !== 'undefined') { + if (column.default.startsWith("'") && column.default.endsWith("'")) { + newDefault = `'${ + column.default + .substring(1, column.default.length - 1) + .replace('T', ' ') + .slice(0, 23) + }'`; + } else { + newDefault = column.default.replace('T', ' ').slice(0, 23); + } + } + newType = column.type + .toLowerCase() + .replace('timestamp (', 'timestamp('); + } else if (column.type.toLowerCase().startsWith('time')) { + newType = column.type.toLowerCase().replace('time (', 'time('); + } else if (column.type.toLowerCase().startsWith('decimal')) { + newType = column.type.toLowerCase().replace(', ', ','); + } else if (column.type.toLowerCase().startsWith('enum')) { + newType = column.type.toLowerCase(); + } else if (column.type.toLowerCase().startsWith('serial')) { + newAutoIncrement = true; + } + mappedColumns[ckey] = { + ...column, + default: newDefault, + type: newType, + autoincrement: newAutoIncrement, + }; + } + + mappedTables[key] = { + ...table, + columns: mappedColumns, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }; + } + + return { + version: '5', + dialect: obj.dialect, + id: obj.id, + prevId: obj.prevId, + tables: mappedTables, + schemas: obj.schemas, + _meta: { + schemas: {} as Record, + tables: {} as Record, + columns: {} as Record, + }, + }; +}; diff --git 
a/drizzle-kit/src/cli/commands/pgIntrospect.ts b/drizzle-kit/src/cli/commands/pgIntrospect.ts new file mode 100644 index 000000000..dbd3ba238 --- /dev/null +++ b/drizzle-kit/src/cli/commands/pgIntrospect.ts @@ -0,0 +1,52 @@ +import { renderWithTask } from 'hanji'; +import { Minimatch } from 'minimatch'; +import { originUUID } from '../../global'; +import type { PgSchema } from '../../serializer/pgSchema'; +import { fromDatabase } from '../../serializer/pgSerializer'; +import type { DB } from '../../utils'; +import { ProgressView } from '../views'; + +export const pgPushIntrospect = async ( + db: DB, + filters: string[], + schemaFilters: string[], +) => { + const matchers = filters.map((it) => { + return new Minimatch(it); + }); + + const filter = (tableName: string) => { + if (matchers.length === 0) return true; + + let flags: boolean[] = []; + + for (let matcher of matchers) { + if (matcher.negate) { + if (!matcher.match(tableName)) { + flags.push(false); + } + } + + if (matcher.match(tableName)) { + flags.push(true); + } + } + + if (flags.length > 0) { + return flags.every(Boolean); + } + return false; + }; + const progress = new ProgressView( + 'Pulling schema from database...', + 'Pulling schema from database...', + ); + const res = await renderWithTask( + progress, + fromDatabase(db, filter, schemaFilters), + ); + + const schema = { id: originUUID, prevId: '', ...res } as PgSchema; + const { internal, ...schemaWithoutInternals } = schema; + return { schema: schemaWithoutInternals }; +}; diff --git a/drizzle-kit/src/cli/commands/pgPushUtils.ts b/drizzle-kit/src/cli/commands/pgPushUtils.ts new file mode 100644 index 000000000..eee0dc954 --- /dev/null +++ b/drizzle-kit/src/cli/commands/pgPushUtils.ts @@ -0,0 +1,348 @@ +import chalk from 'chalk'; +import { render } from 'hanji'; +import type { JsonStatement } from '../../jsonStatements'; +import { PgSquasher } from '../../serializer/pgSchema'; +import { fromJson } from '../../sqlgenerator'; +import type { DB } 
from '../../utils'; +import { Select } from '../selector-ui'; + +// export const filterStatements = (statements: JsonStatement[]) => { +// return statements.filter((statement) => { +// if (statement.type === "alter_table_alter_column_set_type") { +// // Don't need to handle it on migrations step and introspection +// // but for both it should be skipped +// if ( +// statement.oldDataType.startsWith("tinyint") && +// statement.newDataType.startsWith("boolean") +// ) { +// return false; +// } +// } else if (statement.type === "alter_table_alter_column_set_default") { +// if ( +// statement.newDefaultValue === false && +// statement.oldDefaultValue === 0 && +// statement.newDataType === "boolean" +// ) { +// return false; +// } +// if ( +// statement.newDefaultValue === true && +// statement.oldDefaultValue === 1 && +// statement.newDataType === "boolean" +// ) { +// return false; +// } +// } +// return true; +// }); +// }; + +function concatSchemaAndTableName(schema: string | undefined, table: string) { + return schema ? `"${schema}"."${table}"` : `"${table}"`; +} + +function tableNameWithSchemaFrom( + schema: string | undefined, + tableName: string, + renamedSchemas: Record, + renamedTables: Record, +) { + const newSchemaName = schema + ? renamedSchemas[schema] + ? renamedSchemas[schema] + : schema + : undefined; + + const newTableName = renamedTables[ + concatSchemaAndTableName(newSchemaName, tableName) + ] + ? 
renamedTables[concatSchemaAndTableName(newSchemaName, tableName)] + : tableName; + + return concatSchemaAndTableName(newSchemaName, newTableName); +} + +export const pgSuggestions = async (db: DB, statements: JsonStatement[]) => { + let shouldAskForApprove = false; + const statementsToExecute: string[] = []; + const infoToPrint: string[] = []; + + const tablesToRemove: string[] = []; + const columnsToRemove: string[] = []; + const schemasToRemove: string[] = []; + const tablesToTruncate: string[] = []; + + let renamedSchemas: Record = {}; + let renamedTables: Record = {}; + + for (const statement of statements) { + if (statement.type === 'rename_schema') { + renamedSchemas[statement.to] = statement.from; + } else if (statement.type === 'rename_table') { + renamedTables[ + concatSchemaAndTableName(statement.toSchema, statement.tableNameTo) + ] = statement.tableNameFrom; + } else if (statement.type === 'drop_table') { + const res = await db.query( + `select count(*) as count from ${ + tableNameWithSchemaFrom( + statement.schema, + statement.tableName, + renamedSchemas, + renamedTables, + ) + }`, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + statement.tableName, + ) + } table with ${count} items`, + ); + // statementsToExecute.push( + // `truncate table ${tableNameWithSchemaFrom(statement)} cascade;` + // ); + tablesToRemove.push(statement.tableName); + shouldAskForApprove = true; + } + } else if (statement.type === 'alter_table_drop_column') { + const res = await db.query( + `select count(*) as count from ${ + tableNameWithSchemaFrom( + statement.schema, + statement.tableName, + renamedSchemas, + renamedTables, + ) + }`, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + statement.columnName, + ) + } column in ${statement.tableName} table with ${count} items`, + ); + 
columnsToRemove.push(`${statement.tableName}_${statement.columnName}`); + shouldAskForApprove = true; + } + } else if (statement.type === 'drop_schema') { + const res = await db.query( + `select count(*) as count from information_schema.tables where table_schema = '${statement.name}';`, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + statement.name, + ) + } schema with ${count} tables`, + ); + schemasToRemove.push(statement.name); + shouldAskForApprove = true; + } + } else if (statement.type === 'alter_table_alter_column_set_type') { + const res = await db.query( + `select count(*) as count from ${ + tableNameWithSchemaFrom( + statement.schema, + statement.tableName, + renamedSchemas, + renamedTables, + ) + }`, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to change ${ + chalk.underline( + statement.columnName, + ) + } column type from ${ + chalk.underline( + statement.oldDataType, + ) + } to ${chalk.underline(statement.newDataType)} with ${count} items`, + ); + statementsToExecute.push( + `truncate table ${ + tableNameWithSchemaFrom( + statement.schema, + statement.tableName, + renamedSchemas, + renamedTables, + ) + } cascade;`, + ); + tablesToTruncate.push(statement.tableName); + shouldAskForApprove = true; + } + } else if (statement.type === 'alter_table_alter_column_drop_pk') { + const res = await db.query( + `select count(*) as count from ${ + tableNameWithSchemaFrom( + statement.schema, + statement.tableName, + renamedSchemas, + renamedTables, + ) + }`, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to change ${ + chalk.underline( + statement.tableName, + ) + } primary key. 
This statement may fail and your table may be left without a primary key`, + ); + + tablesToTruncate.push(statement.tableName); + shouldAskForApprove = true; + } + + const tableNameWithSchema = tableNameWithSchemaFrom( + statement.schema, + statement.tableName, + renamedSchemas, + renamedTables, + ); + + const pkNameResponse = await db.query( + `SELECT constraint_name FROM information_schema.table_constraints + WHERE table_schema = '${ + typeof statement.schema === 'undefined' || statement.schema === '' + ? 'public' + : statement.schema + }' + AND table_name = '${statement.tableName}' + AND constraint_type = 'PRIMARY KEY';`, + ); + + statementsToExecute.push( + `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${pkNameResponse[0].constraint_name}"`, + ); + // we will generate statement for drop pk here and not after all if-else statements + continue; + } else if (statement.type === 'alter_table_add_column') { + if ( + statement.column.notNull + && typeof statement.column.default === 'undefined' + ) { + const res = await db.query( + `select count(*) as count from ${ + tableNameWithSchemaFrom( + statement.schema, + statement.tableName, + renamedSchemas, + renamedTables, + ) + }`, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to add not-null ${ + chalk.underline( + statement.column.name, + ) + } column without default value, which contains ${count} items`, + ); + + tablesToTruncate.push(statement.tableName); + statementsToExecute.push( + `truncate table ${ + tableNameWithSchemaFrom( + statement.schema, + statement.tableName, + renamedSchemas, + renamedTables, + ) + } cascade;`, + ); + + shouldAskForApprove = true; + } + } + } else if (statement.type === 'create_unique_constraint') { + const res = await db.query( + `select count(*) as count from ${ + tableNameWithSchemaFrom( + statement.schema, + statement.tableName, + renamedSchemas, + renamedTables, + ) + }`, + ); + const count = Number(res[0].count); + if (count > 
0) { + const unsquashedUnique = PgSquasher.unsquashUnique(statement.data); + console.log( + `· You're about to add ${ + chalk.underline( + unsquashedUnique.name, + ) + } unique constraint to the table, which contains ${count} items. If this statement fails, you will receive an error from the database. Do you want to truncate ${ + chalk.underline( + statement.tableName, + ) + } table?\n`, + ); + const { status, data } = await render( + new Select([ + 'No, add the constraint without truncating the table', + `Yes, truncate the table`, + ]), + ); + if (data?.index === 1) { + tablesToTruncate.push(statement.tableName); + statementsToExecute.push( + `truncate table ${ + tableNameWithSchemaFrom( + statement.schema, + statement.tableName, + renamedSchemas, + renamedTables, + ) + } cascade;`, + ); + shouldAskForApprove = true; + } + } + } + const stmnt = fromJson([statement], 'postgresql'); + if (typeof stmnt !== 'undefined') { + if (statement.type === 'drop_table') { + statementsToExecute.push( + `DROP TABLE ${ + concatSchemaAndTableName( + statement.schema, + statement.tableName, + ) + } CASCADE;`, + ); + } else { + statementsToExecute.push(...stmnt); + } + } + } + + return { + statementsToExecute, + shouldAskForApprove, + infoToPrint, + columnsToRemove: [...new Set(columnsToRemove)], + schemasToRemove: [...new Set(schemasToRemove)], + tablesToTruncate: [...new Set(tablesToTruncate)], + tablesToRemove: [...new Set(tablesToRemove)], + }; +}; diff --git a/drizzle-kit/src/cli/commands/pgUp.ts b/drizzle-kit/src/cli/commands/pgUp.ts new file mode 100644 index 000000000..f3faaeb62 --- /dev/null +++ b/drizzle-kit/src/cli/commands/pgUp.ts @@ -0,0 +1,176 @@ +import chalk from 'chalk'; +import { writeFileSync } from 'fs'; +import { + Column, + Index, + PgSchema, + PgSchemaV4, + PgSchemaV5, + pgSchemaV5, + PgSchemaV6, + pgSchemaV6, + Table, + TableV5, +} from '../../serializer/pgSchema'; +import { prepareOutFolder, validateWithReport } from '../../utils'; + +export const upPgHandler 
= (out: string) => { + const { snapshots } = prepareOutFolder(out, 'postgresql'); + const report = validateWithReport(snapshots, 'postgresql'); + + report.nonLatest + .map((it) => ({ + path: it, + raw: report.rawMap[it]!! as Record, + })) + .forEach((it) => { + const path = it.path; + + let resultV6 = it.raw; + if (it.raw.version === '5') { + resultV6 = updateUpToV6(it.raw); + } + + const result = updateUpToV7(resultV6); + + console.log(`[${chalk.green('✓')}] ${path}`); + + writeFileSync(path, JSON.stringify(result, null, 2)); + }); + + console.log("Everything's fine 🐶🔥"); +}; + +export const updateUpToV6 = (json: Record): PgSchemaV6 => { + const schema = pgSchemaV5.parse(json); + const tables = Object.fromEntries( + Object.entries(schema.tables).map((it) => { + const table = it[1]; + const schema = table.schema || 'public'; + return [`${schema}.${table.name}`, table]; + }), + ); + const enums = Object.fromEntries( + Object.entries(schema.enums).map((it) => { + const en = it[1]; + return [ + `public.${en.name}`, + { + name: en.name, + schema: 'public', + values: Object.values(en.values), + }, + ]; + }), + ); + return { + ...schema, + version: '6', + dialect: 'postgresql', + tables: tables, + enums, + }; +}; + +// Changed index format stored in snapshot for PostgreSQL in 0.22.0 +export const updateUpToV7 = (json: Record): PgSchema => { + const schema = pgSchemaV6.parse(json); + const tables = Object.fromEntries( + Object.entries(schema.tables).map((it) => { + const table = it[1]; + const mappedIndexes = Object.fromEntries( + Object.entries(table.indexes).map((idx) => { + const { columns, ...rest } = idx[1]; + const mappedColumns = columns.map((it) => { + return { + expression: it, + isExpression: false, + asc: true, + nulls: 'last', + opClass: undefined, + }; + }); + return [idx[0], { columns: mappedColumns, with: {}, ...rest }]; + }), + ); + return [it[0], { ...table, indexes: mappedIndexes }]; + }), + ); + + return { + ...schema, + version: '7', + dialect: 
'postgresql', + sequences: {}, + tables: tables, + }; +}; + +// major migration of folder structure, etc... +export const upPgHandlerV4toV5 = (obj: PgSchemaV4): PgSchemaV5 => { + const mappedTables: Record = {}; + + for (const [key, table] of Object.entries(obj.tables)) { + const mappedColumns: Record = {}; + for (const [ckey, column] of Object.entries(table.columns)) { + let newDefault: any = column.default; + let newType: string = column.type; + if (column.type.toLowerCase() === 'date') { + if (typeof column.default !== 'undefined') { + if (column.default.startsWith("'") && column.default.endsWith("'")) { + newDefault = `'${ + column.default + .substring(1, column.default.length - 1) + .split('T')[0] + }'`; + } else { + newDefault = column.default.split('T')[0]; + } + } + } else if (column.type.toLowerCase().startsWith('timestamp')) { + if (typeof column.default !== 'undefined') { + if (column.default.startsWith("'") && column.default.endsWith("'")) { + newDefault = `'${ + column.default + .substring(1, column.default.length - 1) + .replace('T', ' ') + .slice(0, 23) + }'`; + } else { + newDefault = column.default.replace('T', ' ').slice(0, 23); + } + } + newType = column.type + .toLowerCase() + .replace('timestamp (', 'timestamp('); + } else if (column.type.toLowerCase().startsWith('time')) { + newType = column.type.toLowerCase().replace('time (', 'time('); + } else if (column.type.toLowerCase().startsWith('interval')) { + newType = column.type.toLowerCase().replace(' (', '('); + } + mappedColumns[ckey] = { ...column, default: newDefault, type: newType }; + } + + mappedTables[key] = { + ...table, + columns: mappedColumns, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }; + } + + return { + version: '5', + dialect: obj.dialect, + id: obj.id, + prevId: obj.prevId, + tables: mappedTables, + enums: obj.enums, + schemas: obj.schemas, + _meta: { + schemas: {} as Record, + tables: {} as Record, + columns: {} as Record, + }, + }; +}; diff --git 
a/drizzle-kit/src/cli/commands/push.ts b/drizzle-kit/src/cli/commands/push.ts new file mode 100644 index 000000000..f1b6f7739 --- /dev/null +++ b/drizzle-kit/src/cli/commands/push.ts @@ -0,0 +1,527 @@ +import chalk from 'chalk'; +import { render } from 'hanji'; +import { fromJson } from '../../sqlgenerator'; +import { Select } from '../selector-ui'; +import type { MysqlCredentials } from '../validations/mysql'; +import { withStyle } from '../validations/outputs'; +import type { PostgresCredentials } from '../validations/postgres'; +import { SingleStoreCredentials } from '../validations/singlestore'; +import type { SqliteCredentials } from '../validations/sqlite'; +import { + filterStatements as mySqlFilterStatements, + logSuggestionsAndReturn as mySqlLogSuggestionsAndReturn, +} from './mysqlPushUtils'; +import { pgSuggestions } from './pgPushUtils'; +import { + filterStatements as singleStoreFilterStatements, + logSuggestionsAndReturn as singleStoreLogSuggestionsAndReturn, +} from './singlestorePushUtils'; +import { logSuggestionsAndReturn as sqliteSuggestions } from './sqlitePushUtils'; + +export const mysqlPush = async ( + schemaPath: string | string[], + credentials: MysqlCredentials, + tablesFilter: string[], + strict: boolean, + verbose: boolean, + force: boolean, +) => { + const { connectToMySQL } = await import('../connections'); + const { mysqlPushIntrospect } = await import('./mysqlIntrospect'); + + const { db, database } = await connectToMySQL(credentials); + + const { schema } = await mysqlPushIntrospect(db, database, tablesFilter); + const { prepareMySQLPush } = await import('./migrate'); + + const statements = await prepareMySQLPush(schemaPath, schema); + + const filteredStatements = mySqlFilterStatements( + statements.statements ?? 
[], + statements.validatedCur, + statements.validatedPrev, + ); + + try { + if (filteredStatements.length === 0) { + render(`[${chalk.blue('i')}] No changes detected`); + } else { + const { + shouldAskForApprove, + statementsToExecute, + columnsToRemove, + tablesToRemove, + tablesToTruncate, + infoToPrint, + schemasToRemove, + } = await mySqlLogSuggestionsAndReturn( + db, + filteredStatements, + statements.validatedCur, + ); + + const filteredSqlStatements = fromJson(filteredStatements, 'mysql'); + + const uniqueSqlStatementsToExecute: string[] = []; + statementsToExecute.forEach((ss) => { + if (!uniqueSqlStatementsToExecute.includes(ss)) { + uniqueSqlStatementsToExecute.push(ss); + } + }); + const uniqueFilteredSqlStatements: string[] = []; + filteredSqlStatements.forEach((ss) => { + if (!uniqueFilteredSqlStatements.includes(ss)) { + uniqueFilteredSqlStatements.push(ss); + } + }); + + if (verbose) { + console.log(); + console.log( + withStyle.warning('You are about to execute current statements:'), + ); + console.log(); + console.log( + [...uniqueSqlStatementsToExecute, ...uniqueFilteredSqlStatements] + .map((s) => chalk.blue(s)) + .join('\n'), + ); + console.log(); + } + + if (!force && strict) { + if (!shouldAskForApprove) { + const { status, data } = await render( + new Select(['No, abort', `Yes, I want to execute all statements`]), + ); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + } + + if (!force && shouldAskForApprove) { + console.log(withStyle.warning('Found data-loss statements:')); + console.log(infoToPrint.join('\n')); + console.log(); + console.log( + chalk.red.bold( + 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', + ), + ); + + console.log(chalk.white('Do you still want to push changes?')); + + const { status, data } = await render( + new Select([ + 'No, abort', + `Yes, I want to${ + tablesToRemove.length > 0 + ? 
` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` + : ' ' + }${ + columnsToRemove.length > 0 + ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` + : ' ' + }${ + tablesToTruncate.length > 0 + ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` + : '' + }` + .replace(/(^,)|(,$)/g, '') + .replace(/ +(?= )/g, ''), + ]), + ); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + for (const dStmnt of uniqueSqlStatementsToExecute) { + await db.query(dStmnt); + } + + for (const statement of uniqueFilteredSqlStatements) { + await db.query(statement); + } + if (filteredStatements.length > 0) { + render(`[${chalk.green('✓')}] Changes applied`); + } else { + render(`[${chalk.blue('i')}] No changes detected`); + } + } + } catch (e) { + console.log(e); + } +}; + +export const singlestorePush = async ( + schemaPath: string | string[], + credentials: SingleStoreCredentials, + tablesFilter: string[], + strict: boolean, + verbose: boolean, + force: boolean, +) => { + const { connectToSingleStore } = await import('../connections'); + const { singlestorePushIntrospect } = await import('./singlestoreIntrospect'); + + const { db, database } = await connectToSingleStore(credentials); + + const { schema } = await singlestorePushIntrospect(db, database, tablesFilter); + const { prepareSingleStorePush } = await import('./migrate'); + + const statements = await prepareSingleStorePush(schemaPath, schema); + + const filteredStatements = singleStoreFilterStatements( + statements.statements ?? 
[], + statements.validatedCur, + statements.validatedPrev, + ); + + try { + if (filteredStatements.length === 0) { + render(`[${chalk.blue('i')}] No changes detected`); + } else { + const { + shouldAskForApprove, + statementsToExecute, + columnsToRemove, + tablesToRemove, + tablesToTruncate, + infoToPrint, + schemasToRemove, + } = await singleStoreLogSuggestionsAndReturn( + db, + filteredStatements, + statements.validatedCur, + ); + + const filteredSqlStatements = fromJson(filteredStatements, 'singlestore'); + + const uniqueSqlStatementsToExecute: string[] = []; + statementsToExecute.forEach((ss) => { + if (!uniqueSqlStatementsToExecute.includes(ss)) { + uniqueSqlStatementsToExecute.push(ss); + } + }); + const uniqueFilteredSqlStatements: string[] = []; + filteredSqlStatements.forEach((ss) => { + if (!uniqueFilteredSqlStatements.includes(ss)) { + uniqueFilteredSqlStatements.push(ss); + } + }); + + if (verbose) { + console.log(); + // console.log(chalk.gray('Verbose logs:')); + console.log( + withStyle.warning('You are about to execute current statements:'), + ); + console.log(); + console.log( + [...uniqueSqlStatementsToExecute, ...uniqueFilteredSqlStatements] + .map((s) => chalk.blue(s)) + .join('\n'), + ); + console.log(); + } + + if (!force && strict) { + if (!shouldAskForApprove) { + const { status, data } = await render( + new Select(['No, abort', `Yes, I want to execute all statements`]), + ); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + } + + if (!force && shouldAskForApprove) { + console.log(withStyle.warning('Found data-loss statements:')); + console.log(infoToPrint.join('\n')); + console.log(); + console.log( + chalk.red.bold( + 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', + ), + ); + + console.log(chalk.white('Do you still want to push changes?')); + + const { status, data } = await render( + new Select([ + 'No, abort', + `Yes, I want to${ + tablesToRemove.length > 0 + ? 
` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` + : ' ' + }${ + columnsToRemove.length > 0 + ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` + : ' ' + }${ + tablesToTruncate.length > 0 + ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` + : '' + }` + .replace(/(^,)|(,$)/g, '') + .replace(/ +(?= )/g, ''), + ]), + ); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + for (const dStmnt of uniqueSqlStatementsToExecute) { + await db.query(dStmnt); + } + + for (const statement of uniqueFilteredSqlStatements) { + await db.query(statement); + } + if (filteredStatements.length > 0) { + render(`[${chalk.green('✓')}] Changes applied`); + } else { + render(`[${chalk.blue('i')}] No changes detected`); + } + } + } catch (e) { + console.log(e); + } +}; + +export const pgPush = async ( + schemaPath: string | string[], + verbose: boolean, + strict: boolean, + credentials: PostgresCredentials, + tablesFilter: string[], + schemasFilter: string[], + force: boolean, +) => { + const { preparePostgresDB } = await import('../connections'); + const { pgPushIntrospect } = await import('./pgIntrospect'); + + const db = await preparePostgresDB(credentials); + const { schema } = await pgPushIntrospect(db, tablesFilter, schemasFilter); + + const { preparePgPush } = await import('./migrate'); + + const statements = await preparePgPush(schemaPath, schema, schemasFilter); + + try { + if (statements.sqlStatements.length === 0) { + render(`[${chalk.blue('i')}] No changes detected`); + } else { + // const filteredStatements = filterStatements(statements.statements); + const { + shouldAskForApprove, + statementsToExecute, + columnsToRemove, + tablesToRemove, + tablesToTruncate, + infoToPrint, + schemasToRemove, + } = await pgSuggestions(db, statements.statements); + + if (verbose) { + console.log(); + // 
console.log(chalk.gray('Verbose logs:')); + console.log( + withStyle.warning('You are about to execute current statements:'), + ); + console.log(); + console.log(statementsToExecute.map((s) => chalk.blue(s)).join('\n')); + console.log(); + } + + if (!force && strict) { + if (!shouldAskForApprove) { + const { status, data } = await render( + new Select(['No, abort', `Yes, I want to execute all statements`]), + ); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + } + + if (!force && shouldAskForApprove) { + console.log(withStyle.warning('Found data-loss statements:')); + console.log(infoToPrint.join('\n')); + console.log(); + console.log( + chalk.red.bold( + 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', + ), + ); + + console.log(chalk.white('Do you still want to push changes?')); + + const { status, data } = await render( + new Select([ + 'No, abort', + `Yes, I want to${ + tablesToRemove.length > 0 + ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` + : ' ' + }${ + columnsToRemove.length > 0 + ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` + : ' ' + }${ + tablesToTruncate.length > 0 + ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 
'tables' : 'table'}` + : '' + }` + .replace(/(^,)|(,$)/g, '') + .replace(/ +(?= )/g, ''), + ]), + ); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + for (const dStmnt of statementsToExecute) { + await db.query(dStmnt); + } + + if (statements.statements.length > 0) { + render(`[${chalk.green('✓')}] Changes applied`); + } else { + render(`[${chalk.blue('i')}] No changes detected`); + } + } + } catch (e) { + console.error(e); + } +}; + +export const sqlitePush = async ( + schemaPath: string | string[], + verbose: boolean, + strict: boolean, + credentials: SqliteCredentials, + tablesFilter: string[], + force: boolean, +) => { + const { connectToSQLite } = await import('../connections'); + const { sqlitePushIntrospect } = await import('./sqliteIntrospect'); + + const db = await connectToSQLite(credentials); + const { schema } = await sqlitePushIntrospect(db, tablesFilter); + const { prepareSQLitePush } = await import('./migrate'); + + const statements = await prepareSQLitePush(schemaPath, schema); + + if (statements.sqlStatements.length === 0) { + render(`\n[${chalk.blue('i')}] No changes detected`); + } else { + const { + shouldAskForApprove, + statementsToExecute, + columnsToRemove, + tablesToRemove, + tablesToTruncate, + infoToPrint, + } = await sqliteSuggestions( + db, + statements.statements, + statements.squashedCur, + statements.squashedPrev, + statements.meta!, + ); + + if (verbose && statementsToExecute.length > 0) { + console.log(); + console.log( + withStyle.warning('You are about to execute current statements:'), + ); + console.log(); + console.log(statementsToExecute.map((s) => chalk.blue(s)).join('\n')); + console.log(); + } + + if (!force && strict) { + if (!shouldAskForApprove) { + const { status, data } = await render( + new Select(['No, abort', `Yes, I want to execute all statements`]), + ); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + 
process.exit(0); + } + } + } + + if (!force && shouldAskForApprove) { + console.log(withStyle.warning('Found data-loss statements:')); + console.log(infoToPrint.join('\n')); + console.log(); + console.log( + chalk.red.bold( + 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', + ), + ); + + console.log(chalk.white('Do you still want to push changes?')); + + const { status, data } = await render( + new Select([ + 'No, abort', + `Yes, I want to${ + tablesToRemove.length > 0 + ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` + : ' ' + }${ + columnsToRemove.length > 0 + ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` + : ' ' + }${ + tablesToTruncate.length > 0 + ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` + : '' + }` + .trimEnd() + .replace(/(^,)|(,$)/g, '') + .replace(/ +(?= )/g, ''), + ]), + ); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + if (statementsToExecute.length === 0) { + render(`\n[${chalk.blue('i')}] No changes detected`); + } else { + if (!('driver' in credentials)) { + await db.query('begin'); + try { + for (const dStmnt of statementsToExecute) { + await db.query(dStmnt); + } + await db.query('commit'); + } catch (e) { + console.error(e); + await db.query('rollback'); + process.exit(1); + } + } else if (credentials.driver === 'turso') { + await db.batch!(statementsToExecute.map((it) => ({ query: it }))); + } + render(`[${chalk.green('✓')}] Changes applied`); + } + } +}; diff --git a/drizzle-kit/src/cli/commands/singlestoreIntrospect.ts b/drizzle-kit/src/cli/commands/singlestoreIntrospect.ts new file mode 100644 index 000000000..27d8c59c5 --- /dev/null +++ b/drizzle-kit/src/cli/commands/singlestoreIntrospect.ts @@ -0,0 +1,53 @@ +import { renderWithTask } from 'hanji'; +import { Minimatch } from 'minimatch'; +import { originUUID } from 
'../../global'; +import type { SingleStoreSchema } from '../../serializer/singlestoreSchema'; +import { fromDatabase } from '../../serializer/singlestoreSerializer'; +import type { DB } from '../../utils'; +import { ProgressView } from '../views'; + +export const singlestorePushIntrospect = async ( + db: DB, + databaseName: string, + filters: string[], +) => { + const matchers = filters.map((it) => { + return new Minimatch(it); + }); + + const filter = (tableName: string) => { + if (matchers.length === 0) return true; + + let flags: boolean[] = []; + + for (let matcher of matchers) { + if (matcher.negate) { + if (!matcher.match(tableName)) { + flags.push(false); + } + } + + if (matcher.match(tableName)) { + flags.push(true); + } + } + + if (flags.length > 0) { + return flags.every(Boolean); + } + return false; + }; + + const progress = new ProgressView( + 'Pulling schema from database...', + 'Pulling schema from database...', + ); + const res = await renderWithTask( + progress, + fromDatabase(db, databaseName, filter), + ); + + const schema = { id: originUUID, prevId: '', ...res } as SingleStoreSchema; + const { internal, ...schemaWithoutInternals } = schema; + return { schema: schemaWithoutInternals }; +}; diff --git a/drizzle-kit/src/cli/commands/singlestorePushUtils.ts b/drizzle-kit/src/cli/commands/singlestorePushUtils.ts new file mode 100644 index 000000000..80fad9b2d --- /dev/null +++ b/drizzle-kit/src/cli/commands/singlestorePushUtils.ts @@ -0,0 +1,352 @@ +import chalk from 'chalk'; +import { render } from 'hanji'; +import { TypeOf } from 'zod'; +import { JsonAlterColumnTypeStatement, JsonStatement } from '../../jsonStatements'; +import { singlestoreSchema, SingleStoreSquasher } from '../../serializer/singlestoreSchema'; +import type { DB } from '../../utils'; +import { Select } from '../selector-ui'; +import { withStyle } from '../validations/outputs'; + +export const filterStatements = ( + statements: JsonStatement[], + currentSchema: TypeOf, + prevSchema: 
TypeOf, +) => { + return statements.filter((statement) => { + if (statement.type === 'alter_table_alter_column_set_type') { + // Don't need to handle it on migrations step and introspection + // but for both it should be skipped + if ( + statement.oldDataType.startsWith('tinyint') + && statement.newDataType.startsWith('boolean') + ) { + return false; + } + + if ( + statement.oldDataType.startsWith('bigint unsigned') + && statement.newDataType.startsWith('serial') + ) { + return false; + } + + if ( + statement.oldDataType.startsWith('serial') + && statement.newDataType.startsWith('bigint unsigned') + ) { + return false; + } + } else if (statement.type === 'alter_table_alter_column_set_default') { + if ( + statement.newDefaultValue === false + && statement.oldDefaultValue === 0 + && statement.newDataType === 'boolean' + ) { + return false; + } + if ( + statement.newDefaultValue === true + && statement.oldDefaultValue === 1 + && statement.newDataType === 'boolean' + ) { + return false; + } + } else if (statement.type === 'delete_unique_constraint') { + const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data); + // only if constraint was removed from a serial column, than treat it as removed + // const serialStatement = statements.find( + // (it) => it.type === "alter_table_alter_column_set_type" + // ) as JsonAlterColumnTypeStatement; + // if ( + // serialStatement?.oldDataType.startsWith("bigint unsigned") && + // serialStatement?.newDataType.startsWith("serial") && + // serialStatement.columnName === + // SingleStoreSquasher.unsquashUnique(statement.data).columns[0] + // ) { + // return false; + // } + // Check if uniqueindex was only on this column, that is serial + + // if now serial and was not serial and was unique index + if ( + unsquashed.columns.length === 1 + && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] + .type === 'serial' + && prevSchema.tables[statement.tableName].columns[unsquashed.columns[0]] + .type === 
'serial' + && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] + .name === unsquashed.columns[0] + ) { + return false; + } + } else if (statement.type === 'alter_table_alter_column_drop_notnull') { + // only if constraint was removed from a serial column, than treat it as removed + const serialStatement = statements.find( + (it) => it.type === 'alter_table_alter_column_set_type', + ) as JsonAlterColumnTypeStatement; + if ( + serialStatement?.oldDataType.startsWith('bigint unsigned') + && serialStatement?.newDataType.startsWith('serial') + && serialStatement.columnName === statement.columnName + && serialStatement.tableName === statement.tableName + ) { + return false; + } + if (statement.newDataType === 'serial' && !statement.columnNotNull) { + return false; + } + if (statement.columnAutoIncrement) { + return false; + } + } + + return true; + }); +}; + +export const logSuggestionsAndReturn = async ( + db: DB, + statements: JsonStatement[], + json2: TypeOf, +) => { + let shouldAskForApprove = false; + const statementsToExecute: string[] = []; + const infoToPrint: string[] = []; + + const tablesToRemove: string[] = []; + const columnsToRemove: string[] = []; + const schemasToRemove: string[] = []; + const tablesToTruncate: string[] = []; + + for (const statement of statements) { + if (statement.type === 'drop_table') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + statement.tableName, + ) + } table with ${count} items`, + ); + tablesToRemove.push(statement.tableName); + shouldAskForApprove = true; + } + } else if (statement.type === 'alter_table_drop_column') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + 
chalk.underline( + statement.columnName, + ) + } column in ${statement.tableName} table with ${count} items`, + ); + columnsToRemove.push(`${statement.tableName}_${statement.columnName}`); + shouldAskForApprove = true; + } + } else if (statement.type === 'drop_schema') { + const res = await db.query( + `select count(*) as count from information_schema.tables where table_schema = \`${statement.name}\`;`, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + statement.name, + ) + } schema with ${count} tables`, + ); + schemasToRemove.push(statement.name); + shouldAskForApprove = true; + } + } else if (statement.type === 'alter_table_alter_column_set_type') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to change ${ + chalk.underline( + statement.columnName, + ) + } column type from ${ + chalk.underline( + statement.oldDataType, + ) + } to ${chalk.underline(statement.newDataType)} with ${count} items`, + ); + statementsToExecute.push(`truncate table ${statement.tableName};`); + tablesToTruncate.push(statement.tableName); + shouldAskForApprove = true; + } + } else if (statement.type === 'alter_table_alter_column_drop_default') { + if (statement.columnNotNull) { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to remove default value from ${ + chalk.underline( + statement.columnName, + ) + } not-null column with ${count} items`, + ); + + tablesToTruncate.push(statement.tableName); + statementsToExecute.push(`truncate table ${statement.tableName};`); + + shouldAskForApprove = true; + } + } + // shouldAskForApprove = true; + } else if (statement.type === 'alter_table_alter_column_set_notnull') { + if 
(typeof statement.columnDefault === 'undefined') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to set not-null constraint to ${ + chalk.underline( + statement.columnName, + ) + } column without default, which contains ${count} items`, + ); + + tablesToTruncate.push(statement.tableName); + statementsToExecute.push(`truncate table ${statement.tableName};`); + + shouldAskForApprove = true; + } + } + } else if (statement.type === 'alter_table_alter_column_drop_pk') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + + // if drop pk and json2 has autoincrement in table -> exit process with error + if ( + Object.values(json2.tables[statement.tableName].columns).filter( + (column) => column.autoincrement, + ).length > 0 + ) { + console.log( + `${ + withStyle.errorWarning( + `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`, + ) + }`, + ); + process.exit(1); + } + + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to change ${ + chalk.underline( + statement.tableName, + ) + } primary key. 
This statements may fail and you table may left without primary key`, + ); + + tablesToTruncate.push(statement.tableName); + shouldAskForApprove = true; + } + } else if (statement.type === 'delete_composite_pk') { + // if drop pk and json2 has autoincrement in table -> exit process with error + if ( + Object.values(json2.tables[statement.tableName].columns).filter( + (column) => column.autoincrement, + ).length > 0 + ) { + console.log( + `${ + withStyle.errorWarning( + `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`, + ) + }`, + ); + process.exit(1); + } + } else if (statement.type === 'alter_table_add_column') { + if ( + statement.column.notNull + && typeof statement.column.default === 'undefined' + ) { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to add not-null ${ + chalk.underline( + statement.column.name, + ) + } column without default value, which contains ${count} items`, + ); + + tablesToTruncate.push(statement.tableName); + statementsToExecute.push(`truncate table ${statement.tableName};`); + + shouldAskForApprove = true; + } + } + } else if (statement.type === 'create_unique_constraint') { + const res = await db.query( + `select count(*) as count from \`${statement.tableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + const unsquashedUnique = SingleStoreSquasher.unsquashUnique(statement.data); + console.log( + `· You're about to add ${ + chalk.underline( + unsquashedUnique.name, + ) + } unique constraint to the table, which contains ${count} items. If this statement fails, you will receive an error from the database. 
Do you want to truncate ${ + chalk.underline( + statement.tableName, + ) + } table?\n`, + ); + const { status, data } = await render( + new Select([ + 'No, add the constraint without truncating the table', + `Yes, truncate the table`, + ]), + ); + if (data?.index === 1) { + tablesToTruncate.push(statement.tableName); + statementsToExecute.push(`truncate table ${statement.tableName};`); + shouldAskForApprove = true; + } + } + } + } + + return { + statementsToExecute, + shouldAskForApprove, + infoToPrint, + columnsToRemove: [...new Set(columnsToRemove)], + schemasToRemove: [...new Set(schemasToRemove)], + tablesToTruncate: [...new Set(tablesToTruncate)], + tablesToRemove: [...new Set(tablesToRemove)], + }; +}; diff --git a/drizzle-kit/src/cli/commands/singlestoreUp.ts b/drizzle-kit/src/cli/commands/singlestoreUp.ts new file mode 100644 index 000000000..dc5004ed0 --- /dev/null +++ b/drizzle-kit/src/cli/commands/singlestoreUp.ts @@ -0,0 +1 @@ +export const upSinglestoreHandler = (out: string) => {}; diff --git a/drizzle-kit/src/cli/commands/sqliteIntrospect.ts b/drizzle-kit/src/cli/commands/sqliteIntrospect.ts new file mode 100644 index 000000000..1c62498f5 --- /dev/null +++ b/drizzle-kit/src/cli/commands/sqliteIntrospect.ts @@ -0,0 +1,96 @@ +import { renderWithTask } from 'hanji'; +import { Minimatch } from 'minimatch'; +import { originUUID } from '../../global'; +import { schemaToTypeScript } from '../../introspect-sqlite'; +import type { SQLiteSchema } from '../../serializer/sqliteSchema'; +import { fromDatabase } from '../../serializer/sqliteSerializer'; +import type { SQLiteDB } from '../../utils'; +import { Casing } from '../validations/common'; +import type { SqliteCredentials } from '../validations/sqlite'; +import { IntrospectProgress, ProgressView } from '../views'; + +export const sqliteIntrospect = async ( + credentials: SqliteCredentials, + filters: string[], + casing: Casing, +) => { + const { connectToSQLite } = await import('../connections'); + const db 
= await connectToSQLite(credentials); + + const matchers = filters.map((it) => { + return new Minimatch(it); + }); + + const filter = (tableName: string) => { + if (matchers.length === 0) return true; + + let flags: boolean[] = []; + + for (let matcher of matchers) { + if (matcher.negate) { + if (!matcher.match(tableName)) { + flags.push(false); + } + } + + if (matcher.match(tableName)) { + flags.push(true); + } + } + + if (flags.length > 0) { + return flags.every(Boolean); + } + return false; + }; + + const progress = new IntrospectProgress(); + const res = await renderWithTask( + progress, + fromDatabase(db, filter, (stage, count, status) => { + progress.update(stage, count, status); + }), + ); + + const schema = { id: originUUID, prevId: '', ...res } as SQLiteSchema; + const ts = schemaToTypeScript(schema, casing); + return { schema, ts }; +}; + +export const sqlitePushIntrospect = async (db: SQLiteDB, filters: string[]) => { + const matchers = filters.map((it) => { + return new Minimatch(it); + }); + + const filter = (tableName: string) => { + if (matchers.length === 0) return true; + + let flags: boolean[] = []; + + for (let matcher of matchers) { + if (matcher.negate) { + if (!matcher.match(tableName)) { + flags.push(false); + } + } + + if (matcher.match(tableName)) { + flags.push(true); + } + } + + if (flags.length > 0) { + return flags.every(Boolean); + } + return false; + }; + + const progress = new ProgressView( + 'Pulling schema from database...', + 'Pulling schema from database...', + ); + const res = await renderWithTask(progress, fromDatabase(db, filter)); + + const schema = { id: originUUID, prevId: '', ...res } as SQLiteSchema; + return { schema }; +}; diff --git a/drizzle-kit/src/cli/commands/sqlitePushUtils.ts b/drizzle-kit/src/cli/commands/sqlitePushUtils.ts new file mode 100644 index 000000000..d11a4ce62 --- /dev/null +++ b/drizzle-kit/src/cli/commands/sqlitePushUtils.ts @@ -0,0 +1,393 @@ +import chalk from 'chalk'; + +import { 
SQLiteSchemaInternal, SQLiteSchemaSquashed, SQLiteSquasher } from '../../serializer/sqliteSchema'; +import { + CreateSqliteIndexConvertor, + fromJson, + SQLiteCreateTableConvertor, + SQLiteDropTableConvertor, + SqliteRenameTableConvertor, +} from '../../sqlgenerator'; + +import type { JsonStatement } from '../../jsonStatements'; +import type { SQLiteDB } from '../../utils'; + +export const _moveDataStatements = ( + tableName: string, + json: SQLiteSchemaSquashed, + dataLoss: boolean = false, +) => { + const statements: string[] = []; + + // rename table to __old_${tablename} + statements.push( + new SqliteRenameTableConvertor().convert({ + type: 'rename_table', + tableNameFrom: tableName, + tableNameTo: `__old_push_${tableName}`, + fromSchema: '', + toSchema: '', + }), + ); + + // create table statement from a new json2 with proper name + const tableColumns = Object.values(json.tables[tableName].columns); + const referenceData = Object.values(json.tables[tableName].foreignKeys); + const compositePKs = Object.values( + json.tables[tableName].compositePrimaryKeys, + ).map((it) => SQLiteSquasher.unsquashPK(it)); + + const fks = referenceData.map((it) => SQLiteSquasher.unsquashPushFK(it)); + + statements.push( + new SQLiteCreateTableConvertor().convert({ + type: 'sqlite_create_table', + tableName: tableName, + columns: tableColumns, + referenceData: fks, + compositePKs, + }), + ); + + // move data + if (!dataLoss) { + statements.push( + `INSERT INTO "${tableName}" SELECT * FROM "__old_push_${tableName}";`, + ); + } + // drop table with name __old_${tablename} + statements.push( + new SQLiteDropTableConvertor().convert({ + type: 'drop_table', + tableName: `__old_push_${tableName}`, + schema: '', + }), + ); + + for (const idx of Object.values(json.tables[tableName].indexes)) { + statements.push( + new CreateSqliteIndexConvertor().convert({ + type: 'create_index', + tableName: tableName, + schema: '', + data: idx, + }), + ); + } + + return statements; +}; + +export const 
getOldTableName = ( + tableName: string, + meta: SQLiteSchemaInternal['_meta'], +) => { + for (const key of Object.keys(meta.tables)) { + const value = meta.tables[key]; + if (`"${tableName}"` === value) { + return key.substring(1, key.length - 1); + } + } + return tableName; +}; + +export const getNewTableName = ( + tableName: string, + meta: SQLiteSchemaInternal['_meta'], +) => { + if (typeof meta.tables[`"${tableName}"`] !== 'undefined') { + return meta.tables[`"${tableName}"`].substring( + 1, + meta.tables[`"${tableName}"`].length - 1, + ); + } + return tableName; +}; + +export const logSuggestionsAndReturn = async ( + connection: SQLiteDB, + statements: JsonStatement[], + json1: SQLiteSchemaSquashed, + json2: SQLiteSchemaSquashed, + meta: SQLiteSchemaInternal['_meta'], +) => { + let shouldAskForApprove = false; + const statementsToExecute: string[] = []; + const infoToPrint: string[] = []; + + const tablesToRemove: string[] = []; + const columnsToRemove: string[] = []; + const schemasToRemove: string[] = []; + const tablesToTruncate: string[] = []; + + const tablesContext: Record = {}; + + for (const statement of statements) { + if (statement.type === 'drop_table') { + const res = await connection.query<{ count: string }>( + `select count(*) as count from \`${statement.tableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + statement.tableName, + ) + } table with ${count} items`, + ); + tablesToRemove.push(statement.tableName); + shouldAskForApprove = true; + } + const stmnt = fromJson([statement], 'sqlite')[0]; + statementsToExecute.push(stmnt); + } else if (statement.type === 'alter_table_drop_column') { + const newTableName = getOldTableName(statement.tableName, meta); + + const columnIsPartOfPk = Object.values( + json1.tables[newTableName].compositePrimaryKeys, + ).find((c) => SQLiteSquasher.unsquashPK(c).includes(statement.columnName)); + + const 
columnIsPartOfIndex = Object.values( + json1.tables[newTableName].indexes, + ).find((c) => SQLiteSquasher.unsquashIdx(c).columns.includes(statement.columnName)); + + const columnIsPk = json1.tables[newTableName].columns[statement.columnName].primaryKey; + + const columnIsPartOfFk = Object.values( + json1.tables[newTableName].foreignKeys, + ).find((t) => + SQLiteSquasher.unsquashPushFK(t).columnsFrom.includes( + statement.columnName, + ) + ); + + const res = await connection.query<{ count: string }>( + `select count(*) as count from \`${newTableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + statement.columnName, + ) + } column in ${newTableName} table with ${count} items`, + ); + columnsToRemove.push(`${newTableName}_${statement.columnName}`); + shouldAskForApprove = true; + } + + if ( + columnIsPk + || columnIsPartOfPk + || columnIsPartOfIndex + || columnIsPartOfFk + ) { + tablesContext[newTableName] = [ + ..._moveDataStatements(statement.tableName, json2, true), + ]; + // check table that have fk to this table + + const tablesReferncingCurrent: string[] = []; + + for (const table of Object.values(json1.tables)) { + const tablesRefs = Object.values(json1.tables[table.name].foreignKeys) + .filter( + (t) => SQLiteSquasher.unsquashPushFK(t).tableTo === newTableName, + ) + .map((t) => SQLiteSquasher.unsquashPushFK(t).tableFrom); + + tablesReferncingCurrent.push(...tablesRefs); + } + + const uniqueTableRefs = [...new Set(tablesReferncingCurrent)]; + + for (const table of uniqueTableRefs) { + if (typeof tablesContext[table] === 'undefined') { + tablesContext[table] = [..._moveDataStatements(table, json2)]; + } + } + } else { + if (typeof tablesContext[newTableName] === 'undefined') { + const stmnt = fromJson([statement], 'sqlite')[0]; + statementsToExecute.push(stmnt); + } + } + } else if (statement.type === 'sqlite_alter_table_add_column') { + const newTableName = 
getOldTableName(statement.tableName, meta); + if (statement.column.notNull && !statement.column.default) { + const res = await connection.query<{ count: string }>( + `select count(*) as count from \`${newTableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to add not-null ${ + chalk.underline( + statement.column.name, + ) + } column without default value, which contains ${count} items`, + ); + + tablesToTruncate.push(newTableName); + statementsToExecute.push(`delete from ${newTableName};`); + + shouldAskForApprove = true; + } + } + if (statement.column.primaryKey) { + tablesContext[newTableName] = [ + ..._moveDataStatements(statement.tableName, json2, true), + ]; + const tablesReferncingCurrent: string[] = []; + + for (const table of Object.values(json1.tables)) { + const tablesRefs = Object.values(json1.tables[table.name].foreignKeys) + .filter( + (t) => SQLiteSquasher.unsquashPushFK(t).tableTo === newTableName, + ) + .map((t) => SQLiteSquasher.unsquashPushFK(t).tableFrom); + + tablesReferncingCurrent.push(...tablesRefs); + } + + const uniqueTableRefs = [...new Set(tablesReferncingCurrent)]; + + for (const table of uniqueTableRefs) { + if (typeof tablesContext[table] === 'undefined') { + tablesContext[table] = [..._moveDataStatements(table, json2)]; + } + } + } else { + if (typeof tablesContext[newTableName] === 'undefined') { + const stmnt = fromJson([statement], 'sqlite')[0]; + statementsToExecute.push(stmnt); + } + } + } else if ( + statement.type === 'alter_table_alter_column_set_type' + || statement.type === 'alter_table_alter_column_set_default' + || statement.type === 'alter_table_alter_column_drop_default' + || statement.type === 'alter_table_alter_column_set_notnull' + || statement.type === 'alter_table_alter_column_drop_notnull' + || statement.type === 'alter_table_alter_column_drop_autoincrement' + || statement.type === 'alter_table_alter_column_set_autoincrement' + || statement.type === 
'alter_table_alter_column_drop_pk' + || statement.type === 'alter_table_alter_column_set_pk' + ) { + if ( + !( + statement.type === 'alter_table_alter_column_set_notnull' + && statement.columnPk + ) + ) { + const newTableName = getOldTableName(statement.tableName, meta); + if ( + statement.type === 'alter_table_alter_column_set_notnull' + && typeof statement.columnDefault === 'undefined' + ) { + const res = await connection.query<{ count: string }>( + `select count(*) as count from \`${newTableName}\``, + ); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to add not-null constraint to ${ + chalk.underline( + statement.columnName, + ) + } column without default value, which contains ${count} items`, + ); + + tablesToTruncate.push(newTableName); + shouldAskForApprove = true; + } + tablesContext[newTableName] = _moveDataStatements( + statement.tableName, + json1, + true, + ); + } else { + if (typeof tablesContext[newTableName] === 'undefined') { + tablesContext[newTableName] = _moveDataStatements( + statement.tableName, + json1, + ); + } + } + + const tablesReferncingCurrent: string[] = []; + + for (const table of Object.values(json1.tables)) { + const tablesRefs = Object.values(json1.tables[table.name].foreignKeys) + .filter( + (t) => SQLiteSquasher.unsquashPushFK(t).tableTo === newTableName, + ) + .map((t) => { + return getNewTableName( + SQLiteSquasher.unsquashPushFK(t).tableFrom, + meta, + ); + }); + + tablesReferncingCurrent.push(...tablesRefs); + } + + const uniqueTableRefs = [...new Set(tablesReferncingCurrent)]; + + for (const table of uniqueTableRefs) { + if (typeof tablesContext[table] === 'undefined') { + tablesContext[table] = [..._moveDataStatements(table, json1)]; + } + } + } + } else if ( + statement.type === 'create_reference' + || statement.type === 'delete_reference' + || statement.type === 'alter_reference' + ) { + const fk = SQLiteSquasher.unsquashPushFK(statement.data); + + if (typeof 
tablesContext[statement.tableName] === 'undefined') { + tablesContext[statement.tableName] = _moveDataStatements( + statement.tableName, + json2, + ); + } + } else if ( + statement.type === 'create_composite_pk' + || statement.type === 'alter_composite_pk' + || statement.type === 'delete_composite_pk' + || statement.type === 'create_unique_constraint' + || statement.type === 'delete_unique_constraint' + ) { + const newTableName = getOldTableName(statement.tableName, meta); + if (typeof tablesContext[newTableName] === 'undefined') { + tablesContext[newTableName] = _moveDataStatements( + statement.tableName, + json2, + ); + } + } else { + const stmnt = fromJson([statement], 'sqlite'); + if (typeof stmnt !== 'undefined') { + statementsToExecute.push(...stmnt); + } + } + } + + for (const context of Object.values(tablesContext)) { + statementsToExecute.push(...context); + } + + return { + statementsToExecute, + shouldAskForApprove, + infoToPrint, + columnsToRemove: [...new Set(columnsToRemove)], + schemasToRemove: [...new Set(schemasToRemove)], + tablesToTruncate: [...new Set(tablesToTruncate)], + tablesToRemove: [...new Set(tablesToRemove)], + }; +}; diff --git a/drizzle-kit/src/cli/commands/sqliteUp.ts b/drizzle-kit/src/cli/commands/sqliteUp.ts new file mode 100644 index 000000000..b76b9e2cd --- /dev/null +++ b/drizzle-kit/src/cli/commands/sqliteUp.ts @@ -0,0 +1,51 @@ +import chalk from 'chalk'; +import { writeFileSync } from 'fs'; +import { mapEntries } from 'src/global'; +import { SQLiteSchema, sqliteSchemaV5 } from 'src/serializer/sqliteSchema'; +import { prepareOutFolder, validateWithReport } from 'src/utils'; + +export const upSqliteHandler = (out: string) => { + const { snapshots } = prepareOutFolder(out, 'sqlite'); + const report = validateWithReport(snapshots, 'sqlite'); + + report.nonLatest + .map((it) => ({ + path: it, + raw: report.rawMap[it]!! 
as Record, + })) + .forEach((it) => { + const path = it.path; + const result = updateUpToV6(it.raw); + + console.log(`[${chalk.green('✓')}] ${path}`); + + writeFileSync(path, JSON.stringify(result, null, 2)); + }); + + console.log("Everything's fine 🐶🔥"); +}; + +const updateUpToV6 = (json: Record): SQLiteSchema => { + const schema = sqliteSchemaV5.parse(json); + + const tables = mapEntries(schema.tables, (tableKey, table) => { + const columns = mapEntries(table.columns, (key, value) => { + if ( + value.default + && (typeof value.default === 'object' || Array.isArray(value.default)) + ) { + value.default = `'${JSON.stringify(value.default)}'`; + } + return [key, value]; + }); + table.columns = columns; + return [tableKey, table]; + }); + + return { + ...schema, + version: '6', + dialect: 'sqlite', + tables: tables, + }; +}; diff --git a/drizzle-kit/src/cli/commands/utils.ts b/drizzle-kit/src/cli/commands/utils.ts new file mode 100644 index 000000000..e8a8f2b95 --- /dev/null +++ b/drizzle-kit/src/cli/commands/utils.ts @@ -0,0 +1,729 @@ +import chalk from 'chalk'; +import { existsSync } from 'fs'; +import { render } from 'hanji'; +import { join, resolve } from 'path'; +import { object, string } from 'zod'; +import { assertUnreachable } from '../../global'; +import { type Dialect, dialect } from '../../schemaValidator'; +import { prepareFilenames } from '../../serializer'; +import { pullParams, pushParams } from '../validations/cli'; +import { + Casing, + CliConfig, + configCommonSchema, + configMigrations, + Driver, + Prefix, + wrapParam, +} from '../validations/common'; +import { + MysqlCredentials, + mysqlCredentials, + printConfigConnectionIssues as printIssuesMysql, +} from '../validations/mysql'; +import { outputs } from '../validations/outputs'; +import { + PostgresCredentials, + postgresCredentials, + printConfigConnectionIssues as printIssuesPg, +} from '../validations/postgres'; +import { + printConfigConnectionIssues as printIssuesSingleStore, + 
SingleStoreCredentials, + singlestoreCredentials, +} from '../validations/singlestore'; +import { + printConfigConnectionIssues as printIssuesSqlite, + SqliteCredentials, + sqliteCredentials, +} from '../validations/sqlite'; +import { studioCliParams, studioConfig } from '../validations/studio'; +import { error } from '../views'; + +// NextJs default config is target: es5, which esbuild-register can't consume +const assertES5 = async (unregister: () => void) => { + try { + require('./_es5.ts'); + } catch (e: any) { + if ('errors' in e && Array.isArray(e.errors) && e.errors.length > 0) { + const es5Error = (e.errors as any[]).filter((it) => it.text?.includes(`("es5") is not supported yet`)).length > 0; + if (es5Error) { + console.log( + error( + `Please change compilerOptions.target from 'es5' to 'es6' or above in your tsconfig.json`, + ), + ); + process.exit(1); + } + } + console.error(e); + process.exit(1); + } +}; + +export const safeRegister = async () => { + const { register } = await import('esbuild-register/dist/node'); + let res: { unregister: () => void }; + try { + res = register({ + format: 'cjs', + loader: 'ts', + }); + } catch { + // tsx fallback + res = { + unregister: () => {}, + }; + } + + // has to be outside try catch to be able to run with tsx + await assertES5(res.unregister); + return res; +}; + +export const prepareCheckParams = async ( + options: { + config?: string; + dialect?: Dialect; + out?: string; + }, + from: 'cli' | 'config', +): Promise<{ out: string; dialect: Dialect }> => { + const config = from === 'config' + ? 
await drizzleConfigFromFile(options.config as string | undefined) + : options; + + if (!config.out || !config.dialect) { + let text = `Please provide required params for AWS Data API driver:\n`; + console.log(error(text)); + console.log(wrapParam('database', config.out)); + console.log(wrapParam('secretArn', config.dialect)); + process.exit(1); + } + return { out: config.out, dialect: config.dialect }; +}; + +export const prepareDropParams = async ( + options: { + config?: string; + out?: string; + driver?: Driver; + }, + from: 'cli' | 'config', +): Promise<{ out: string; bundle: boolean }> => { + const config = from === 'config' + ? await drizzleConfigFromFile(options.config as string | undefined) + : options; + + return { out: config.out || 'drizzle', bundle: config.driver === 'expo' }; +}; + +export type GenerateConfig = { + dialect: Dialect; + schema: string | string[]; + out: string; + breakpoints: boolean; + name?: string; + prefix: Prefix; + custom: boolean; + bundle: boolean; +}; + +export const prepareGenerateConfig = async ( + options: { + config?: string; + schema?: string; + out?: string; + breakpoints?: boolean; + custom?: boolean; + name?: string; + dialect?: Dialect; + driver?: Driver; + prefix?: Prefix; + }, + from: 'config' | 'cli', +): Promise => { + const config = from === 'config' ? await drizzleConfigFromFile(options.config) : options; + + const { schema, out, breakpoints, dialect, driver } = config; + + if (!schema || !dialect) { + console.log(error('Please provide required params:')); + console.log(wrapParam('schema', schema)); + console.log(wrapParam('dialect', dialect)); + console.log(wrapParam('out', out, true)); + process.exit(1); + } + + const fileNames = prepareFilenames(schema); + if (fileNames.length === 0) { + render(`[${chalk.blue('i')}] No schema file in ${schema} was found`); + process.exit(0); + } + + const prefix = ('migrations' in config ? 
config.migrations?.prefix : options.prefix) + || 'index'; + + return { + dialect: dialect, + name: options.name, + custom: options.custom || false, + prefix, + breakpoints: breakpoints || true, + schema: schema, + out: out || 'drizzle', + bundle: driver === 'expo', + }; +}; + +export const flattenDatabaseCredentials = (config: any) => { + if ('dbCredentials' in config) { + const { dbCredentials, ...rest } = config; + return { + ...rest, + ...dbCredentials, + }; + } + return config; +}; + +const flattenPull = (config: any) => { + if ('dbCredentials' in config) { + const { dbCredentials, introspect, ...rest } = config; + return { + ...rest, + ...dbCredentials, + casing: introspect?.casing, + }; + } + return config; +}; + +export const preparePushConfig = async ( + options: Record, + from: 'cli' | 'config', +): Promise< + ( + | { + dialect: 'mysql'; + credentials: MysqlCredentials; + } + | { + dialect: 'postgresql'; + credentials: PostgresCredentials; + } + | { + dialect: 'sqlite'; + credentials: SqliteCredentials; + } + | { + dialect: 'singlestore'; + credentials: SingleStoreCredentials; + } + ) & { + schemaPath: string | string[]; + verbose: boolean; + strict: boolean; + force: boolean; + tablesFilter: string[]; + schemasFilter: string[]; + } +> => { + const raw = flattenDatabaseCredentials( + from === 'config' + ? 
await drizzleConfigFromFile(options.config as string | undefined) + : options, + ); + + raw.verbose ||= options.verbose; // if provided in cli to debug + raw.strict ||= options.strict; // if provided in cli only + + const parsed = pushParams.safeParse(raw); + + if (parsed.error) { + console.log(error('Please provide required params:')); + console.log(wrapParam('dialect', raw.dialect)); + console.log(wrapParam('schema', raw.schema)); + process.exit(1); + } + + const config = parsed.data; + + const schemaFiles = prepareFilenames(config.schema); + if (schemaFiles.length === 0) { + render(`[${chalk.blue('i')}] No schema file in ${config.schema} was found`); + process.exit(0); + } + + const tablesFilterConfig = config.tablesFilter; + const tablesFilter = tablesFilterConfig + ? typeof tablesFilterConfig === 'string' + ? [tablesFilterConfig] + : tablesFilterConfig + : []; + + const schemasFilterConfig = config.schemaFilter; + + const schemasFilter = schemasFilterConfig + ? typeof schemasFilterConfig === 'string' + ? [schemasFilterConfig] + : schemasFilterConfig + : []; + + if (config.extensionsFilters) { + if ( + config.extensionsFilters.includes('postgis') + && config.dialect === 'postgresql' + ) { + tablesFilter.push( + ...['!geography_columns', '!geometry_columns', '!spatial_ref_sys'], + ); + } + } + + if (config.dialect === 'postgresql') { + const parsed = postgresCredentials.safeParse(config); + if (!parsed.success) { + printIssuesPg(config); + process.exit(1); + } + + return { + dialect: 'postgresql', + schemaPath: config.schema, + strict: config.strict ?? false, + verbose: config.verbose ?? false, + force: (options.force as boolean) ?? false, + credentials: parsed.data, + tablesFilter, + schemasFilter, + }; + } + + if (config.dialect === 'mysql') { + const parsed = mysqlCredentials.safeParse(config); + if (!parsed.success) { + printIssuesMysql(config); + process.exit(1); + } + return { + dialect: 'mysql', + schemaPath: config.schema, + strict: config.strict ?? 
false, + verbose: config.verbose ?? false, + force: (options.force as boolean) ?? false, + credentials: parsed.data, + tablesFilter, + schemasFilter, + }; + } + + if (config.dialect === 'singlestore') { + const parsed = singlestoreCredentials.safeParse(config); + if (!parsed.success) { + printIssuesSingleStore(config); + process.exit(1); + } + + return { + dialect: 'singlestore', + schemaPath: config.schema, + strict: config.strict ?? false, + verbose: config.verbose ?? false, + force: (options.force as boolean) ?? false, + credentials: parsed.data, + tablesFilter, + schemasFilter, + }; + } + + if (config.dialect === 'sqlite') { + const parsed = sqliteCredentials.safeParse(config); + if (!parsed.success) { + printIssuesSqlite(config, 'pull'); + process.exit(1); + } + return { + dialect: 'sqlite', + schemaPath: config.schema, + strict: config.strict ?? false, + verbose: config.verbose ?? false, + force: (options.force as boolean) ?? false, + credentials: parsed.data, + tablesFilter, + schemasFilter, + }; + } + + assertUnreachable(config.dialect); +}; + +export const preparePullConfig = async ( + options: Record, + from: 'cli' | 'config', +): Promise< + ( + | { + dialect: 'mysql'; + credentials: MysqlCredentials; + } + | { + dialect: 'postgresql'; + credentials: PostgresCredentials; + } + | { + dialect: 'sqlite'; + credentials: SqliteCredentials; + } + | { + dialect: 'singlestore'; + credentials: SingleStoreCredentials; + } + ) & { + out: string; + breakpoints: boolean; + casing: Casing; + tablesFilter: string[]; + schemasFilter: string[]; + prefix: Prefix; + } +> => { + const raw = flattenPull( + from === 'config' + ? 
await drizzleConfigFromFile(options.config as string | undefined) + : options, + ); + const parsed = pullParams.safeParse(raw); + + if (parsed.error) { + console.log(error('Please provide required params:')); + console.log(wrapParam('dialect', raw.dialect)); + process.exit(1); + } + + const config = parsed.data; + const dialect = config.dialect; + + const tablesFilterConfig = config.tablesFilter; + const tablesFilter = tablesFilterConfig + ? typeof tablesFilterConfig === 'string' + ? [tablesFilterConfig] + : tablesFilterConfig + : []; + + if (config.extensionsFilters) { + if ( + config.extensionsFilters.includes('postgis') + && dialect === 'postgresql' + ) { + tablesFilter.push( + ...['!geography_columns', '!geometry_columns', '!spatial_ref_sys'], + ); + } + } + + const schemasFilterConfig = config.schemaFilter; // TODO: consistent naming + const schemasFilter = schemasFilterConfig + ? typeof schemasFilterConfig === 'string' + ? [schemasFilterConfig] + : schemasFilterConfig + : []; + + if (dialect === 'postgresql') { + const parsed = postgresCredentials.safeParse(config); + if (!parsed.success) { + printIssuesPg(config); + process.exit(1); + } + + return { + dialect: 'postgresql', + out: config.out, + breakpoints: config.breakpoints, + casing: config.casing, + credentials: parsed.data, + tablesFilter, + schemasFilter, + prefix: config.migrations?.prefix || 'index', + }; + } + + if (dialect === 'mysql') { + const parsed = mysqlCredentials.safeParse(config); + if (!parsed.success) { + printIssuesMysql(config); + process.exit(1); + } + return { + dialect: 'mysql', + out: config.out, + breakpoints: config.breakpoints, + casing: config.casing, + credentials: parsed.data, + tablesFilter, + schemasFilter, + prefix: config.migrations?.prefix || 'index', + }; + } + + if (dialect === 'singlestore') { + const parsed = singlestoreCredentials.safeParse(config); + if (!parsed.success) { + printIssuesSingleStore(config); + process.exit(1); + } + + return { + dialect: 
'singlestore', + out: config.out, + breakpoints: config.breakpoints, + casing: config.casing, + credentials: parsed.data, + tablesFilter, + schemasFilter, + prefix: config.migrations?.prefix || 'index', + }; + } + + if (dialect === 'sqlite') { + const parsed = sqliteCredentials.safeParse(config); + if (!parsed.success) { + printIssuesSqlite(config, 'pull'); + process.exit(1); + } + return { + dialect: 'sqlite', + out: config.out, + breakpoints: config.breakpoints, + casing: config.casing, + credentials: parsed.data, + tablesFilter, + schemasFilter, + prefix: config.migrations?.prefix || 'index', + }; + } + + assertUnreachable(dialect); +}; + +export const prepareStudioConfig = async (options: Record) => { + const params = studioCliParams.parse(options); + const config = await drizzleConfigFromFile(params.config); + const result = studioConfig.safeParse(config); + if (!result.success) { + if (!('dialect' in config)) { + console.log(outputs.studio.noDialect()); + } + process.exit(1); + } + + if (!('dbCredentials' in config)) { + console.log(outputs.studio.noCredentials()); + process.exit(1); + } + const { host, port } = params; + const { dialect, schema } = result.data; + const flattened = flattenDatabaseCredentials(config); + + if (dialect === 'postgresql') { + const parsed = postgresCredentials.safeParse(flattened); + if (!parsed.success) { + printIssuesPg(flattened as Record); + process.exit(1); + } + const credentials = parsed.data; + return { + dialect, + schema, + host, + port, + credentials, + }; + } + + if (dialect === 'mysql') { + const parsed = mysqlCredentials.safeParse(flattened); + if (!parsed.success) { + printIssuesMysql(flattened as Record); + process.exit(1); + } + const credentials = parsed.data; + return { + dialect, + schema, + host, + port, + credentials, + }; + } + + if (dialect === 'singlestore') { + const parsed = singlestoreCredentials.safeParse(flattened); + if (!parsed.success) { + printIssuesSingleStore(flattened as Record); + 
process.exit(1); + } + const credentials = parsed.data; + return { + dialect, + schema, + host, + port, + credentials, + }; + } + + if (dialect === 'sqlite') { + const parsed = sqliteCredentials.safeParse(flattened); + if (!parsed.success) { + printIssuesSqlite(flattened as Record, 'studio'); + process.exit(1); + } + const credentials = parsed.data; + return { + dialect, + schema, + host, + port, + credentials, + }; + } + + assertUnreachable(dialect); +}; + +export const migrateConfig = object({ + dialect, + out: string().optional().default('drizzle'), + migrations: configMigrations, +}); + +export const prepareMigrateConfig = async (configPath: string | undefined) => { + const config = await drizzleConfigFromFile(configPath); + const parsed = migrateConfig.safeParse(config); + if (parsed.error) { + console.log(error('Please provide required params:')); + console.log(wrapParam('dialect', config.dialect)); + process.exit(1); + } + + const { dialect, out } = parsed.data; + const { schema, table } = parsed.data.migrations || {}; + const flattened = flattenDatabaseCredentials(config); + + if (dialect === 'postgresql') { + const parsed = postgresCredentials.safeParse(flattened); + if (!parsed.success) { + printIssuesPg(flattened as Record); + process.exit(1); + } + const credentials = parsed.data; + return { + dialect, + out, + credentials, + schema, + table, + }; + } + + if (dialect === 'mysql') { + const parsed = mysqlCredentials.safeParse(flattened); + if (!parsed.success) { + printIssuesMysql(flattened as Record); + process.exit(1); + } + const credentials = parsed.data; + return { + dialect, + out, + credentials, + schema, + table, + }; + } + + if (dialect === 'singlestore') { + const parsed = singlestoreCredentials.safeParse(flattened); + if (!parsed.success) { + printIssuesSingleStore(flattened as Record); + process.exit(1); + } + const credentials = parsed.data; + return { + dialect, + out, + credentials, + schema, + table, + }; + } + + if (dialect === 'sqlite') 
{ + const parsed = sqliteCredentials.safeParse(flattened); + if (!parsed.success) { + printIssuesSqlite(flattened as Record, 'migrate'); + process.exit(1); + } + const credentials = parsed.data; + return { + dialect, + out, + credentials, + schema, + table, + }; + } + + assertUnreachable(dialect); +}; + +export const drizzleConfigFromFile = async ( + configPath?: string, +): Promise => { + const prefix = process.env.TEST_CONFIG_PATH_PREFIX || ''; + + const defaultTsConfigExists = existsSync(resolve(join(prefix, 'drizzle.config.ts'))); + const defaultJsConfigExists = existsSync(resolve(join(prefix, 'drizzle.config.js'))); + const defaultJsonConfigExists = existsSync( + join(resolve('drizzle.config.json')), + ); + + const defaultConfigPath = defaultTsConfigExists + ? 'drizzle.config.ts' + : defaultJsConfigExists + ? 'drizzle.config.js' + : 'drizzle.config.json'; + + if (!configPath) { + console.log( + chalk.gray( + `No config path provided, using default '${defaultConfigPath}'`, + ), + ); + } + + const path: string = resolve(join(prefix, configPath ?? defaultConfigPath)); + + if (!existsSync(path)) { + console.log(`${path} file does not exist`); + process.exit(1); + } + + console.log(chalk.grey(`Reading config file '${path}'`)); + const { unregister } = await safeRegister(); + const required = require(`${path}`); + const content = required.default ?? 
required; + unregister(); + + // --- get response and then check by each dialect independently + const res = configCommonSchema.safeParse(content); + if (!res.success) { + if (!('dialect' in content)) { + console.log(error("Please specify 'dialect' param in config file")); + } + process.exit(1); + } + + return res.data; +}; diff --git a/drizzle-kit/src/cli/connections.ts b/drizzle-kit/src/cli/connections.ts new file mode 100644 index 000000000..3357bf146 --- /dev/null +++ b/drizzle-kit/src/cli/connections.ts @@ -0,0 +1,795 @@ +import type { AwsDataApiPgQueryResult, AwsDataApiSessionOptions } from 'drizzle-orm/aws-data-api/pg'; +import type { MigrationConfig } from 'drizzle-orm/migrator'; +import type { PreparedQueryConfig } from 'drizzle-orm/pg-core'; +import fetch from 'node-fetch'; +import ws from 'ws'; +import { assertUnreachable } from '../global'; +import type { ProxyParams } from '../serializer/studio'; +import { type DB, normalisePGliteUrl, normaliseSQLiteUrl, type Proxy, type SQLiteDB, type SqliteProxy } from '../utils'; +import { assertPackages, checkPackage } from './utils'; +import type { MysqlCredentials } from './validations/mysql'; +import { withStyle } from './validations/outputs'; +import type { PostgresCredentials } from './validations/postgres'; +import { SingleStoreCredentials } from './validations/singlestore'; +import type { SqliteCredentials } from './validations/sqlite'; + +export const preparePostgresDB = async ( + credentials: PostgresCredentials, +): Promise< + DB & { + proxy: Proxy; + migrate: (config: string | MigrationConfig) => Promise; + } +> => { + if ('driver' in credentials) { + const { driver } = credentials; + if (driver === 'aws-data-api') { + assertPackages('@aws-sdk/client-rds-data'); + const { RDSDataClient, ExecuteStatementCommand, TypeHint } = await import( + '@aws-sdk/client-rds-data' + ); + const { AwsDataApiSession, drizzle } = await import( + 'drizzle-orm/aws-data-api/pg' + ); + const { migrate } = await 
import('drizzle-orm/aws-data-api/pg/migrator'); + const { PgDialect } = await import('drizzle-orm/pg-core'); + + const config: AwsDataApiSessionOptions = { + database: credentials.database, + resourceArn: credentials.resourceArn, + secretArn: credentials.secretArn, + }; + const rdsClient = new RDSDataClient(); + const session = new AwsDataApiSession( + rdsClient, + new PgDialect(), + undefined, + config, + undefined, + ); + + const db = drizzle(rdsClient, config); + const migrateFn = async (config: string | MigrationConfig) => { + return migrate(db, config); + }; + + const query = async (sql: string, params: any[]) => { + const prepared = session.prepareQuery( + { sql, params: params ?? [] }, + undefined, + undefined, + false, + ); + const result = await prepared.all(); + return result as any[]; + }; + const proxy = async (params: ProxyParams) => { + const prepared = session.prepareQuery< + PreparedQueryConfig & { + execute: AwsDataApiPgQueryResult; + values: AwsDataApiPgQueryResult; + } + >( + { + sql: params.sql, + params: params.params ?? 
[], + typings: params.typings, + }, + undefined, + undefined, + params.mode === 'array', + ); + if (params.mode === 'array') { + const result = await prepared.values(); + return result.rows; + } + const result = await prepared.execute(); + return result.rows; + }; + + return { + query, + proxy, + migrate: migrateFn, + }; + } + + if (driver === 'pglite') { + assertPackages('@electric-sql/pglite'); + const { PGlite } = await import('@electric-sql/pglite'); + const { drizzle } = await import('drizzle-orm/pglite'); + const { migrate } = await import('drizzle-orm/pglite/migrator'); + + const pglite = new PGlite(normalisePGliteUrl(credentials.url)); + await pglite.waitReady; + const drzl = drizzle(pglite); + const migrateFn = async (config: MigrationConfig) => { + return migrate(drzl, config); + }; + + const query = async (sql: string, params: any[] = []) => { + const result = await pglite.query(sql, params); + return result.rows as T[]; + }; + + const proxy = async (params: ProxyParams) => { + const preparedParams = preparePGliteParams(params.params); + if ( + params.method === 'values' + || params.method === 'get' + || params.method === 'all' + ) { + const result = await pglite.query(params.sql, preparedParams, { + rowMode: params.mode, + }); + return result.rows; + } + + const result = await pglite.query(params.sql, preparedParams); + return result.rows; + }; + + return { query, proxy, migrate: migrateFn }; + } + + assertUnreachable(driver); + } + + if (await checkPackage('pg')) { + console.log(withStyle.info(`Using 'pg' driver for database querying`)); + const pg = await import('pg'); + const { drizzle } = await import('drizzle-orm/node-postgres'); + const { migrate } = await import('drizzle-orm/node-postgres/migrator'); + + const ssl = 'ssl' in credentials + ? credentials.ssl === 'prefer' + || credentials.ssl === 'require' + || credentials.ssl === 'allow' + ? { rejectUnauthorized: false } + : credentials.ssl === 'verify-full' + ? 
{} + : credentials.ssl + : {}; + + const client = 'url' in credentials + ? new pg.default.Pool({ connectionString: credentials.url, max: 1 }) + : new pg.default.Pool({ ...credentials, ssl, max: 1 }); + + const db = drizzle(client); + const migrateFn = async (config: string | MigrationConfig) => { + return migrate(db, config); + }; + + const query = async (sql: string, params?: any[]) => { + const result = await client.query(sql, params ?? []); + return result.rows; + }; + + const proxy: Proxy = async (params: ProxyParams) => { + const result = await client.query({ + text: params.sql, + values: params.params, + ...(params.mode === 'array' && { rowMode: 'array' }), + }); + return result.rows; + }; + + return { query, proxy, migrate: migrateFn }; + } + + if (await checkPackage('postgres')) { + console.log( + withStyle.info(`Using 'postgres' driver for database querying`), + ); + const postgres = await import('postgres'); + + const { drizzle } = await import('drizzle-orm/postgres-js'); + const { migrate } = await import('drizzle-orm/postgres-js/migrator'); + + const client = 'url' in credentials + ? postgres.default(credentials.url, { max: 1 }) + : postgres.default({ ...credentials, max: 1 }); + + const db = drizzle(client); + const migrateFn = async (config: string | MigrationConfig) => { + return migrate(db, config); + }; + + const query = async (sql: string, params?: any[]) => { + const result = await client.unsafe(sql, params ?? 
[]); + return result as any[]; + }; + + const proxy = async (params: ProxyParams) => { + if (params.mode === 'object') { + return await client.unsafe(params.sql, params.params); + } + return await client.unsafe(params.sql, params.params).values(); + }; + + return { query, proxy, migrate: migrateFn }; + } + + if (await checkPackage('@vercel/postgres')) { + console.log( + withStyle.info(`Using '@vercel/postgres' driver for database querying`), + ); + console.log( + withStyle.fullWarning( + "'@vercel/postgres' can only connect to remote Neon/Vercel Postgres/Supabase instances through a websocket", + ), + ); + const { VercelPool } = await import('@vercel/postgres'); + const { drizzle } = await import('drizzle-orm/vercel-postgres'); + const { migrate } = await import('drizzle-orm/vercel-postgres/migrator'); + const ssl = 'ssl' in credentials + ? credentials.ssl === 'prefer' + || credentials.ssl === 'require' + || credentials.ssl === 'allow' + ? { rejectUnauthorized: false } + : credentials.ssl === 'verify-full' + ? {} + : credentials.ssl + : {}; + + const client = 'url' in credentials + ? new VercelPool({ connectionString: credentials.url }) + : new VercelPool({ ...credentials, ssl }); + + await client.connect(); + + const db = drizzle(client); + const migrateFn = async (config: string | MigrationConfig) => { + return migrate(db, config); + }; + + const query = async (sql: string, params?: any[]) => { + const result = await client.query(sql, params ?? 
[]); + return result.rows; + }; + + const proxy: Proxy = async (params: ProxyParams) => { + const result = await client.query({ + text: params.sql, + values: params.params, + ...(params.mode === 'array' && { rowMode: 'array' }), + }); + return result.rows; + }; + + return { query, proxy, migrate: migrateFn }; + } + + if (await checkPackage('@neondatabase/serverless')) { + console.log( + withStyle.info( + `Using '@neondatabase/serverless' driver for database querying`, + ), + ); + console.log( + withStyle.fullWarning( + "'@neondatabase/serverless' can only connect to remote Neon/Vercel Postgres/Supabase instances through a websocket", + ), + ); + const { Pool, neonConfig } = await import('@neondatabase/serverless'); + const { drizzle } = await import('drizzle-orm/neon-serverless'); + const { migrate } = await import('drizzle-orm/neon-serverless/migrator'); + + const ssl = 'ssl' in credentials + ? credentials.ssl === 'prefer' + || credentials.ssl === 'require' + || credentials.ssl === 'allow' + ? { rejectUnauthorized: false } + : credentials.ssl === 'verify-full' + ? {} + : credentials.ssl + : {}; + + const client = 'url' in credentials + ? new Pool({ connectionString: credentials.url, max: 1 }) + : new Pool({ ...credentials, max: 1, ssl }); + neonConfig.webSocketConstructor = ws; + + const db = drizzle(client); + const migrateFn = async (config: string | MigrationConfig) => { + return migrate(db, config); + }; + + const query = async (sql: string, params?: any[]) => { + const result = await client.query(sql, params ?? 
[]); + return result.rows; + }; + + const proxy: Proxy = async (params: ProxyParams) => { + const result = await client.query({ + text: params.sql, + values: params.params, + ...(params.mode === 'array' && { rowMode: 'array' }), + }); + return result.rows; + }; + + return { query, proxy, migrate: migrateFn }; + } + + console.error( + "To connect to Postgres database - please install either of 'pg', 'postgres', '@neondatabase/serverless' or '@vercel/postgres' drivers", + ); + process.exit(1); +}; + +const parseMysqlCredentials = (credentials: MysqlCredentials) => { + if ('url' in credentials) { + const url = credentials.url; + + const connectionUrl = new URL(url); + const pathname = connectionUrl.pathname; + + const database = pathname.split('/')[pathname.split('/').length - 1]; + if (!database) { + console.error( + 'You should specify a database name in connection string (mysql://USER:PASSWORD@HOST:PORT/DATABASE)', + ); + process.exit(1); + } + return { database, url }; + } else { + return { + database: credentials.database, + credentials, + }; + } +}; + +export const connectToMySQL = async ( + it: MysqlCredentials, +): Promise<{ + db: DB; + proxy: Proxy; + database: string; + migrate: (config: MigrationConfig) => Promise; +}> => { + const result = parseMysqlCredentials(it); + + if (await checkPackage('mysql2')) { + const { createConnection } = await import('mysql2/promise'); + const { drizzle } = await import('drizzle-orm/mysql2'); + const { migrate } = await import('drizzle-orm/mysql2/migrator'); + + const connection = result.url + ? await createConnection(result.url) + : await createConnection(result.credentials!); // needed for some reason! 
+ + const db = drizzle(connection); + const migrateFn = async (config: MigrationConfig) => { + return migrate(db, config); + }; + + await connection.connect(); + const query: DB['query'] = async ( + sql: string, + params?: any[], + ): Promise => { + const res = await connection.execute(sql, params); + return res[0] as any; + }; + + const proxy: Proxy = async (params: ProxyParams) => { + const result = await connection.query({ + sql: params.sql, + values: params.params, + rowsAsArray: params.mode === 'array', + }); + return result[0] as any[]; + }; + + return { + db: { query }, + proxy, + database: result.database, + migrate: migrateFn, + }; + } + + if (await checkPackage('@planetscale/database')) { + const { connect } = await import('@planetscale/database'); + const { drizzle } = await import('drizzle-orm/planetscale-serverless'); + const { migrate } = await import( + 'drizzle-orm/planetscale-serverless/migrator' + ); + + const connection = connect(result); + + const db = drizzle(connection); + const migrateFn = async (config: MigrationConfig) => { + return migrate(db, config); + }; + + const query = async (sql: string, params?: any[]): Promise => { + const res = await connection.execute(sql, params); + return res.rows as T[]; + }; + const proxy: Proxy = async (params: ProxyParams) => { + const result = params.mode === 'object' + ? 
await connection.execute(params.sql, params.params) + : await connection.execute(params.sql, params.params, { + as: 'array', + }); + return result.rows; + }; + + return { + db: { query }, + proxy, + database: result.database, + migrate: migrateFn, + }; + } + + console.error( + "To connect to MySQL database - please install either of 'mysql2' or '@planetscale/database' drivers", + ); + process.exit(1); +}; + +const prepareSqliteParams = (params: any[], driver?: string) => { + return params.map((param) => { + if ( + param + && typeof param === 'object' + && 'type' in param + && 'value' in param + && param.type === 'binary' + ) { + const value = typeof param.value === 'object' + ? JSON.stringify(param.value) + : (param.value as string); + + if (driver === 'd1-http') { + return value; + } + + return Buffer.from(value); + } + return param; + }); +}; + +const preparePGliteParams = (params: any[]) => { + return params.map((param) => { + if ( + param + && typeof param === 'object' + && 'type' in param + && 'value' in param + && param.type === 'binary' + ) { + const value = typeof param.value === 'object' + ? 
JSON.stringify(param.value) + : (param.value as string); + + return value; + } + return param; + }); +}; + +export const connectToSQLite = async ( + credentials: SqliteCredentials, +): Promise< + & SQLiteDB + & SqliteProxy + & { migrate: (config: MigrationConfig) => Promise } +> => { + if ('driver' in credentials) { + const { driver } = credentials; + if (driver === 'turso') { + assertPackages('@libsql/client'); + const { createClient } = await import('@libsql/client'); + const { drizzle } = await import('drizzle-orm/libsql'); + const { migrate } = await import('drizzle-orm/libsql/migrator'); + + const client = createClient({ + url: credentials.url, + authToken: credentials.authToken, + }); + + const drzl = drizzle(client); + const migrateFn = async (config: MigrationConfig) => { + return migrate(drzl, config); + }; + + const db: SQLiteDB = { + query: async (sql: string, params?: any[]) => { + const res = await client.execute({ sql, args: params || [] }); + return res.rows as T[]; + }, + run: async (query: string) => { + await client.execute(query); + }, + batch: async ( + queries: { query: string; values?: any[] | undefined }[], + ) => { + await client.batch( + queries.map((it) => ({ sql: it.query, args: it.values ?? 
[] })), + ); + }, + }; + const proxy: SqliteProxy = { + proxy: async (params: ProxyParams) => { + const preparedParams = prepareSqliteParams(params.params); + const result = await client.execute({ + sql: params.sql, + args: preparedParams, + }); + + if (params.mode === 'array') { + return result.rows.map((row) => Object.values(row)); + } else { + return result.rows; + } + }, + }; + + return { ...db, ...proxy, migrate: migrateFn }; + } else if (driver === 'd1-http') { + const { drizzle } = await import('drizzle-orm/sqlite-proxy'); + const { migrate } = await import('drizzle-orm/sqlite-proxy/migrator'); + + const remoteCallback: Parameters[0] = async ( + sql, + params, + method, + ) => { + const res = await fetch( + `https://api.cloudflare.com/client/v4/accounts/${credentials.accountId}/d1/database/${credentials.databaseId}/${ + method === 'values' ? 'raw' : 'query' + }`, + { + method: 'POST', + body: JSON.stringify({ sql, params }), + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${credentials.token}`, + }, + }, + ); + + const data = (await res.json()) as + | { + success: true; + result: { + results: + | any[] + | { + columns: string[]; + rows: any[][]; + }; + }[]; + } + | { + success: false; + errors: { code: number; message: string }[]; + }; + + if (!data.success) { + throw new Error( + data.errors.map((it) => `${it.code}: ${it.message}`).join('\n'), + ); + } + + const result = data.result[0].results; + const rows = Array.isArray(result) ? 
result : result.rows; + + return { + rows, + }; + }; + + const drzl = drizzle(remoteCallback); + const migrateFn = async (config: MigrationConfig) => { + return migrate( + drzl, + async (queries) => { + for (const query of queries) { + await remoteCallback(query, [], 'run'); + } + }, + config, + ); + }; + + const db: SQLiteDB = { + query: async (sql: string, params?: any[]) => { + const res = await remoteCallback(sql, params || [], 'all'); + return res.rows as T[]; + }, + run: async (query: string) => { + await remoteCallback(query, [], 'run'); + }, + }; + const proxy: SqliteProxy = { + proxy: async (params: ProxyParams) => { + const preparedParams = prepareSqliteParams(params.params, 'd1-http'); + const result = await remoteCallback( + params.sql, + preparedParams, + params.mode === 'array' ? 'values' : 'all', + ); + + return result.rows; + }, + }; + return { ...db, ...proxy, migrate: migrateFn }; + } else { + assertUnreachable(driver); + } + } + + if (await checkPackage('@libsql/client')) { + const { createClient } = await import('@libsql/client'); + const { drizzle } = await import('drizzle-orm/libsql'); + const { migrate } = await import('drizzle-orm/libsql/migrator'); + + const client = createClient({ + url: normaliseSQLiteUrl(credentials.url, 'libsql'), + }); + const drzl = drizzle(client); + const migrateFn = async (config: MigrationConfig) => { + return migrate(drzl, config); + }; + + const db: SQLiteDB = { + query: async (sql: string, params?: any[]) => { + const res = await client.execute({ sql, args: params || [] }); + return res.rows as T[]; + }, + run: async (query: string) => { + await client.execute(query); + }, + }; + + const proxy: SqliteProxy = { + proxy: async (params: ProxyParams) => { + const preparedParams = prepareSqliteParams(params.params); + const result = await client.execute({ + sql: params.sql, + args: preparedParams, + }); + + if (params.mode === 'array') { + return result.rows.map((row) => Object.values(row)); + } else { + return 
result.rows; + } + }, + }; + + return { ...db, ...proxy, migrate: migrateFn }; + } + + if (await checkPackage('better-sqlite3')) { + const { default: Database } = await import('better-sqlite3'); + const { drizzle } = await import('drizzle-orm/better-sqlite3'); + const { migrate } = await import('drizzle-orm/better-sqlite3/migrator'); + + const sqlite = new Database( + normaliseSQLiteUrl(credentials.url, 'better-sqlite'), + ); + const drzl = drizzle(sqlite); + const migrateFn = async (config: MigrationConfig) => { + return migrate(drzl, config); + }; + + const db: SQLiteDB = { + query: async (sql: string, params: any[] = []) => { + return sqlite.prepare(sql).bind(params).all() as T[]; + }, + run: async (query: string) => { + sqlite.prepare(query).run(); + }, + }; + + const proxy: SqliteProxy = { + proxy: async (params: ProxyParams) => { + const preparedParams = prepareSqliteParams(params.params); + if ( + params.method === 'values' + || params.method === 'get' + || params.method === 'all' + ) { + return sqlite + .prepare(params.sql) + .raw(params.mode === 'array') + .all(preparedParams); + } + + return sqlite.prepare(params.sql).run(preparedParams); + }, + }; + return { ...db, ...proxy, migrate: migrateFn }; + } + console.log( + "Please install either 'better-sqlite3' or '@libsql/client' for Drizzle Kit to connect to SQLite databases", + ); + process.exit(1); +}; + +const parseSingleStoreCredentials = (credentials: SingleStoreCredentials) => { + if ('url' in credentials) { + const url = credentials.url; + + const connectionUrl = new URL(url); + const pathname = connectionUrl.pathname; + + const database = pathname.split('/')[pathname.split('/').length - 1]; + if (!database) { + console.error( + 'You should specify a database name in connection string (singlestore://USER:PASSWORD@HOST:PORT/DATABASE)', + ); + process.exit(1); + } + return { database, url }; + } else { + return { + database: credentials.database, + credentials, + }; + } +}; + +export const 
connectToSingleStore = async ( + it: SingleStoreCredentials, +): Promise<{ + db: DB; + proxy: Proxy; + database: string; + migrate: (config: MigrationConfig) => Promise; +}> => { + const result = parseSingleStoreCredentials(it); + + if (await checkPackage('singlestore')) { + const { createConnection } = await import('mysql2/promise'); + const { drizzle } = await import('drizzle-orm/singlestore'); + const { migrate } = await import('drizzle-orm/singlestore/migrator'); + + const connection = result.url + ? await createConnection(result.url) + : await createConnection(result.credentials!); // needed for some reason! + + const db = drizzle(connection); + const migrateFn = async (config: MigrationConfig) => { + return migrate(db, config); + }; + + await connection.connect(); + const query: DB['query'] = async ( + sql: string, + params?: any[], + ): Promise => { + const res = await connection.execute(sql, params); + return res[0] as any; + }; + + const proxy: Proxy = async (params: ProxyParams) => { + const result = await connection.query({ + sql: params.sql, + values: params.params, + rowsAsArray: params.mode === 'array', + }); + return result[0] as any[]; + }; + + return { + db: { query }, + proxy, + database: result.database, + migrate: migrateFn, + }; + } + + console.error( + "To connect to SingleStore database - please install 'mysql2' drivers", + ); + process.exit(1); +}; diff --git a/drizzle-kit/src/cli/index.ts b/drizzle-kit/src/cli/index.ts new file mode 100644 index 000000000..86bffdf3d --- /dev/null +++ b/drizzle-kit/src/cli/index.ts @@ -0,0 +1,48 @@ +import { command, run } from '@drizzle-team/brocli'; +import chalk from 'chalk'; +import { check, drop, generate, migrate, pull, push, studio, up } from './schema'; +import { ormCoreVersions } from './utils'; + +const version = async () => { + const { npmVersion } = await ormCoreVersions(); + const ormVersion = npmVersion ? 
`drizzle-orm: v${npmVersion}` : ''; + const envVersion = process.env.DRIZZLE_KIT_VERSION; + const kitVersion = envVersion ? `v${envVersion}` : '--'; + const versions = `drizzle-kit: ${kitVersion}\n${ormVersion}`; + console.log(chalk.gray(versions), '\n'); +}; + +const legacyCommand = (name: string, newName: string) => { + return command({ + name, + hidden: true, + handler: () => { + console.log( + `This command is deprecated, please use updated '${newName}' command (see https://orm.drizzle.team/kit-docs/upgrade-21#how-to-migrate-to-0210)`, + ); + }, + }); +}; + +const legacy = [ + legacyCommand('generate:pg', 'generate'), + legacyCommand('generate:mysql', 'generate'), + legacyCommand('generate:sqlite', 'generate'), + legacyCommand('push:pg', 'push'), + legacyCommand('push:mysql', 'push'), + legacyCommand('push:sqlite', 'push'), + legacyCommand('introspect:pg', 'introspect'), + legacyCommand('introspect:mysql', 'introspect'), + legacyCommand('introspect:sqlite', 'introspect'), + legacyCommand('up:pg', 'up'), + legacyCommand('up:mysql', 'up'), + legacyCommand('up:sqlite', 'up'), + legacyCommand('check:pg', 'check'), + legacyCommand('check:mysql', 'check'), + legacyCommand('check:sqlite', 'check'), +]; + +run([generate, migrate, pull, push, studio, up, check, drop, ...legacy], { + name: 'drizzle-kit', + version: version, +}); diff --git a/drizzle-kit/src/cli/schema.ts b/drizzle-kit/src/cli/schema.ts new file mode 100644 index 000000000..b2a6dbb3a --- /dev/null +++ b/drizzle-kit/src/cli/schema.ts @@ -0,0 +1,689 @@ +import chalk from 'chalk'; +import { checkHandler } from './commands/check'; +import { assertOrmCoreVersion, assertPackages, assertStudioNodeVersion, ormVersionGt } from './utils'; +import '../@types/utils'; +import { assertUnreachable } from '../global'; +import { type Setup } from '../serializer/studio'; +import { assertV1OutFolder } from '../utils'; +import { dropMigration } from './commands/drop'; +import { upMysqlHandler } from './commands/mysqlUp'; 
+import { upPgHandler } from './commands/pgUp'; +import { upSqliteHandler } from './commands/sqliteUp'; +import { + prepareCheckParams, + prepareDropParams, + prepareGenerateConfig, + prepareMigrateConfig, + preparePullConfig, + preparePushConfig, + prepareStudioConfig, +} from './commands/utils'; +import { assertCollisions, drivers, prefixes } from './validations/common'; +import { withStyle } from './validations/outputs'; +import 'dotenv/config'; +import { boolean, command, number, string } from '@drizzle-team/brocli'; +import { mkdirSync } from 'fs'; +import { renderWithTask } from 'hanji'; +import { dialects } from 'src/schemaValidator'; +// NOTE: assertUnreachable is already imported above from '../global' +import { drizzleForSingleStore, prepareSingleStoreSchema } from '../serializer/studio'; +import { certs } from '../utils/certs'; +import { upSinglestoreHandler } from './commands/singlestoreUp'; +import { grey, MigrateProgress } from './views'; + +const optionDialect = string('dialect') + .enum(...dialects) + .desc(`Database dialect: 'postgresql', 'mysql', 'sqlite' or 'singlestore'`); +const optionOut = string().desc("Output folder, 'drizzle' by default"); +const optionConfig = string().desc('Path to drizzle config file'); +const optionBreakpoints = boolean().desc( + `Prepare SQL statements with breakpoints`, +); + +const optionDriver = string() + .enum(...drivers) + .desc('Database driver'); + +export const generate = command({ + name: 'generate', + options: { + config: optionConfig, + dialect: optionDialect, + driver: optionDriver, + schema: string().desc('Path to a schema file or folder'), + out: optionOut, + name: string().desc('Migration file name'), + breakpoints: optionBreakpoints, + custom: boolean() + .desc('Prepare empty migration file for custom SQL') + .default(false), + prefix: string() + .enum(...prefixes) + .default('index'), + }, + transform: async (opts) => { + const from = assertCollisions( + 'generate', + opts, + ['prefix', 'name', 'custom'], + 
['driver', 'breakpoints', 'schema', 'out', 'dialect'], + ); + return prepareGenerateConfig(opts, from); + }, + handler: async (opts) => { + await assertOrmCoreVersion(); + await assertPackages('drizzle-orm'); + + // const parsed = cliConfigGenerate.parse(opts); + + const { + prepareAndMigratePg, + prepareAndMigrateMysql, + prepareAndMigrateSqlite, + prepareAndMigrateSingleStore, + } = await import('./commands/migrate'); + + const dialect = opts.dialect; + if (dialect === 'postgresql') { + await prepareAndMigratePg(opts); + } else if (dialect === 'mysql') { + await prepareAndMigrateMysql(opts); + } else if (dialect === 'sqlite') { + await prepareAndMigrateSqlite(opts); + } else if (dialect === 'singlestore') { + await prepareAndMigrateSingleStore(opts); + } else { + assertUnreachable(dialect); + } + }, +}); + +export const migrate = command({ + name: 'migrate', + options: { + config: optionConfig, + }, + transform: async (opts) => { + return await prepareMigrateConfig(opts.config); + }, + handler: async (opts) => { + await assertOrmCoreVersion(); + await assertPackages('drizzle-orm'); + + const { dialect, schema, table, out, credentials } = opts; + try { + if (dialect === 'postgresql') { + if ('driver' in credentials) { + const { driver } = credentials; + if (driver === 'aws-data-api') { + if (!(await ormVersionGt('0.30.10'))) { + console.log( + "To use 'aws-data-api' driver - please update drizzle-orm to the latest version", + ); + process.exit(1); + } + } else if (driver === 'pglite') { + if (!(await ormVersionGt('0.30.6'))) { + console.log( + "To use 'pglite' driver - please update drizzle-orm to the latest version", + ); + process.exit(1); + } + } else { + assertUnreachable(driver); + } + } + const { preparePostgresDB } = await import('./connections'); + const { migrate } = await preparePostgresDB(credentials); + await renderWithTask( + new MigrateProgress(), + migrate({ + migrationsFolder: out, + migrationsTable: table, + migrationsSchema: schema, + }), + ); + }
else if (dialect === 'mysql') { + const { connectToMySQL } = await import('./connections'); + const { migrate } = await connectToMySQL(credentials); + await renderWithTask( + new MigrateProgress(), + migrate({ + migrationsFolder: out, + migrationsTable: table, + migrationsSchema: schema, + }), + ); + } else if (dialect === 'singlestore') { + const { connectToSingleStore } = await import('./connections'); + const { migrate } = await connectToSingleStore(credentials); + await renderWithTask( + new MigrateProgress(), + migrate({ + migrationsFolder: out, + migrationsTable: table, + migrationsSchema: schema, + }), + ); + } else if (dialect === 'sqlite') { + const { connectToSQLite } = await import('./connections'); + const { migrate } = await connectToSQLite(credentials); + await renderWithTask( + new MigrateProgress(), + migrate({ + migrationsFolder: opts.out, + migrationsTable: table, + migrationsSchema: schema, + }), + ); + } else { + assertUnreachable(dialect); + } + } catch (e) { + console.error(e); + process.exit(1); + } + + process.exit(0); + }, +}); + +const optionsFilters = { + tablesFilter: string().desc('Table name filters'), + schemaFilters: string().desc('Schema name filters'), + extensionsFilters: string().desc( + '`Database extensions internal database filters', + ), +} as const; + +const optionsDatabaseCredentials = { + url: string().desc('Database connection URL'), + host: string().desc('Database host'), + port: string().desc('Database port'), + user: string().desc('Database user'), + password: string().desc('Database password'), + database: string().desc('Database name'), + ssl: string().desc('ssl mode'), + // Turso + authToken: string('auth-token').desc('Database auth token [Turso]'), + // specific cases + driver: optionDriver, +} as const; + +export const push = command({ + name: 'push', + options: { + config: optionConfig, + dialect: optionDialect, + schema: string().desc('Path to a schema file or folder'), + ...optionsFilters, + 
...optionsDatabaseCredentials, + verbose: boolean() + .desc('Print all statements for each push') + .default(false), + strict: boolean().desc('Always ask for confirmation').default(false), + force: boolean() + .desc( + 'Auto-approve all data loss statements. Note: Data loss statements may truncate your tables and data', + ) + .default(false), + }, + transform: async (opts) => { + const from = assertCollisions( + 'push', + opts, + ['force', 'verbose', 'strict'], + [ + 'schema', + 'dialect', + 'driver', + 'url', + 'host', + 'port', + 'user', + 'password', + 'database', + 'ssl', + 'authToken', + 'schemaFilters', + 'extensionsFilters', + 'tablesFilter', + ], + ); + + return preparePushConfig(opts, from); + }, + handler: async (config) => { + await assertPackages('drizzle-orm'); + await assertOrmCoreVersion(); + + const { + dialect, + schemaPath, + strict, + verbose, + credentials, + tablesFilter, + schemasFilter, + force, + } = config; + + try { + if (dialect === 'mysql') { + const { mysqlPush } = await import('./commands/push'); + await mysqlPush( + schemaPath, + credentials, + tablesFilter, + strict, + verbose, + force, + ); + } else if (dialect === 'postgresql') { + if ('driver' in credentials) { + const { driver } = credentials; + if (driver === 'aws-data-api') { + if (!(await ormVersionGt('0.30.10'))) { + console.log( + "To use 'aws-data-api' driver - please update drizzle-orm to the latest version", + ); + process.exit(1); + } + } else if (driver === 'pglite') { + if (!(await ormVersionGt('0.30.6'))) { + console.log( + "To use 'pglite' driver - please update drizzle-orm to the latest version", + ); + process.exit(1); + } + } else { + assertUnreachable(driver); + } + } + + const { pgPush } = await import('./commands/push'); + await pgPush( + schemaPath, + verbose, + strict, + credentials, + tablesFilter, + schemasFilter, + force, + ); + } else if (dialect === 'sqlite') { + const { sqlitePush } = await import('./commands/push'); + await sqlitePush( + schemaPath, + 
verbose, + strict, + credentials, + tablesFilter, + force, + ); + } else if (dialect === 'singlestore') { + const { singlestorePush } = await import('./commands/push'); + await singlestorePush( + schemaPath, + credentials, + tablesFilter, + strict, + verbose, + force, + ); + } else { + assertUnreachable(dialect); + } + } catch (e) { + console.error(e); + } + process.exit(0); + }, +}); + +export const check = command({ + name: 'check', + options: { + config: optionConfig, + dialect: optionDialect, + out: optionOut, + }, + transform: async (opts) => { + const from = assertCollisions('check', opts, [], ['dialect', 'out']); + return prepareCheckParams(opts, from); + }, + handler: async (config) => { + await assertOrmCoreVersion(); + + const { out, dialect } = config; + checkHandler(out, dialect); + console.log("Everything's fine 🐶🔥"); + }, +}); + +export const up = command({ + name: 'up', + options: { + config: optionConfig, + dialect: optionDialect, + out: optionOut, + }, + transform: async (opts) => { + const from = assertCollisions('check', opts, [], ['dialect', 'out']); + return prepareCheckParams(opts, from); + }, + handler: async (config) => { + await assertOrmCoreVersion(); + + const { out, dialect } = config; + await assertPackages('drizzle-orm'); + + if (dialect === 'postgresql') { + upPgHandler(out); + } + + if (dialect === 'mysql') { + upMysqlHandler(out); + } + + if (dialect === 'sqlite') { + upSqliteHandler(out); + } + + if (dialect === 'singlestore') { + upSinglestoreHandler(out); + } + }, +}); + +export const pull = command({ + name: 'introspect', + aliases: ['pull'], + options: { + config: optionConfig, + dialect: optionDialect, + out: optionOut, + breakpoints: optionBreakpoints, + casing: string('introspect-casing').enum('camel', 'preserve'), + ...optionsFilters, + ...optionsDatabaseCredentials, + }, + transform: async (opts) => { + const from = assertCollisions( + 'introspect', + opts, + [], + [ + 'dialect', + 'driver', + 'out', + 'url', + 'host', + 
'port', + 'user', + 'password', + 'database', + 'ssl', + 'authToken', + 'casing', + 'breakpoints', + 'tablesFilter', + 'schemaFilters', + 'extensionsFilters', + ], + ); + return preparePullConfig(opts, from); + }, + handler: async (config) => { + await assertPackages('drizzle-orm'); + await assertOrmCoreVersion(); + + const { + dialect, + credentials, + out, + casing, + breakpoints, + tablesFilter, + schemasFilter, + prefix, + } = config; + mkdirSync(out, { recursive: true }); + + console.log( + grey( + `Pulling from [${ + schemasFilter + .map((it) => `'${it}'`) + .join(', ') + }] list of schemas`, + ), + ); + console.log(); + + try { + if (dialect === 'postgresql') { + if ('driver' in credentials) { + const { driver } = credentials; + if (driver === 'aws-data-api') { + if (!(await ormVersionGt('0.30.10'))) { + console.log( + "To use 'aws-data-api' driver - please update drizzle-orm to the latest version", + ); + process.exit(1); + } + } else if (driver === 'pglite') { + if (!(await ormVersionGt('0.30.6'))) { + console.log( + "To use 'pglite' driver - please update drizzle-orm to the latest version", + ); + process.exit(1); + } + } else { + assertUnreachable(driver); + } + } + + const { introspectPostgres } = await import('./commands/introspect'); + await introspectPostgres( + casing, + out, + breakpoints, + credentials, + tablesFilter, + schemasFilter, + prefix, + ); + } else if (dialect === 'mysql') { + const { introspectMysql } = await import('./commands/introspect'); + await introspectMysql( + casing, + out, + breakpoints, + credentials, + tablesFilter, + prefix, + ); + } else if (dialect === 'sqlite') { + const { introspectSqlite } = await import('./commands/introspect'); + await introspectSqlite( + casing, + out, + breakpoints, + credentials, + tablesFilter, + prefix, + ); + } else if (dialect === 'singlestore') { + const { introspectSingleStore } = await import('./commands/introspect'); + await introspectSingleStore( + casing, + out, + breakpoints, + 
credentials, + tablesFilter, + prefix, + ); + } else { + assertUnreachable(dialect); + } + } catch (e) { + console.error(e); + } + process.exit(0); + }, +}); + +export const drop = command({ + name: 'drop', + options: { + config: optionConfig, + out: optionOut, + driver: optionDriver, + }, + transform: async (opts) => { + const from = assertCollisions('check', opts, [], ['driver', 'out']); + return prepareDropParams(opts, from); + }, + handler: async (config) => { + await assertOrmCoreVersion(); + + assertV1OutFolder(config.out); + await dropMigration(config); + }, +}); + +export const studio = command({ + name: 'studio', + options: { + config: optionConfig, + port: number().desc('Custom port for drizzle studio [default=4983]'), + host: string().desc('Custom host for drizzle studio [default=0.0.0.0]'), + verbose: boolean() + .default(false) + .desc('Print all stataments that are executed by Studio'), + }, + handler: async (opts) => { + await assertOrmCoreVersion(); + await assertPackages('drizzle-orm'); + + assertStudioNodeVersion(); + + const { + dialect, + schema: schemaPath, + port, + host, + credentials, + } = await prepareStudioConfig(opts); + + const { + drizzleForPostgres, + preparePgSchema, + prepareMySqlSchema, + drizzleForMySQL, + prepareSQLiteSchema, + drizzleForSQLite, + prepareSingleStoreSchema, + drizzleForSingleStore, + } = await import('../serializer/studio'); + + let setup: Setup; + try { + if (dialect === 'postgresql') { + if ('driver' in credentials) { + const { driver } = credentials; + if (driver === 'aws-data-api') { + if (!(await ormVersionGt('0.30.10'))) { + console.log( + "To use 'aws-data-api' driver - please update drizzle-orm to the latest version", + ); + process.exit(1); + } + } else if (driver === 'pglite') { + if (!(await ormVersionGt('0.30.6'))) { + console.log( + "To use 'pglite' driver - please update drizzle-orm to the latest version", + ); + process.exit(1); + } + } else { + assertUnreachable(driver); + } + } + + const { schema, 
relations, files } = schemaPath + ? await preparePgSchema(schemaPath) + : { schema: {}, relations: {}, files: [] }; + setup = await drizzleForPostgres(credentials, schema, relations, files); + } else if (dialect === 'mysql') { + const { schema, relations, files } = schemaPath + ? await prepareMySqlSchema(schemaPath) + : { schema: {}, relations: {}, files: [] }; + setup = await drizzleForMySQL(credentials, schema, relations, files); + } else if (dialect === 'sqlite') { + const { schema, relations, files } = schemaPath + ? await prepareSQLiteSchema(schemaPath) + : { schema: {}, relations: {}, files: [] }; + setup = await drizzleForSQLite(credentials, schema, relations, files); + } else if (dialect === 'singlestore') { + const { schema, relations, files } = schemaPath + ? await prepareSingleStoreSchema(schemaPath) + : { schema: {}, relations: {}, files: [] }; + setup = await drizzleForSingleStore(credentials, schema, relations, files); + } else { + assertUnreachable(dialect); + } + + const { prepareServer } = await import('../serializer/studio'); + + const server = await prepareServer(setup); + + console.log(); + console.log( + withStyle.fullWarning( + 'Drizzle Studio is currently in Beta. 
If you find anything that is not working as expected or should be improved, feel free to create an issue on GitHub: https://github.com/drizzle-team/drizzle-kit-mirror/issues/new or write to us on Discord: https://discord.gg/WcRKz2FFxN', + ), + ); + + const { key, cert } = (await certs()) || {}; + server.start({ + host, + port, + key, + cert, + cb: (err, address) => { + if (err) { + console.error(err); + } else { + const queryParams: { port?: number; host?: string } = {}; + if (port !== 4983) { + queryParams.port = port; + } + + if (host !== '127.0.0.1') { + queryParams.host = host; + } + + const queryString = Object.keys(queryParams) + .map((key: keyof { port?: number; host?: string }) => { + return `${key}=${queryParams[key]}`; + }) + .join('&'); + + console.log( + `\nDrizzle Studio is up and running on ${ + chalk.blue( + `https://local.drizzle.studio${queryString ? `?${queryString}` : ''}`, + ) + }`, + ); + } + }, + }); + } catch (e) { + console.error(e); + process.exit(0); + } + }, +}); diff --git a/drizzle-kit/src/cli/selector-ui.ts b/drizzle-kit/src/cli/selector-ui.ts new file mode 100644 index 000000000..f384831d0 --- /dev/null +++ b/drizzle-kit/src/cli/selector-ui.ts @@ -0,0 +1,38 @@ +import chalk from 'chalk'; +import { Prompt, SelectState } from 'hanji'; + +export class Select extends Prompt<{ index: number; value: string }> { + private readonly data: SelectState<{ label: string; value: string }>; + + constructor(items: string[]) { + super(); + this.on('attach', (terminal) => terminal.toggleCursor('hide')); + this.on('detach', (terminal) => terminal.toggleCursor('show')); + + this.data = new SelectState( + items.map((it) => ({ label: it, value: `${it}-value` })), + ); + this.data.bind(this); + } + + render(status: 'idle' | 'submitted' | 'aborted'): string { + if (status === 'submitted' || status === 'aborted') return ''; + + let text = ``; + this.data.items.forEach((it, idx) => { + text += idx === this.data.selectedIdx + ? 
`${chalk.green('❯ ' + it.label)}` + : ` ${it.label}`; + text += idx != this.data.items.length - 1 ? '\n' : ''; + }); + + return text; + } + + result() { + return { + index: this.data.selectedIdx, + value: this.data.items[this.data.selectedIdx]!.value!, + }; + } +} diff --git a/drizzle-kit/src/cli/utils.ts b/drizzle-kit/src/cli/utils.ts new file mode 100644 index 000000000..f7e7a2ae9 --- /dev/null +++ b/drizzle-kit/src/cli/utils.ts @@ -0,0 +1,112 @@ +import semver from 'semver'; +import { err, warning } from './views'; + +export const assertExists = (it?: any) => { + if (!it) throw new Error(); +}; + +export const ormVersionGt = async (version: string) => { + const { npmVersion } = await import('drizzle-orm/version'); + if (!semver.gte(npmVersion, version)) { + return false; + } + return true; +}; + +export const assertStudioNodeVersion = () => { + if (semver.gte(process.version, '18.0.0')) return; + + err('Drizzle Studio requires NodeJS v18 or above'); + process.exit(1); +}; + +export const checkPackage = async (it: string) => { + try { + await import(it); + return true; + } catch (e) { + return false; + } +}; + +export const assertPackages = async (...pkgs: string[]) => { + try { + for (let i = 0; i < pkgs.length; i++) { + const it = pkgs[i]; + await import(it); + } + } catch (e) { + err( + `please install required packages: ${ + pkgs + .map((it) => `'${it}'`) + .join(' ') + }`, + ); + process.exit(1); + } +}; + +// ex: either pg or postgres are needed +export const assertEitherPackage = async ( + ...pkgs: string[] +): Promise => { + const availables = [] as string[]; + for (let i = 0; i < pkgs.length; i++) { + try { + const it = pkgs[i]; + await import(it); + availables.push(it); + } catch (e) {} + } + + if (availables.length > 0) { + return availables; + } + + err( + `Please install one of those packages are needed: ${ + pkgs + .map((it) => `'${it}'`) + .join(' or ') + }`, + ); + process.exit(1); +}; + +const requiredApiVersion = 7; +export const 
assertOrmCoreVersion = async () => { + try { + const { compatibilityVersion } = await import('drizzle-orm/version'); + + await import('drizzle-orm/relations'); + + if (compatibilityVersion && compatibilityVersion === requiredApiVersion) { + return; + } + + if (!compatibilityVersion || compatibilityVersion < requiredApiVersion) { + console.log( + 'This version of drizzle-kit requires newer version of drizzle-orm\nPlease update drizzle-orm package to the latest version 👍', + ); + } else { + console.log( + 'This version of drizzle-kit is outdated\nPlease update drizzle-kit package to the latest version 👍', + ); + } + } catch (e) { + console.log('Please install latest version of drizzle-orm'); + } + process.exit(1); +}; + +export const ormCoreVersions = async () => { + try { + const { compatibilityVersion, npmVersion } = await import( + 'drizzle-orm/version' + ); + return { compatibilityVersion, npmVersion }; + } catch (e) { + return {}; + } +}; diff --git a/drizzle-kit/src/cli/validations/cli.ts b/drizzle-kit/src/cli/validations/cli.ts new file mode 100644 index 000000000..c4bbbe530 --- /dev/null +++ b/drizzle-kit/src/cli/validations/cli.ts @@ -0,0 +1,62 @@ +import { boolean, intersection, literal, object, string, TypeOf, union } from 'zod'; +import { dialect } from '../../schemaValidator'; +import { casing, prefix } from './common'; + +export const cliConfigGenerate = object({ + dialect: dialect.optional(), + schema: union([string(), string().array()]).optional(), + out: string().optional().default('./drizzle'), + config: string().optional(), + name: string().optional(), + prefix: prefix.optional(), + breakpoints: boolean().optional().default(true), + custom: boolean().optional().default(false), +}).strict(); + +export type CliConfigGenerate = TypeOf; + +export const pushParams = object({ + dialect: dialect, + schema: union([string(), string().array()]), + tablesFilter: union([string(), string().array()]).optional(), + schemaFilter: union([string(), 
string().array()]) + .optional() + .default(['public']), + extensionsFilters: literal('postgis').array().optional(), + verbose: boolean().optional(), + strict: boolean().optional(), +}).passthrough(); + +export type PushParams = TypeOf; + +export const pullParams = object({ + config: string().optional(), + dialect: dialect, + out: string().optional().default('drizzle'), + tablesFilter: union([string(), string().array()]).optional(), + schemaFilter: union([string(), string().array()]) + .optional() + .default(['public']), + extensionsFilters: literal('postgis').array().optional(), + casing, + breakpoints: boolean().optional().default(true), + migrations: object({ + prefix: prefix.optional().default('index'), + }).optional(), +}).passthrough(); + +export type PullParams = TypeOf; + +export const configCheck = object({ + dialect: dialect.optional(), + out: string().optional(), +}); + +export const cliConfigCheck = intersection( + object({ + config: string().optional(), + }), + configCheck, +); + +export type CliCheckConfig = TypeOf; diff --git a/drizzle-kit/src/cli/validations/common.ts b/drizzle-kit/src/cli/validations/common.ts new file mode 100644 index 000000000..a7307f4d6 --- /dev/null +++ b/drizzle-kit/src/cli/validations/common.ts @@ -0,0 +1,186 @@ +import chalk from 'chalk'; +import { UnionToIntersection } from 'hono/utils/types'; +import { any, boolean, enum as enum_, literal, object, string, TypeOf, union } from 'zod'; +import { dialect } from '../../schemaValidator'; +import { outputs } from './outputs'; + +export type Commands = + | 'introspect' + | 'generate' + | 'check' + | 'up' + | 'drop' + | 'push'; + +type Expand = T extends infer O ? { [K in keyof O]: O[K] } : never; +type IsUnion = [T] extends [UnionToIntersection] ? false : true; +type LastTupleElement = TArr extends [ + ...start: infer _, + end: infer Last, +] ? Last + : never; + +export type UniqueArrayOfUnion = Exclude< + TUnion, + TArray[number] +> extends never ? 
[TUnion] + : [...TArray, Exclude]; + +export const assertCollisions = < + T extends Record, + TKeys extends (keyof T)[], + TRemainingKeys extends Exclude[], + Exhaustive extends TRemainingKeys, + UNIQ extends UniqueArrayOfUnion, +>( + command: Commands, + options: T, + whitelist: Exclude, + remainingKeys: UniqueArrayOfUnion, +): IsUnion> extends false ? 'cli' | 'config' : TKeys => { + const { config, ...rest } = options; + + let atLeastOneParam = false; + for (const key of Object.keys(rest)) { + if (whitelist.includes(key)) continue; + + atLeastOneParam = atLeastOneParam || rest[key] !== undefined; + } + + if (!config && atLeastOneParam) { + return 'cli' as any; + } + + if (!atLeastOneParam) { + return 'config' as any; + } + + // if config and cli - return error - write a reason + console.log(outputs.common.ambiguousParams(command)); + process.exit(1); +}; + +export const sqliteDriversLiterals = [ + literal('turso'), + literal('d1-http'), + literal('expo'), +] as const; + +export const postgresqlDriversLiterals = [ + literal('aws-data-api'), + literal('pglite'), +] as const; + +export const prefixes = [ + 'index', + 'timestamp', + 'supabase', + 'unix', + 'none', +] as const; +export const prefix = enum_(prefixes); +export type Prefix = (typeof prefixes)[number]; + +{ + const _: Prefix = '' as TypeOf; +} + +export const sqliteDriver = union(sqliteDriversLiterals); +export const postgresDriver = union(postgresqlDriversLiterals); +export const driver = union([sqliteDriver, postgresDriver]); + +export const configMigrations = object({ + table: string().optional(), + schema: string().optional(), + prefix: prefix.optional().default('index'), +}).optional(); + +export const configCommonSchema = object({ + dialect: dialect, + schema: union([string(), string().array()]).optional(), + out: string().optional(), + breakpoints: boolean().optional().default(true), + verbose: boolean().optional().default(false), + driver: driver.optional(), + tablesFilter: union([string(), 
string().array()]).optional(), + schemaFilter: union([string(), string().array()]).default(['public']), + migrations: configMigrations, + dbCredentials: any().optional(), +}).passthrough(); + +export const casing = union([literal('camel'), literal('preserve')]).default( + 'camel', +); + +export const introspectParams = object({ + schema: union([string(), string().array()]).optional(), + out: string().optional().default('./drizzle'), + breakpoints: boolean().default(true), + tablesFilter: union([string(), string().array()]).optional(), + schemaFilter: union([string(), string().array()]).default(['public']), + introspect: object({ + casing, + }).default({ casing: 'camel' }), +}); + +export type IntrospectParams = TypeOf; +export type Casing = TypeOf; + +export const configIntrospectCliSchema = object({ + schema: union([string(), string().array()]).optional(), + out: string().optional().default('./drizzle'), + breakpoints: boolean().default(true), + tablesFilter: union([string(), string().array()]).optional(), + schemaFilter: union([string(), string().array()]).default(['public']), + introspectCasing: union([literal('camel'), literal('preserve')]).default( + 'camel', + ), +}); + +export const configGenerateSchema = object({ + schema: union([string(), string().array()]), + out: string().optional().default('./drizzle'), + breakpoints: boolean().default(true), +}); + +export type GenerateSchema = TypeOf; + +export const configPushSchema = object({ + dialect: dialect, + schema: union([string(), string().array()]), + tablesFilter: union([string(), string().array()]).optional(), + schemaFilter: union([string(), string().array()]).default(['public']), + verbose: boolean().default(false), + strict: boolean().default(false), + out: string().optional(), +}); + +export type CliConfig = TypeOf; +export const drivers = ['turso', 'd1-http', 'expo', 'aws-data-api', 'pglite'] as const; +export type Driver = (typeof drivers)[number]; +const _: Driver = '' as TypeOf; + +export const 
wrapParam = ( + name: string, + param: any | undefined, + optional: boolean = false, + type?: 'url' | 'secret', +) => { + const check = `[${chalk.green('✓')}]`; + const cross = `[${chalk.red('x')}]`; + if (typeof param === 'string') { + if (param.length === 0) { + return ` ${cross} ${name}: ''`; + } + if (type === 'secret') { + return ` ${check} ${name}: '*****'`; + } else if (type === 'url') { + return ` ${check} ${name}: '${param.replace(/(?<=:\/\/[^:\n]*:)([^@]*)/, '****')}'`; + } + return ` ${check} ${name}: '${param}'`; + } + if (optional) { + return chalk.gray(` ${name}?: `); + } + return ` ${cross} ${name}: ${chalk.gray('undefined')}`; +}; diff --git a/drizzle-kit/src/cli/validations/mysql.ts b/drizzle-kit/src/cli/validations/mysql.ts new file mode 100644 index 000000000..1841dbdd6 --- /dev/null +++ b/drizzle-kit/src/cli/validations/mysql.ts @@ -0,0 +1,61 @@ +import { boolean, coerce, object, string, TypeOf, union } from 'zod'; +import { error } from '../views'; +import { wrapParam } from './common'; +import { outputs } from './outputs'; + +export const mysqlCredentials = union([ + object({ + host: string().min(1), + port: coerce.number().min(1).optional(), + user: string().min(1).optional(), + password: string().min(1).optional(), + database: string().min(1), + ssl: union([ + string(), + object({ + pfx: string().optional(), + key: string().optional(), + passphrase: string().optional(), + cert: string().optional(), + ca: union([string(), string().array()]).optional(), + crl: union([string(), string().array()]).optional(), + ciphers: string().optional(), + rejectUnauthorized: boolean().optional(), + }), + ]).optional(), + }), + object({ + url: string().min(1), + }), +]); + +export type MysqlCredentials = TypeOf; + +export const printCliConnectionIssues = (options: any) => { + const { uri, host, database } = options || {}; + + if (!uri && (!host || !database)) { + console.log(outputs.mysql.connection.required()); + } +}; + +export const 
printConfigConnectionIssues = ( + options: Record, +) => { + if ('url' in options) { + let text = `Please provide required params for MySQL driver:\n`; + console.log(error(text)); + console.log(wrapParam('url', options.url, false, 'url')); + process.exit(1); + } + + let text = `Please provide required params for MySQL driver:\n`; + console.log(error(text)); + console.log(wrapParam('host', options.host)); + console.log(wrapParam('port', options.port, true)); + console.log(wrapParam('user', options.user, true)); + console.log(wrapParam('password', options.password, true, 'secret')); + console.log(wrapParam('database', options.database)); + console.log(wrapParam('ssl', options.ssl, true)); + process.exit(1); +}; diff --git a/drizzle-kit/src/cli/validations/outputs.ts b/drizzle-kit/src/cli/validations/outputs.ts new file mode 100644 index 000000000..ad0423b97 --- /dev/null +++ b/drizzle-kit/src/cli/validations/outputs.ts @@ -0,0 +1,91 @@ +import chalk from 'chalk'; +import { sqliteDriversLiterals } from './common'; + +export const withStyle = { + error: (str: string) => `${chalk.red(`${chalk.white.bgRed(' Invalid input ')} ${str}`)}`, + warning: (str: string) => `${chalk.white.bgGray(' Warning ')} ${str}`, + errorWarning: (str: string) => `${chalk.red(`${chalk.white.bgRed(' Warning ')} ${str}`)}`, + fullWarning: (str: string) => `${chalk.black.bgYellow(' Warning ')} ${chalk.bold(str)}`, + suggestion: (str: string) => `${chalk.white.bgGray(' Suggestion ')} ${str}`, + info: (str: string) => `${chalk.grey(str)}`, +}; + +export const outputs = { + studio: { + drivers: (param: string) => + withStyle.error( + `"${param}" is not a valid driver. Available drivers: "pg", "mysql2", "better-sqlite", "libsql", "turso". You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, + ), + noCredentials: () => + withStyle.error( + `Please specify a 'dbCredentials' param in config. It will help drizzle to know how to query you database. 
You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, + ), + noDriver: () => + withStyle.error( + `Please specify a 'driver' param in config. It will help drizzle to know how to query you database. You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, + ), + noDialect: () => + withStyle.error( + `Please specify 'dialect' param in config, either of 'pg', 'mysql', 'sqlite' or singlestore`, + ), + }, + common: { + ambiguousParams: (command: string) => + withStyle.error( + `You can't use both --config and other cli options for ${command} command`, + ), + schema: (command: string) => withStyle.error(`"--schema" is a required field for ${command} command`), + }, + postgres: { + connection: { + required: () => + withStyle.error( + `Either "url" or "host", "database" are required for database connection`, + ), + awsDataApi: () => + withStyle.error( + "You need to provide 'database', 'secretArn' and 'resourceArn' for Drizzle Kit to connect to AWS Data API", + ), + }, + }, + mysql: { + connection: { + driver: () => withStyle.error(`Only "mysql2" is available options for "--driver"`), + required: () => + withStyle.error( + `Either "url" or "host", "database" are required for database connection`, + ), + }, + }, + sqlite: { + connection: { + driver: () => { + const listOfDrivers = sqliteDriversLiterals + .map((it) => `'${it.value}'`) + .join(', '); + return withStyle.error( + `Either ${listOfDrivers} are available options for 'driver' param`, + ); + }, + url: (driver: string) => + withStyle.error( + `"url" is a required option for driver "${driver}". You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, + ), + authToken: (driver: string) => + withStyle.error( + `"authToken" is a required option for driver "${driver}". 
You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, + ), + }, + introspect: {}, + push: {}, + }, + singlestore: { + connection: { + driver: () => withStyle.error(`Only "mysql2" is available options for "--driver"`), + required: () => + withStyle.error( + `Either "url" or "host", "database" are required for database connection`, + ), + }, + }, +}; diff --git a/drizzle-kit/src/cli/validations/postgres.ts b/drizzle-kit/src/cli/validations/postgres.ts new file mode 100644 index 000000000..658760c61 --- /dev/null +++ b/drizzle-kit/src/cli/validations/postgres.ts @@ -0,0 +1,83 @@ +import { boolean, coerce, literal, object, string, TypeOf, undefined, union } from 'zod'; +import { error } from '../views'; +import { wrapParam } from './common'; + +export const postgresCredentials = union([ + object({ + driver: undefined(), + host: string().min(1), + port: coerce.number().min(1).optional(), + user: string().min(1).optional(), + password: string().min(1).optional(), + database: string().min(1), + ssl: union([ + literal('require'), + literal('allow'), + literal('prefer'), + literal('verify-full'), + boolean(), + object({}).passthrough(), + ]).optional(), + }).transform((o) => { + delete o.driver; + return o as Omit; + }), + object({ + driver: undefined(), + url: string().min(1), + }).transform<{ url: string }>((o) => { + delete o.driver; + return o; + }), + object({ + driver: literal('aws-data-api'), + database: string().min(1), + secretArn: string().min(1), + resourceArn: string().min(1), + }), + object({ + driver: literal('pglite'), + url: string().min(1), + }), +]); + +export type PostgresCredentials = TypeOf; + +export const printConfigConnectionIssues = ( + options: Record, +) => { + if (options.driver === 'aws-data-api') { + let text = `Please provide required params for AWS Data API driver:\n`; + console.log(error(text)); + console.log(wrapParam('database', options.database)); + console.log(wrapParam('secretArn', 
options.secretArn, false, 'secret')); + console.log(wrapParam('resourceArn', options.resourceArn, false, 'secret')); + process.exit(1); + } + + if ('url' in options) { + let text = `Please provide required params for Postgres driver:\n`; + console.log(error(text)); + console.log(wrapParam('url', options.url, false, 'url')); + process.exit(1); + } + + if ('host' in options || 'database' in options) { + let text = `Please provide required params for Postgres driver:\n`; + console.log(error(text)); + console.log(wrapParam('host', options.host)); + console.log(wrapParam('port', options.port, true)); + console.log(wrapParam('user', options.user, true)); + console.log(wrapParam('password', options.password, true, 'secret')); + console.log(wrapParam('database', options.database)); + console.log(wrapParam('ssl', options.ssl, true)); + process.exit(1); + } + + console.log( + error( + `Either connection "url" or "host", "database" are required for PostgreSQL database connection`, + ), + ); + process.exit(1); +}; diff --git a/drizzle-kit/src/cli/validations/singlestore.ts b/drizzle-kit/src/cli/validations/singlestore.ts new file mode 100644 index 000000000..ebe0cc5f0 --- /dev/null +++ b/drizzle-kit/src/cli/validations/singlestore.ts @@ -0,0 +1,61 @@ +import { boolean, coerce, object, string, TypeOf, union } from 'zod'; +import { error } from '../views'; +import { wrapParam } from './common'; +import { outputs } from './outputs'; + +export const singlestoreCredentials = union([ + object({ + host: string().min(1), + port: coerce.number().min(1).optional(), + user: string().min(1).optional(), + password: string().min(1).optional(), + database: string().min(1), + ssl: union([ + string(), + object({ + pfx: string().optional(), + key: string().optional(), + passphrase: string().optional(), + cert: string().optional(), + ca: union([string(), string().array()]).optional(), + crl: union([string(), string().array()]).optional(), + ciphers: string().optional(), + rejectUnauthorized: 
boolean().optional(), + }), + ]).optional(), + }), + object({ + url: string().min(1), + }), +]); + +export type SingleStoreCredentials = TypeOf; + +export const printCliConnectionIssues = (options: any) => { + const { uri, host, database } = options || {}; + + if (!uri && (!host || !database)) { + console.log(outputs.singlestore.connection.required()); + } +}; + +export const printConfigConnectionIssues = ( + options: Record, +) => { + if ('url' in options) { + let text = `Please provide required params for SingleStore driver:\n`; + console.log(error(text)); + console.log(wrapParam('url', options.url, false, 'url')); + process.exit(1); + } + + let text = `Please provide required params for SingleStore driver:\n`; + console.log(error(text)); + console.log(wrapParam('host', options.host)); + console.log(wrapParam('port', options.port, true)); + console.log(wrapParam('user', options.user, true)); + console.log(wrapParam('password', options.password, true, 'secret')); + console.log(wrapParam('database', options.database)); + console.log(wrapParam('ssl', options.ssl, true)); + process.exit(1); +}; diff --git a/drizzle-kit/src/cli/validations/sqlite.ts b/drizzle-kit/src/cli/validations/sqlite.ts new file mode 100644 index 000000000..b6ad062d5 --- /dev/null +++ b/drizzle-kit/src/cli/validations/sqlite.ts @@ -0,0 +1,94 @@ +import { softAssertUnreachable } from 'src/global'; +import { literal, object, string, TypeOf, undefined, union } from 'zod'; +import { error } from '../views'; +import { sqliteDriver, wrapParam } from './common'; + +export const sqliteCredentials = union([ + object({ + driver: literal('turso'), + url: string().min(1), + authToken: string().min(1).optional(), + }), + object({ + driver: literal('d1-http'), + accountId: string().min(1), + databaseId: string().min(1), + token: string().min(1), + }), + object({ + driver: undefined(), + url: string().min(1), + }).transform<{ url: string }>((o) => { + delete o.driver; + return o; + }), +]); + +export type 
SqliteCredentials = + | { + driver: 'turso'; + url: string; + authToken: string; + } + | { + driver: 'd1-http'; + accountId: string; + databaseId: string; + token: string; + } + | { + url: string; + }; + +const _: SqliteCredentials = {} as TypeOf; + +export const printConfigConnectionIssues = ( + options: Record, + command: 'generate' | 'migrate' | 'push' | 'pull' | 'studio', +) => { + const parsedDriver = sqliteDriver.safeParse(options.driver); + const driver = parsedDriver.success ? parsedDriver.data : ('' as never); + + if (driver === 'expo') { + if (command === 'migrate') { + console.log( + error( + `You can't use 'migrate' command with Expo SQLite, please follow migration instructions in our docs - https://orm.drizzle.team/docs/get-started-sqlite#expo-sqlite`, + ), + ); + } else if (command === 'studio') { + console.log( + error( + `You can't use 'studio' command with Expo SQLite, please use Expo Plugin https://www.npmjs.com/package/expo-drizzle-studio-plugin`, + ), + ); + } else if (command === 'pull') { + console.log(error("You can't use 'pull' command with Expo SQLite")); + } else if (command === 'push') { + console.log(error("You can't use 'push' command with Expo SQLite")); + } else { + console.log(error('Unexpected error with expo driver 🤔')); + } + process.exit(1); + } else if (driver === 'd1-http') { + let text = `Please provide required params for D1 HTTP driver:\n`; + console.log(error(text)); + console.log(wrapParam('accountId', options.accountId)); + console.log(wrapParam('databaseId', options.databaseId)); + console.log(wrapParam('token', options.token, false, 'secret')); + process.exit(1); + } else if (driver === 'turso') { + let text = `Please provide required params for Turso driver:\n`; + console.log(error(text)); + console.log(wrapParam('url', options.url)); + console.log(wrapParam('authToken', options.authToken, false, 'secret')); + return; + } else { + softAssertUnreachable(driver); + } + + let text = `Please provide required params:\n`; + 
console.log(error(text)); + console.log(wrapParam('url', options.url)); + process.exit(1); +}; diff --git a/drizzle-kit/src/cli/validations/studio.ts b/drizzle-kit/src/cli/validations/studio.ts new file mode 100644 index 000000000..cbbb3ac25 --- /dev/null +++ b/drizzle-kit/src/cli/validations/studio.ts @@ -0,0 +1,24 @@ +import { coerce, intersection, object, string, TypeOf, union } from 'zod'; +import { dialect } from '../../schemaValidator'; +import { mysqlCredentials } from './mysql'; +import { postgresCredentials } from './postgres'; +import { sqliteCredentials } from './sqlite'; + +export const credentials = intersection( + postgresCredentials, + mysqlCredentials, + sqliteCredentials, +); + +export type Credentials = TypeOf; + +export const studioCliParams = object({ + port: coerce.number().optional().default(4983), + host: string().optional().default('127.0.0.1'), + config: string().optional(), +}); + +export const studioConfig = object({ + dialect, + schema: union([string(), string().array()]).optional(), +}); diff --git a/drizzle-kit/src/cli/views.ts b/drizzle-kit/src/cli/views.ts new file mode 100644 index 000000000..56e0331df --- /dev/null +++ b/drizzle-kit/src/cli/views.ts @@ -0,0 +1,558 @@ +import chalk from 'chalk'; +import { Prompt, render, SelectState, TaskView } from 'hanji'; +import type { CommonSchema } from '../schemaValidator'; +import { objectValues } from '../utils'; +import type { Named, NamedWithSchema } from './commands/migrate'; + +export const warning = (msg: string) => { + render(`[${chalk.yellow('Warning')}] ${msg}`); +}; +export const err = (msg: string) => { + render(`${chalk.bold.red('Error')} ${msg}`); +}; + +export const info = (msg: string, greyMsg: string = ''): string => { + return `${chalk.blue.bold('Info:')} ${msg} ${greyMsg ? 
chalk.grey(greyMsg) : ''}`.trim(); +}; +export const grey = (msg: string): string => { + return chalk.grey(msg); +}; + +export const error = (error: string, greyMsg: string = ''): string => { + return `${chalk.bgRed.bold(' Error ')} ${error} ${greyMsg ? chalk.grey(greyMsg) : ''}`.trim(); +}; + +export const schema = (schema: CommonSchema): string => { + type TableEntry = (typeof schema)['tables'][keyof (typeof schema)['tables']]; + const tables = Object.values(schema.tables) as unknown as TableEntry[]; + + let msg = chalk.bold(`${tables.length} tables\n`); + + msg += tables + .map((t) => { + const columnsCount = Object.values(t.columns).length; + const indexesCount = Object.values(t.indexes).length; + const foreignKeys = Object.values(t.foreignKeys).length; + return `${chalk.bold.blue(t.name)} ${ + chalk.gray( + `${columnsCount} columns ${indexesCount} indexes ${foreignKeys} fks`, + ) + }`; + }) + .join('\n'); + + msg += '\n'; + + const enums = objectValues( + 'enums' in schema + ? 'values' in schema['enums'] + ? 
schema['enums'] + : {} + : {}, + ); + + if (enums.length > 0) { + msg += '\n'; + msg += chalk.bold(`${enums.length} enums\n`); + + msg += enums + .map((it) => { + return `${chalk.bold.blue(it.name)} ${ + chalk.gray( + `[${Object.values(it.values).join(', ')}]`, + ) + }`; + }) + .join('\n'); + msg += '\n'; + } + return msg; +}; + +export interface RenamePropmtItem { + from: T; + to: T; +} + +export const isRenamePromptItem = ( + item: RenamePropmtItem | T, +): item is RenamePropmtItem => { + return 'from' in item && 'to' in item; +}; + +export class ResolveColumnSelect extends Prompt< + RenamePropmtItem | T +> { + private readonly data: SelectState | T>; + + constructor( + private readonly tableName: string, + private readonly base: Named, + data: (RenamePropmtItem | T)[], + ) { + super(); + this.on('attach', (terminal) => terminal.toggleCursor('hide')); + this.data = new SelectState(data); + this.data.bind(this); + } + + render(status: 'idle' | 'submitted' | 'aborted'): string { + if (status === 'submitted' || status === 'aborted') { + return '\n'; + } + + let text = `\nIs ${ + chalk.bold.blue( + this.base.name, + ) + } column in ${ + chalk.bold.blue( + this.tableName, + ) + } table created or renamed from another column?\n`; + + const isSelectedRenamed = isRenamePromptItem( + this.data.items[this.data.selectedIdx], + ); + + const selectedPrefix = isSelectedRenamed + ? chalk.yellow('❯ ') + : chalk.green('❯ '); + + const labelLength: number = this.data.items + .filter((it) => isRenamePromptItem(it)) + .map((it: RenamePropmtItem) => { + return this.base.name.length + 3 + it['from'].name.length; + }) + .reduce((a, b) => { + if (a > b) { + return a; + } + return b; + }, 0); + + this.data.items.forEach((it, idx) => { + const isSelected = idx === this.data.selectedIdx; + const isRenamed = isRenamePromptItem(it); + const title = isRenamed + ? `${it.from.name} › ${it.to.name}`.padEnd(labelLength, ' ') + : it.name.padEnd(labelLength, ' '); + const label = isRenamed + ? 
`${chalk.yellow('~')} ${title} ${chalk.gray('rename column')}` + : `${chalk.green('+')} ${title} ${chalk.gray('create column')}`; + + text += isSelected ? `${selectedPrefix}${label}` : ` ${label}`; + text += idx != this.data.items.length - 1 ? '\n' : ''; + }); + return text; + } + + result(): RenamePropmtItem | T { + return this.data.items[this.data.selectedIdx]!; + } +} + +export const tableKey = (it: NamedWithSchema) => { + return it.schema === 'public' || !it.schema + ? it.name + : `${it.schema}.${it.name}`; +}; + +export class ResolveSelect extends Prompt< + RenamePropmtItem | T +> { + private readonly state: SelectState | T>; + + constructor( + private readonly base: T, + data: (RenamePropmtItem | T)[], + private readonly entityType: 'table' | 'enum' | 'sequence', + ) { + super(); + this.on('attach', (terminal) => terminal.toggleCursor('hide')); + this.state = new SelectState(data); + this.state.bind(this); + this.base = base; + } + + render(status: 'idle' | 'submitted' | 'aborted'): string { + if (status === 'submitted' || status === 'aborted') { + return ''; + } + const key = tableKey(this.base); + + let text = `\nIs ${chalk.bold.blue(key)} ${this.entityType} created or renamed from another ${this.entityType}?\n`; + + const isSelectedRenamed = isRenamePromptItem( + this.state.items[this.state.selectedIdx], + ); + + const selectedPrefix = isSelectedRenamed + ? chalk.yellow('❯ ') + : chalk.green('❯ '); + + const labelLength: number = this.state.items + .filter((it) => isRenamePromptItem(it)) + .map((_) => { + const it = _ as RenamePropmtItem; + const keyFrom = tableKey(it.from); + return key.length + 3 + keyFrom.length; + }) + .reduce((a, b) => { + if (a > b) { + return a; + } + return b; + }, 0); + + const entityType = this.entityType; + this.state.items.forEach((it, idx) => { + const isSelected = idx === this.state.selectedIdx; + const isRenamed = isRenamePromptItem(it); + + const title = isRenamed + ? 
`${tableKey(it.from)} › ${tableKey(it.to)}`.padEnd(labelLength, ' ') + : tableKey(it).padEnd(labelLength, ' '); + + const label = isRenamed + ? `${chalk.yellow('~')} ${title} ${chalk.gray(`rename ${entityType}`)}` + : `${chalk.green('+')} ${title} ${chalk.gray(`create ${entityType}`)}`; + + text += isSelected ? `${selectedPrefix}${label}` : ` ${label}`; + text += idx != this.state.items.length - 1 ? '\n' : ''; + }); + return text; + } + + result(): RenamePropmtItem | T { + return this.state.items[this.state.selectedIdx]!; + } +} + +export class ResolveSchemasSelect extends Prompt< + RenamePropmtItem | T +> { + private readonly state: SelectState | T>; + + constructor(private readonly base: Named, data: (RenamePropmtItem | T)[]) { + super(); + this.on('attach', (terminal) => terminal.toggleCursor('hide')); + this.state = new SelectState(data); + this.state.bind(this); + this.base = base; + } + + render(status: 'idle' | 'submitted' | 'aborted'): string { + if (status === 'submitted' || status === 'aborted') { + return ''; + } + + let text = `\nIs ${ + chalk.bold.blue( + this.base.name, + ) + } schema created or renamed from another schema?\n`; + const isSelectedRenamed = isRenamePromptItem( + this.state.items[this.state.selectedIdx], + ); + const selectedPrefix = isSelectedRenamed + ? chalk.yellow('❯ ') + : chalk.green('❯ '); + + const labelLength: number = this.state.items + .filter((it) => isRenamePromptItem(it)) + .map((it: RenamePropmtItem) => { + return this.base.name.length + 3 + it['from'].name.length; + }) + .reduce((a, b) => { + if (a > b) { + return a; + } + return b; + }, 0); + + this.state.items.forEach((it, idx) => { + const isSelected = idx === this.state.selectedIdx; + const isRenamed = isRenamePromptItem(it); + const title = isRenamed + ? `${it.from.name} › ${it.to.name}`.padEnd(labelLength, ' ') + : it.name.padEnd(labelLength, ' '); + const label = isRenamed + ? 
`${chalk.yellow('~')} ${title} ${chalk.gray('rename schema')}` + : `${chalk.green('+')} ${title} ${chalk.gray('create schema')}`; + + text += isSelected ? `${selectedPrefix}${label}` : ` ${label}`; + text += idx != this.state.items.length - 1 ? '\n' : ''; + }); + return text; + } + + result(): RenamePropmtItem | T { + return this.state.items[this.state.selectedIdx]!; + } +} + +class Spinner { + private offset: number = 0; + private readonly iterator: () => void; + + constructor(private readonly frames: string[]) { + this.iterator = () => { + this.offset += 1; + this.offset %= frames.length - 1; + }; + } + + public tick = () => { + this.iterator(); + }; + + public value = () => { + return this.frames[this.offset]; + }; +} + +const frames = function(values: string[]): () => string { + let index = 0; + const iterator = () => { + const frame = values[index]; + index += 1; + index %= values.length; + return frame!; + }; + return iterator; +}; + +type ValueOf = T[keyof T]; +export type IntrospectStatus = 'fetching' | 'done'; +export type IntrospectStage = + | 'tables' + | 'columns' + | 'enums' + | 'indexes' + | 'fks'; +type IntrospectState = { + [key in IntrospectStage]: { + count: number; + name: string; + status: IntrospectStatus; + }; +}; + +export class IntrospectProgress extends TaskView { + private readonly spinner: Spinner = new Spinner('⣷⣯⣟⡿⢿⣻⣽⣾'.split('')); + private timeout: NodeJS.Timeout | undefined; + + private state: IntrospectState = { + tables: { + count: 0, + name: 'tables', + status: 'fetching', + }, + columns: { + count: 0, + name: 'columns', + status: 'fetching', + }, + enums: { + count: 0, + name: 'enums', + status: 'fetching', + }, + indexes: { + count: 0, + name: 'indexes', + status: 'fetching', + }, + fks: { + count: 0, + name: 'foreign keys', + status: 'fetching', + }, + }; + + constructor(private readonly hasEnums: boolean = false) { + super(); + this.timeout = setInterval(() => { + this.spinner.tick(); + this.requestLayout(); + }, 128); + + 
this.on('detach', () => clearInterval(this.timeout)); + } + + public update( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) { + this.state[stage].count = count; + this.state[stage].status = status; + this.requestLayout(); + } + + private formatCount = (count: number) => { + const width: number = Math.max.apply( + null, + Object.values(this.state).map((it) => it.count.toFixed(0).length), + ); + + return count.toFixed(0).padEnd(width, ' '); + }; + + private statusText = (spinner: string, stage: ValueOf) => { + const { name, count } = stage; + const isDone = stage.status === 'done'; + + const prefix = isDone ? `[${chalk.green('✓')}]` : `[${spinner}]`; + + const formattedCount = this.formatCount(count); + const suffix = isDone + ? `${formattedCount} ${name} fetched` + : `${formattedCount} ${name} fetching`; + + return `${prefix} ${suffix}\n`; + }; + + render(): string { + let info = ''; + const spin = this.spinner.value(); + info += this.statusText(spin, this.state.tables); + info += this.statusText(spin, this.state.columns); + info += this.hasEnums ? 
this.statusText(spin, this.state.enums) : ''; + info += this.statusText(spin, this.state.indexes); + info += this.statusText(spin, this.state.fks); + return info; + } +} + +export class MigrateProgress extends TaskView { + private readonly spinner: Spinner = new Spinner('⣷⣯⣟⡿⢿⣻⣽⣾'.split('')); + private timeout: NodeJS.Timeout | undefined; + + constructor() { + super(); + this.timeout = setInterval(() => { + this.spinner.tick(); + this.requestLayout(); + }, 128); + + this.on('detach', () => clearInterval(this.timeout)); + } + + render(status: 'pending' | 'done'): string { + if (status === 'pending') { + const spin = this.spinner.value(); + return `[${spin}] applying migrations...`; + } + return `[${chalk.green('✓')}] migrations applied successfully!`; + } +} + +export class ProgressView extends TaskView { + private readonly spinner: Spinner = new Spinner('⣷⣯⣟⡿⢿⣻⣽⣾'.split('')); + private timeout: NodeJS.Timeout | undefined; + + constructor( + private readonly progressText: string, + private readonly successText: string, + ) { + super(); + this.timeout = setInterval(() => { + this.spinner.tick(); + this.requestLayout(); + }, 128); + + this.on('detach', () => clearInterval(this.timeout)); + } + + render(status: 'pending' | 'done'): string { + if (status === 'pending') { + const spin = this.spinner.value(); + return `[${spin}] ${this.progressText}\n`; + } + return `[${chalk.green('✓')}] ${this.successText}\n`; + } +} + +export class DropMigrationView extends Prompt { + private readonly data: SelectState; + + constructor(data: T[]) { + super(); + this.on('attach', (terminal) => terminal.toggleCursor('hide')); + this.data = new SelectState(data); + this.data.selectedIdx = data.length - 1; + this.data.bind(this); + } + + render(status: 'idle' | 'submitted' | 'aborted'): string { + if (status === 'submitted' || status === 'aborted') { + return '\n'; + } + + let text = chalk.bold('Please select migration to drop:\n'); + const selectedPrefix = chalk.yellow('❯ '); + + const 
data = trimmedRange(this.data.items, this.data.selectedIdx, 9); + const labelLength: number = data.trimmed + .map((it) => it.tag.length) + .reduce((a, b) => { + if (a > b) { + return a; + } + return b; + }, 0); + + text += data.startTrimmed ? ' ...\n' : ''; + + data.trimmed.forEach((it, idx) => { + const isSelected = idx === this.data.selectedIdx - data.offset; + let title = it.tag.padEnd(labelLength, ' '); + title = isSelected ? chalk.yellow(title) : title; + + text += isSelected ? `${selectedPrefix}${title}` : ` ${title}`; + text += idx != this.data.items.length - 1 ? '\n' : ''; + }); + + text += data.endTrimmed ? ' ...\n' : ''; + return text; + } + + result(): T { + return this.data.items[this.data.selectedIdx]!; + } +} + +export const trimmedRange = ( + arr: T[], + index: number, + limitLines: number, +): { + trimmed: T[]; + offset: number; + startTrimmed: boolean; + endTrimmed: boolean; +} => { + const limit = limitLines - 2; + const sideLimit = Math.round(limit / 2); + + const endTrimmed = arr.length - sideLimit > index; + const startTrimmed = index > sideLimit - 1; + + const paddingStart = Math.max(index + sideLimit - arr.length, 0); + const paddingEnd = Math.min(index - sideLimit + 1, 0); + + const d1 = endTrimmed ? 1 : 0; + const d2 = startTrimmed ? 
0 : 1; + + const start = Math.max(0, index - sideLimit + d1 - paddingStart); + const end = Math.min(arr.length, index + sideLimit + d2 - paddingEnd); + + return { + trimmed: arr.slice(start, end), + offset: start, + startTrimmed, + endTrimmed, + }; +}; diff --git a/drizzle-kit/src/extensions/vector.ts b/drizzle-kit/src/extensions/vector.ts new file mode 100644 index 000000000..e8b4f87ef --- /dev/null +++ b/drizzle-kit/src/extensions/vector.ts @@ -0,0 +1,10 @@ +export const vectorOps = [ + 'vector_l2_ops', + 'vector_ip_ops', + 'vector_cosine_ops', + 'vector_l1_ops', + 'bit_hamming_ops', + 'bit_jaccard_ops', + 'halfvec_l2_ops', + 'sparsevec_l2_ops', +]; diff --git a/drizzle-kit/src/global.ts b/drizzle-kit/src/global.ts new file mode 100644 index 000000000..4cea3d15e --- /dev/null +++ b/drizzle-kit/src/global.ts @@ -0,0 +1,61 @@ +export const originUUID = '00000000-0000-0000-0000-000000000000'; +export const snapshotVersion = '7'; + +export function assertUnreachable(x: never | undefined): never { + throw new Error("Didn't expect to get here"); +} + +// don't fail in runtime, types only +export function softAssertUnreachable(x: never) { + return null as never; +} + +export const mapValues = ( + obj: Record, + map: (input: IN) => OUT, +): Record => { + const result = Object.keys(obj).reduce(function(result, key) { + result[key] = map(obj[key]); + return result; + }, {} as Record); + return result; +}; + +export const mapKeys = ( + obj: Record, + map: (key: string, value: T) => string, +): Record => { + const result = Object.fromEntries( + Object.entries(obj).map(([key, val]) => { + const newKey = map(key, val); + return [newKey, val]; + }), + ); + return result; +}; + +export const mapEntries = ( + obj: Record, + map: (key: string, value: T) => [string, T], +): Record => { + const result = Object.fromEntries( + Object.entries(obj).map(([key, val]) => { + const [newKey, newVal] = map(key, val); + return [newKey, newVal]; + }), + ); + return result; +}; + +export const 
customMapEntries = ( + obj: Record, + map: (key: string, value: T) => [string, TReturn], +): Record => { + const result = Object.fromEntries( + Object.entries(obj).map(([key, val]) => { + const [newKey, newVal] = map(key, val); + return [newKey, newVal]; + }), + ); + return result; +}; diff --git a/drizzle-kit/src/index.ts b/drizzle-kit/src/index.ts new file mode 100644 index 000000000..9fab4bcb8 --- /dev/null +++ b/drizzle-kit/src/index.ts @@ -0,0 +1,319 @@ +import { ConnectionOptions } from 'tls'; +import type { Driver, Prefix } from './cli/validations/common'; +import type { Dialect } from './schemaValidator'; + +// import {SslOptions} from 'mysql2' +type SslOptions = { + pfx?: string; + key?: string; + passphrase?: string; + cert?: string; + ca?: string | string[]; + crl?: string | string[]; + ciphers?: string; + rejectUnauthorized?: boolean; +}; + +type Verify = U; + +/** + * **You are currently using version 0.21.0+ of drizzle-kit. If you have just upgraded to this version, please make sure to read the changelog to understand what changes have been made and what + * adjustments may be necessary for you. See https://orm.drizzle.team/kit-docs/upgrade-21#how-to-migrate-to-0210** + * + * **Config** usage: + * + * `dialect` - mandatory and is responsible for explicitly providing a databse dialect you are using for all the commands + * *Possible values*: `postgresql`, `mysql`, `sqlite`, `singlestore + * + * See https://orm.drizzle.team/kit-docs/config-reference#dialect + * + * --- + * `schema` - param lets you define where your schema file/files live. + * You can have as many separate schema files as you want and define paths to them using glob or array of globs syntax. 
+ * + * See https://orm.drizzle.team/kit-docs/config-reference#schema + * + * --- + * `out` - allows you to define the folder for your migrations and a folder, where drizzle will introspect the schema and relations + * + * See https://orm.drizzle.team/kit-docs/config-reference#out + * + * --- + * `driver` - optional param that is responsible for explicitly providing a driver to use when accessing a database + * *Possible values*: `aws-data-api`, `d1-http`, `expo`, `turso`, `pglite` + * If you don't use AWS Data API, D1, Turso or Expo - ypu don't need this driver. You can check a driver strategy choice here: https://orm.drizzle.team/kit-docs/upgrade-21 + * + * See https://orm.drizzle.team/kit-docs/config-reference#driver + * + * --- + * + * `dbCredentials` - an object to define your connection to the database. For more info please check the docs + * + * See https://orm.drizzle.team/kit-docs/config-reference#dbcredentials + * + * --- + * + * `migrations` - param let’s use specify custom table and schema(PostgreSQL only) for migrations. + * By default, all information about executed migrations will be stored in the database inside + * the `__drizzle_migrations` table, and for PostgreSQL, inside the drizzle schema. + * However, you can configure where to store those records. + * + * See https://orm.drizzle.team/kit-docs/config-reference#migrations + * + * --- + * + * `breakpoints` - param lets you enable/disable SQL statement breakpoints in generated migrations. + * It’s optional and true by default, it’s necessary to properly apply migrations on databases, + * that do not support multiple DDL alternation statements in one transaction(MySQL, SQLite, SingleStore) and + * Drizzle ORM has to apply them sequentially one by one. + * + * See https://orm.drizzle.team/kit-docs/config-reference#breakpoints + * + * --- + * + * `tablesFilters` - param lets you filter tables with glob syntax for db push command. 
+ * It’s useful when you have only one database avaialable for several separate projects with separate sql schemas. + * + * How to define multi-project tables with Drizzle ORM — see https://orm.drizzle.team/docs/goodies#multi-project-schema + * + * See https://orm.drizzle.team/kit-docs/config-reference#tablesfilters + * + * --- + * + * `schemaFilter` - parameter allows you to define which schema in PostgreSQL should be used for either introspect or push commands. + * This parameter accepts a single schema as a string or an array of schemas as strings. + * No glob pattern is supported here. By default, drizzle will use the public schema for both commands, + * but you can add any schema you need. + * + * For example, having schemaFilter: ["my_schema"] will only look for tables in both the database and + * drizzle schema that are a part of the my_schema schema. + * + * See https://orm.drizzle.team/kit-docs/config-reference#schemafilter + * + * --- + * + * `verbose` - command is used for drizzle-kit push commands and prints all statements that will be executed. + * + * > Note: This command will only print the statements that should be executed. + * To approve them before applying, please refer to the `strict` command. + * + * See https://orm.drizzle.team/kit-docs/config-reference#verbose + * + * --- + * + * `strict` - command is used for drizzle-kit push commands and will always ask for your confirmation, + * either to execute all statements needed to sync your schema with the database or not. 
+ * + * See https://orm.drizzle.team/kit-docs/config-reference#strict + */ +export type Config = + & { + dialect: Dialect; + out?: string; + breakpoints?: boolean; + tablesFilter?: string | string[]; + extensionsFilters?: 'postgis'[]; + schemaFilter?: string | string[]; + schema?: string | string[]; + verbose?: boolean; + strict?: boolean; + migrations?: { + table?: string; + schema?: string; + prefix?: Prefix; + }; + introspect?: { + casing: 'camel' | 'preserve'; + }; + } + & ( + | { + dialect: Verify; + driver: Verify; + dbCredentials: { + url: string; + authToken?: string; + }; + } + | { + dialect: Verify; + dbCredentials: { + url: string; + }; + } + | { + dialect: Verify; + dbCredentials: + | ({ + host: string; + port?: number; + user?: string; + password?: string; + database: string; + ssl?: + | boolean + | 'require' + | 'allow' + | 'prefer' + | 'verify-full' + | ConnectionOptions; + } & {}) + | { + url: string; + }; + } + | { + dialect: Verify; + driver: Verify; + dbCredentials: { + database: string; + secretArn: string; + resourceArn: string; + }; + } + | { + dialect: Verify; + driver: Verify; + dbCredentials: { + url: string; + }; + } + | { + dialect: Verify; + dbCredentials: + | { + host: string; + port?: number; + user?: string; + password?: string; + database: string; + ssl?: string | SslOptions; + } + | { + url: string; + }; + } + | { + dialect: Verify; + driver: Verify; + dbCredentials: { + accountId: string; + databaseId: string; + token: string; + }; + } + | { + dialect: Verify; + driver: Verify; + } + | {} + | { + dialect: Verify; + dbCredentials: + | { + host: string; + port?: number; + user?: string; + password?: string; + database: string; + ssl?: string | SslOptions; + } + | { + url: string; + }; + } + ); + +/** + * **You are currently using version 0.21.0+ of drizzle-kit. If you have just upgraded to this version, please make sure to read the changelog to understand what changes have been made and what + * adjustments may be necessary for you. 
See https://orm.drizzle.team/kit-docs/upgrade-21#how-to-migrate-to-0210**
+ *
+ * **Config** usage:
+ *
+ * `dialect` - mandatory and is responsible for explicitly providing a database dialect you are using for all the commands
+ * *Possible values*: `postgresql`, `mysql`, `sqlite`, `singlestore`
+ *
+ * See https://orm.drizzle.team/kit-docs/config-reference#dialect
+ *
+ * ---
+ * `schema` - param lets you define where your schema file/files live.
+ * You can have as many separate schema files as you want and define paths to them using glob or array of globs syntax.
+ *
+ * See https://orm.drizzle.team/kit-docs/config-reference#schema
+ *
+ * ---
+ * `out` - allows you to define the folder for your migrations and a folder, where drizzle will introspect the schema and relations
+ *
+ * See https://orm.drizzle.team/kit-docs/config-reference#out
+ *
+ * ---
+ * `driver` - optional param that is responsible for explicitly providing a driver to use when accessing a database
+ * *Possible values*: `aws-data-api`, `d1-http`, `expo`, `turso`, `pglite`
+ * If you don't use AWS Data API, D1, Turso or Expo - you don't need this driver. You can check a driver strategy choice here: https://orm.drizzle.team/kit-docs/upgrade-21
+ *
+ * See https://orm.drizzle.team/kit-docs/config-reference#driver
+ *
+ * ---
+ *
+ * `dbCredentials` - an object to define your connection to the database. For more info please check the docs
+ *
+ * See https://orm.drizzle.team/kit-docs/config-reference#dbcredentials
+ *
+ * ---
+ *
+ * `migrations` - param lets you specify custom table and schema(PostgreSQL only) for migrations.
+ * By default, all information about executed migrations will be stored in the database inside
+ * the `__drizzle_migrations` table, and for PostgreSQL, inside the drizzle schema.
+ * However, you can configure where to store those records.
+ *
+ * See https://orm.drizzle.team/kit-docs/config-reference#migrations
+ *
+ * ---
+ *
+ * `breakpoints` - param lets you enable/disable SQL statement breakpoints in generated migrations.
+ * It’s optional and true by default, it’s necessary to properly apply migrations on databases,
+ * that do not support multiple DDL alteration statements in one transaction(MySQL, SQLite, SingleStore) and
+ * Drizzle ORM has to apply them sequentially one by one.
+ *
+ * See https://orm.drizzle.team/kit-docs/config-reference#breakpoints
+ *
+ * ---
+ *
+ * `tablesFilters` - param lets you filter tables with glob syntax for db push command.
+ * It’s useful when you have only one database available for several separate projects with separate sql schemas.
+ *
+ * How to define multi-project tables with Drizzle ORM — see https://orm.drizzle.team/docs/goodies#multi-project-schema
+ *
+ * See https://orm.drizzle.team/kit-docs/config-reference#tablesfilters
+ *
+ * ---
+ *
+ * `schemaFilter` - parameter allows you to define which schema in PostgreSQL should be used for either introspect or push commands.
+ * This parameter accepts a single schema as a string or an array of schemas as strings.
+ * No glob pattern is supported here. By default, drizzle will use the public schema for both commands,
+ * but you can add any schema you need.
+ *
+ * For example, having schemaFilter: ["my_schema"] will only look for tables in both the database and
+ * drizzle schema that are a part of the my_schema schema.
+ *
+ * See https://orm.drizzle.team/kit-docs/config-reference#schemafilter
+ *
+ * ---
+ *
+ * `verbose` - command is used for drizzle-kit push commands and prints all statements that will be executed.
+ *
+ * > Note: This command will only print the statements that should be executed.
+ * To approve them before applying, please refer to the `strict` command.
+ * + * See https://orm.drizzle.team/kit-docs/config-reference#verbose + * + * --- + * + * `strict` - command is used for drizzle-kit push commands and will always ask for your confirmation, + * either to execute all statements needed to sync your schema with the database or not. + * + * See https://orm.drizzle.team/kit-docs/config-reference#strict + */ +export function defineConfig(config: Config) { + return config; +} diff --git a/drizzle-kit/src/introspect-mysql.ts b/drizzle-kit/src/introspect-mysql.ts new file mode 100644 index 000000000..f206935a3 --- /dev/null +++ b/drizzle-kit/src/introspect-mysql.ts @@ -0,0 +1,879 @@ +/* eslint-disable @typescript-eslint/no-unsafe-argument */ +import './@types/utils'; +import type { Casing } from './cli/validations/common'; +import { + Column, + ForeignKey, + Index, + MySqlSchema, + MySqlSchemaInternal, + PrimaryKey, + UniqueConstraint, +} from './serializer/mysqlSchema'; +import { indexName } from './serializer/mysqlSerializer'; + +// time precision to fsp +// {mode: "string"} for timestamp by default + +const mysqlImportsList = new Set([ + 'mysqlTable', + 'mysqlEnum', + 'bigint', + 'binary', + 'boolean', + 'char', + 'date', + 'datetime', + 'decimal', + 'double', + 'float', + 'int', + 'json', + 'mediumint', + 'real', + 'serial', + 'smallint', + 'text', + 'tinytext', + 'mediumtext', + 'longtext', + 'time', + 'timestamp', + 'tinyint', + 'varbinary', + 'varchar', + 'year', + 'enum', +]); + +const objToStatement = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `"${it}": "${json[it]}"`).join(', '); + statement += ' }'; + return statement; +}; + +const objToStatement2 = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + 
statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys + statement += ' }'; + return statement; +}; + +const timeConfig = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); + statement += ' }'; + return statement; +}; + +const binaryConfig = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); + statement += ' }'; + return statement; +}; + +const importsPatch = { + 'double precision': 'doublePrecision', + 'timestamp without time zone': 'timestamp', +} as Record; + +const relations = new Set(); + +const escapeColumnKey = (value: string) => { + if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { + return `"${value}"`; + } + return value; +}; + +const prepareCasing = (casing?: Casing) => (value: string) => { + if (casing === 'preserve') { + return escapeColumnKey(value); + } + if (casing === 'camel') { + return escapeColumnKey(value.camelCase()); + } + + return escapeColumnKey(value); +}; + +export const schemaToTypeScript = ( + schema: MySqlSchemaInternal, + casing: Casing, +) => { + const withCasing = prepareCasing(casing); + // collectFKs + Object.values(schema.tables).forEach((table) => { + Object.values(table.foreignKeys).forEach((fk) => { + const relation = `${fk.tableFrom}-${fk.tableTo}`; + relations.add(relation); + }); + }); + + const imports = Object.values(schema.tables).reduce( + (res, it) => { + const idxImports = Object.values(it.indexes).map((idx) => idx.isUnique ? 
'uniqueIndex' : 'index'); + const fkImpots = Object.values(it.foreignKeys).map((it) => 'foreignKey'); + const pkImports = Object.values(it.compositePrimaryKeys).map( + (it) => 'primaryKey', + ); + const uniqueImports = Object.values(it.uniqueConstraints).map( + (it) => 'unique', + ); + + res.mysql.push(...idxImports); + res.mysql.push(...fkImpots); + res.mysql.push(...pkImports); + res.mysql.push(...uniqueImports); + + const columnImports = Object.values(it.columns) + .map((col) => { + let patched = importsPatch[col.type] ?? col.type; + patched = patched.startsWith('varchar(') ? 'varchar' : patched; + patched = patched.startsWith('char(') ? 'char' : patched; + patched = patched.startsWith('binary(') ? 'binary' : patched; + patched = patched.startsWith('decimal(') ? 'decimal' : patched; + patched = patched.startsWith('smallint(') ? 'smallint' : patched; + patched = patched.startsWith('enum(') ? 'mysqlEnum' : patched; + patched = patched.startsWith('datetime(') ? 'datetime' : patched; + patched = patched.startsWith('varbinary(') ? 'varbinary' : patched; + patched = patched.startsWith('int(') ? 'int' : patched; + patched = patched.startsWith('double(') ? 
'double' : patched; + return patched; + }) + .filter((type) => { + return mysqlImportsList.has(type); + }); + + res.mysql.push(...columnImports); + return res; + }, + { mysql: [] as string[] }, + ); + + const tableStatements = Object.values(schema.tables).map((table) => { + const func = 'mysqlTable'; + let statement = ''; + if (imports.mysql.includes(withCasing(table.name))) { + statement = `// Table name is in conflict with ${ + withCasing( + table.name, + ) + } import.\n// Please change to any other name, that is not in imports list\n`; + } + statement += `export const ${withCasing(table.name)} = ${func}("${table.name}", {\n`; + statement += createTableColumns( + Object.values(table.columns), + Object.values(table.foreignKeys), + withCasing, + table.name, + schema, + ); + statement += '}'; + + // more than 2 fields or self reference or cyclic + const filteredFKs = Object.values(table.foreignKeys).filter((it) => { + return it.columnsFrom.length > 1 || isSelf(it); + }); + + if ( + Object.keys(table.indexes).length > 0 + || filteredFKs.length > 0 + || Object.keys(table.compositePrimaryKeys).length > 0 + || Object.keys(table.uniqueConstraints).length > 0 + ) { + statement += ',\n'; + statement += '(table) => {\n'; + statement += '\treturn {\n'; + statement += createTableIndexes( + table.name, + Object.values(table.indexes), + withCasing, + ); + statement += createTableFKs(Object.values(filteredFKs), withCasing); + statement += createTablePKs( + Object.values(table.compositePrimaryKeys), + withCasing, + ); + statement += createTableUniques( + Object.values(table.uniqueConstraints), + withCasing, + ); + statement += '\t}\n'; + statement += '}'; + } + + statement += ');'; + return statement; + }); + + const uniqueMySqlImports = [ + 'mysqlTable', + 'mysqlSchema', + 'AnyMySqlColumn', + ...new Set(imports.mysql), + ]; + const importsTs = `import { ${ + uniqueMySqlImports.join( + ', ', + ) + } } from "drizzle-orm/mysql-core"\nimport { sql } from "drizzle-orm"\n\n`; + + let 
decalrations = ''; + decalrations += tableStatements.join('\n\n'); + + const file = importsTs + decalrations; + + const schemaEntry = ` + { + ${ + Object.values(schema.tables) + .map((it) => withCasing(it.name)) + .join(',') + } + } + `; + + return { + file, // backward compatible, print to file + imports: importsTs, + decalrations, + schemaEntry, + }; +}; + +const isCyclic = (fk: ForeignKey) => { + const key = `${fk.tableFrom}-${fk.tableTo}`; + const reverse = `${fk.tableTo}-${fk.tableFrom}`; + return relations.has(key) && relations.has(reverse); +}; + +const isSelf = (fk: ForeignKey) => { + return fk.tableFrom === fk.tableTo; +}; + +const mapColumnDefault = (defaultValue: any, isExpression?: boolean) => { + if (isExpression) { + return `sql\`${defaultValue}\``; + } + + return defaultValue; +}; + +const mapColumnDefaultForJson = (defaultValue: any) => { + if ( + typeof defaultValue === 'string' + && defaultValue.startsWith("('") + && defaultValue.endsWith("')") + ) { + return defaultValue.substring(2, defaultValue.length - 2); + } + + return defaultValue; +}; + +const column = ( + type: string, + name: string, + casing: (value: string) => string, + defaultValue?: any, + autoincrement?: boolean, + onUpdate?: boolean, + isExpression?: boolean, +) => { + let lowered = type; + if (!type.startsWith('enum(')) { + lowered = type.toLowerCase(); + } + + if (lowered === 'serial') { + return `${casing(name)}: serial("${name}")`; + } + + if (lowered.startsWith('int')) { + const isUnsigned = lowered.startsWith('int unsigned'); + let out = `${casing(name)}: int("${name}"${isUnsigned ? ', { unsigned: true }' : ''})`; + out += autoincrement ? `.autoincrement()` : ''; + out += typeof defaultValue !== 'undefined' + ? 
`.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('tinyint')) { + const isUnsigned = lowered.startsWith('tinyint unsigned'); + // let out = `${name.camelCase()}: tinyint("${name}")`; + let out: string = `${casing(name)}: tinyint("${name}"${isUnsigned ? ', { unsigned: true }' : ''})`; + out += autoincrement ? `.autoincrement()` : ''; + out += typeof defaultValue !== 'undefined' + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('smallint')) { + const isUnsigned = lowered.startsWith('smallint unsigned'); + let out = `${casing(name)}: smallint("${name}"${isUnsigned ? ', { unsigned: true }' : ''})`; + out += autoincrement ? `.autoincrement()` : ''; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('mediumint')) { + const isUnsigned = lowered.startsWith('mediumint unsigned'); + let out = `${casing(name)}: mediumint("${name}"${isUnsigned ? ', { unsigned: true }' : ''})`; + out += autoincrement ? `.autoincrement()` : ''; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('bigint')) { + const isUnsigned = lowered.startsWith('bigint unsigned'); + let out = `${casing(name)}: bigint("${name}", { mode: "number"${isUnsigned ? ', unsigned: true' : ''} })`; + out += autoincrement ? `.autoincrement()` : ''; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered === 'boolean') { + let out = `${casing(name)}: boolean("${name}")`; + out += defaultValue + ? 
`.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('double')) { + let params: + | { precision: string | undefined; scale: string | undefined } + | undefined; + + if (lowered.length > 6) { + const [precision, scale] = lowered + .slice(7, lowered.length - 1) + .split(','); + params = { precision, scale }; + } + + let out = params + ? `${casing(name)}: double("${name}", ${timeConfig(params)})` + : `${casing(name)}: double("${name}")`; + + // let out = `${name.camelCase()}: double("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered === 'float') { + let out = `${casing(name)}: float("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered === 'real') { + let out = `${casing(name)}: real("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('timestamp')) { + const keyLength = 'timestamp'.length + 1; + let fsp = lowered.length > keyLength + ? Number(lowered.substring(keyLength, lowered.length - 1)) + : null; + fsp = fsp ? fsp : null; + + const params = timeConfig({ fsp, mode: "'string'" }); + + let out = params + ? `${casing(name)}: timestamp("${name}", ${params})` + : `${casing(name)}: timestamp("${name}")`; + + // mysql has only CURRENT_TIMESTAMP, as I found from docs. But will leave now() for just a case + defaultValue = defaultValue === 'now()' || defaultValue === '(CURRENT_TIMESTAMP)' + ? '.defaultNow()' + : defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + + let onUpdateNow = onUpdate ? '.onUpdateNow()' : ''; + out += onUpdateNow; + + return out; + } + + if (lowered.startsWith('time')) { + const keyLength = 'time'.length + 1; + let fsp = lowered.length > keyLength + ? 
Number(lowered.substring(keyLength, lowered.length - 1)) + : null; + fsp = fsp ? fsp : null; + + const params = timeConfig({ fsp }); + + let out = params + ? `${casing(name)}: time("${name}", ${params})` + : `${casing(name)}: time("${name}")`; + + defaultValue = defaultValue === 'now()' + ? '.defaultNow()' + : defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + if (lowered === 'date') { + let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t${ + casing( + name, + ) + }: date("${name}", { mode: 'string' })`; + + defaultValue = defaultValue === 'now()' + ? '.defaultNow()' + : defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + // in mysql text can't have default value. Will leave it in case smth ;) + if (lowered === 'text') { + let out = `${casing(name)}: text("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + // in mysql text can't have default value. Will leave it in case smth ;) + if (lowered === 'tinytext') { + let out = `${casing(name)}: tinytext("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + // in mysql text can't have default value. Will leave it in case smth ;) + if (lowered === 'mediumtext') { + let out = `${casing(name)}: mediumtext("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + // in mysql text can't have default value. Will leave it in case smth ;) + if (lowered === 'longtext') { + let out = `${casing(name)}: longtext("${name}")`; + out += defaultValue + ? 
`.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered === 'year') { + let out = `${casing(name)}: year("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + // in mysql json can't have default value. Will leave it in case smth ;) + if (lowered === 'json') { + let out = `${casing(name)}: json("${name}")`; + + out += defaultValue + ? `.default(${mapColumnDefaultForJson(defaultValue)})` + : ''; + + return out; + } + + if (lowered.startsWith('varchar')) { + let out: string = `${ + casing( + name, + ) + }: varchar("${name}", { length: ${ + lowered.substring( + 'varchar'.length + 1, + lowered.length - 1, + ) + } })`; + + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('char')) { + let out: string = `${ + casing( + name, + ) + }: char("${name}", { length: ${ + lowered.substring( + 'char'.length + 1, + lowered.length - 1, + ) + } })`; + + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('datetime')) { + let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t`; + + const fsp = lowered.startsWith('datetime(') + ? lowered.substring('datetime'.length + 1, lowered.length - 1) + : undefined; + + out = fsp + ? `${ + casing( + name, + ) + }: datetime("${name}", { mode: 'string', fsp: ${ + lowered.substring( + 'datetime'.length + 1, + lowered.length - 1, + ) + } })` + : `${casing(name)}: datetime("${name}", { mode: 'string'})`; + + defaultValue = defaultValue === 'now()' + ? '.defaultNow()' + : defaultValue + ? 
`.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + if (lowered.startsWith('decimal')) { + let params: + | { precision: string | undefined; scale: string | undefined } + | undefined; + + if (lowered.length > 7) { + const [precision, scale] = lowered + .slice(8, lowered.length - 1) + .split(','); + params = { precision, scale }; + } + + let out = params + ? `${casing(name)}: decimal("${name}", ${timeConfig(params)})` + : `${casing(name)}: decimal("${name}")`; + + defaultValue = typeof defaultValue !== 'undefined' + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + if (lowered.startsWith('binary')) { + const keyLength = 'binary'.length + 1; + let length = lowered.length > keyLength + ? Number(lowered.substring(keyLength, lowered.length - 1)) + : null; + length = length ? length : null; + + const params = binaryConfig({ length }); + + let out = params + ? `${casing(name)}: binary("${name}", ${params})` + : `${casing(name)}: binary("${name}")`; + + defaultValue = defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + if (lowered.startsWith('enum')) { + const values = lowered.substring('enum'.length + 1, lowered.length - 1); + let out = `${casing(name)}: mysqlEnum("${name}", [${values}])`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('varbinary')) { + const keyLength = 'varbinary'.length + 1; + let length = lowered.length > keyLength + ? Number(lowered.substring(keyLength, lowered.length - 1)) + : null; + length = length ? length : null; + + const params = binaryConfig({ length }); + + let out = params + ? `${casing(name)}: varbinary("${name}", ${params})` + : `${casing(name)}: varbinary("${name}")`; + + defaultValue = defaultValue + ? 
`.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + console.log('uknown', type); + return `// Warning: Can't parse ${type} from database\n\t// ${type}Type: ${type}("${name}")`; +}; + +const createTableColumns = ( + columns: Column[], + fks: ForeignKey[], + casing: (val: string) => string, + tableName: string, + schema: MySqlSchemaInternal, +): string => { + let statement = ''; + + // no self refs and no cyclic + const oneColumnsFKs = Object.values(fks) + .filter((it) => { + return !isSelf(it); + }) + .filter((it) => it.columnsFrom.length === 1); + + const fkByColumnName = oneColumnsFKs.reduce((res, it) => { + const arr = res[it.columnsFrom[0]] || []; + arr.push(it); + res[it.columnsFrom[0]] = arr; + return res; + }, {} as Record); + + columns.forEach((it) => { + statement += '\t'; + statement += column( + it.type, + it.name, + casing, + it.default, + it.autoincrement, + it.onUpdate, + schema.internal?.tables![tableName]?.columns[it.name] + ?.isDefaultAnExpression ?? false, + ); + statement += it.primaryKey ? '.primaryKey()' : ''; + statement += it.notNull ? '.notNull()' : ''; + + statement += it.generated + ? `.generatedAlwaysAs(sql\`${ + it.generated.as.replace( + /`/g, + '\\`', + ) + }\`, { mode: "${it.generated.type}" })` + : ''; + + const fks = fkByColumnName[it.name]; + if (fks) { + const fksStatement = fks + .map((it) => { + const onDelete = it.onDelete && it.onDelete !== 'no action' ? it.onDelete : null; + const onUpdate = it.onUpdate && it.onUpdate !== 'no action' ? it.onUpdate : null; + const params = { onDelete, onUpdate }; + + const typeSuffix = isCyclic(it) ? 
': AnyMySqlColumn' : ''; + + const paramsStr = objToStatement2(params); + if (paramsStr) { + return `.references(()${typeSuffix} => ${ + casing( + it.tableTo, + ) + }.${casing(it.columnsTo[0])}, ${paramsStr} )`; + } + return `.references(()${typeSuffix} => ${casing(it.tableTo)}.${ + casing( + it.columnsTo[0], + ) + })`; + }) + .join(''); + statement += fksStatement; + } + + statement += ',\n'; + }); + + return statement; +}; + +const createTableIndexes = ( + tableName: string, + idxs: Index[], + casing: (value: string) => string, +): string => { + let statement = ''; + + idxs.forEach((it) => { + let idxKey = it.name.startsWith(tableName) && it.name !== tableName + ? it.name.slice(tableName.length + 1) + : it.name; + idxKey = idxKey.endsWith('_index') + ? idxKey.slice(0, -'_index'.length) + '_idx' + : idxKey; + + idxKey = casing(idxKey); + + const indexGeneratedName = indexName(tableName, it.columns); + const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; + + statement += `\t\t${idxKey}: `; + statement += it.isUnique ? 
'uniqueIndex(' : 'index('; + statement += `${escapedIndexName})`; + statement += `.on(${ + it.columns + .map((it) => `table.${casing(it)}`) + .join(', ') + }),`; + statement += `\n`; + }); + + return statement; +}; + +const createTableUniques = ( + unqs: UniqueConstraint[], + casing: (value: string) => string, +): string => { + let statement = ''; + + unqs.forEach((it) => { + const idxKey = casing(it.name); + + statement += `\t\t${idxKey}: `; + statement += 'unique('; + statement += `"${it.name}")`; + statement += `.on(${ + it.columns + .map((it) => `table.${casing(it)}`) + .join(', ') + }),`; + statement += `\n`; + }); + + return statement; +}; + +const createTablePKs = ( + pks: PrimaryKey[], + casing: (value: string) => string, +): string => { + let statement = ''; + + pks.forEach((it) => { + let idxKey = casing(it.name); + + statement += `\t\t${idxKey}: `; + statement += 'primaryKey({ columns: ['; + statement += `${ + it.columns + .map((c) => { + return `table.${casing(c)}`; + }) + .join(', ') + }]${it.name ? `, name: "${it.name}"` : ''}}`; + statement += '),'; + statement += `\n`; + }); + + return statement; +}; + +const createTableFKs = ( + fks: ForeignKey[], + casing: (value: string) => string, +): string => { + let statement = ''; + + fks.forEach((it) => { + const isSelf = it.tableTo === it.tableFrom; + const tableTo = isSelf ? 'table' : `${casing(it.tableTo)}`; + statement += `\t\t${casing(it.name)}: foreignKey({\n`; + statement += `\t\t\tcolumns: [${ + it.columnsFrom + .map((i) => `table.${casing(i)}`) + .join(', ') + }],\n`; + statement += `\t\t\tforeignColumns: [${ + it.columnsTo + .map((i) => `${tableTo}.${casing(i)}`) + .join(', ') + }],\n`; + statement += `\t\t\tname: "${it.name}"\n`; + statement += `\t\t})`; + + statement += it.onUpdate && it.onUpdate !== 'no action' + ? `.onUpdate("${it.onUpdate}")` + : ''; + + statement += it.onDelete && it.onDelete !== 'no action' + ? 
`.onDelete("${it.onDelete}")` + : ''; + + statement += `,\n`; + }); + + return statement; +}; diff --git a/drizzle-kit/src/introspect-pg.ts b/drizzle-kit/src/introspect-pg.ts new file mode 100644 index 000000000..b7a52b735 --- /dev/null +++ b/drizzle-kit/src/introspect-pg.ts @@ -0,0 +1,1314 @@ +import { getTableName, is } from 'drizzle-orm'; +import { AnyPgTable } from 'drizzle-orm/pg-core'; +import { + createTableRelationsHelpers, + extractTablesRelationalConfig, + Many, + One, + Relation, + Relations, +} from 'drizzle-orm/relations'; +import { plural, singular } from 'pluralize'; +import './@types/utils'; +import { Casing } from './cli/validations/common'; +import { vectorOps } from './extensions/vector'; +import { assertUnreachable } from './global'; +import { + Column, + ForeignKey, + Index, + PgKitInternals, + PgSchemaInternal, + PrimaryKey, + UniqueConstraint, +} from './serializer/pgSchema'; +import { indexName } from './serializer/pgSerializer'; + +const pgImportsList = new Set([ + 'pgTable', + 'pgEnum', + 'smallint', + 'integer', + 'bigint', + 'boolean', + 'text', + 'varchar', + 'char', + 'serial', + 'smallserial', + 'bigserial', + 'decimal', + 'numeric', + 'real', + 'json', + 'jsonb', + 'time', + 'timestamp', + 'date', + 'interval', + 'cidr', + 'inet', + 'macaddr', + 'macaddr8', + 'bigint', + 'doublePrecision', + 'uuid', + 'vector', + 'point', + 'line', + 'geometry', +]); + +const objToStatement2 = (json: { [s: string]: unknown }) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys + statement += ' }'; + return statement; +}; + +const timeConfig = (json: { [s: string]: unknown }) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + 
statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); + statement += ' }'; + return statement; +}; + +const possibleIntervals = [ + 'year', + 'month', + 'day', + 'hour', + 'minute', + 'second', + 'year to month', + 'day to hour', + 'day to minute', + 'day to second', + 'hour to minute', + 'hour to second', + 'minute to second', +]; + +const intervalStrToObj = (str: string) => { + if (str.startsWith('interval(')) { + return { + precision: Number(str.substring('interval('.length, str.length - 1)), + }; + } + const splitted = str.split(' '); + if (splitted.length === 1) { + return {}; + } + const rest = splitted.slice(1, splitted.length).join(' '); + if (possibleIntervals.includes(rest)) { + return { fields: `"${rest}"` }; + } + + for (const s of possibleIntervals) { + if (rest.startsWith(`${s}(`)) { + return { + fields: `"${s}"`, + precision: Number(rest.substring(s.length + 1, rest.length - 1)), + }; + } + } + return {}; +}; + +const intervalConfig = (str: string) => { + const json = intervalStrToObj(str); + // json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys + .map((it: keyof typeof json) => `${it}: ${json[it]}`) + .join(', '); + statement += ' }'; + return statement; +}; + +const mapColumnDefault = (defaultValue: any, isExpression?: boolean) => { + if (isExpression) { + return `sql\`${defaultValue}\``; + } + + return defaultValue; +}; + +const importsPatch = { + 'double precision': 'doublePrecision', + 'timestamp without time zone': 'timestamp', + 'timestamp with time zone': 'timestamp', + 'time without time zone': 'time', + 'time with time zone': 'time', +} as Record; + +const relations = new Set(); + +const escapeColumnKey = (value: string) => { + if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { + return `"${value}"`; + } + return value; +}; + +const withCasing = (value: string, casing: Casing) => { + if (casing 
=== 'preserve') { + return escapeColumnKey(value); + } + if (casing === 'camel') { + return escapeColumnKey(value.camelCase()); + } + + assertUnreachable(casing); +}; + +export const relationsToTypeScriptForStudio = ( + schema: Record>>, + relations: Record>>>, +) => { + const relationalSchema: Record = { + ...Object.fromEntries( + Object.entries(schema) + .map(([key, val]) => { + // have unique keys across schemas + const mappedTableEntries = Object.entries(val).map((tableEntry) => { + return [`__${key}__.${tableEntry[0]}`, tableEntry[1]]; + }); + + return mappedTableEntries; + }) + .flat(), + ), + ...relations, + }; + + const relationsConfig = extractTablesRelationalConfig( + relationalSchema, + createTableRelationsHelpers, + ); + + let result = ''; + + function findColumnKey(table: AnyPgTable, columnName: string) { + for (const tableEntry of Object.entries(table)) { + const key = tableEntry[0]; + const value = tableEntry[1]; + + if (value.name === columnName) { + return key; + } + } + } + + Object.values(relationsConfig.tables).forEach((table) => { + const tableName = table.tsName.split('.')[1]; + const relations = table.relations; + let hasRelations = false; + let relationsObjAsStr = ''; + let hasOne = false; + let hasMany = false; + + Object.values(relations).forEach((relation) => { + hasRelations = true; + + if (is(relation, Many)) { + hasMany = true; + relationsObjAsStr += `\t\t${relation.fieldName}: many(${ + relationsConfig.tableNamesMap[relation.referencedTableName].split( + '.', + )[1] + }${ + typeof relation.relationName !== 'undefined' + ? 
`, { relationName: "${relation.relationName}"}` + : '' + }),`; + } + + if (is(relation, One)) { + hasOne = true; + relationsObjAsStr += `\t\t${relation.fieldName}: one(${ + relationsConfig.tableNamesMap[relation.referencedTableName].split( + '.', + )[1] + }, { fields: [${ + relation.config?.fields.map( + (c) => + `${ + relationsConfig.tableNamesMap[ + getTableName(relation.sourceTable) + ].split('.')[1] + }.${findColumnKey(relation.sourceTable, c.name)}`, + ) + }], references: [${ + relation.config?.references.map( + (c) => + `${ + relationsConfig.tableNamesMap[ + getTableName(relation.referencedTable) + ].split('.')[1] + }.${findColumnKey(relation.referencedTable, c.name)}`, + ) + }]${ + typeof relation.relationName !== 'undefined' + ? `, relationName: "${relation.relationName}"` + : '' + }}),`; + } + }); + + if (hasRelations) { + result += `export const ${tableName}Relation = relations(${tableName}, ({${hasOne ? 'one' : ''}${ + hasOne && hasMany ? ', ' : '' + }${hasMany ? 'many' : ''}}) => ({ + ${relationsObjAsStr} + }));\n`; + } + }); + + return result; +}; + +function generateIdentityParams(identity: Column['identity']) { + let paramsObj = `{ name: "${identity!.name}"`; + if (identity?.startWith) { + paramsObj += `, startWith: ${identity.startWith}`; + } + if (identity?.increment) { + paramsObj += `, increment: ${identity.increment}`; + } + if (identity?.minValue) { + paramsObj += `, minValue: ${identity.minValue}`; + } + if (identity?.maxValue) { + paramsObj += `, maxValue: ${identity.maxValue}`; + } + if (identity?.cache) { + paramsObj += `, cache: ${identity.cache}`; + } + if (identity?.cycle) { + paramsObj += `, cycle: true`; + } + paramsObj += ' }'; + if (identity?.type === 'always') { + return `.generatedAlwaysAsIdentity(${paramsObj})`; + } + return `.generatedByDefaultAsIdentity(${paramsObj})`; +} + +export const paramNameFor = (name: string, schema?: string) => { + const schemaSuffix = schema && schema !== 'public' ? 
`In${schema.capitalise()}` : ''; + return `${name}${schemaSuffix}`; +}; + +export const schemaToTypeScript = ( + schema: PgSchemaInternal, + casing: Casing, +) => { + // collectFKs + Object.values(schema.tables).forEach((table) => { + Object.values(table.foreignKeys).forEach((fk) => { + const relation = `${fk.tableFrom}-${fk.tableTo}`; + relations.add(relation); + }); + }); + + const schemas = Object.fromEntries( + Object.entries(schema.schemas).map((it) => { + return [it[0], withCasing(it[1], casing)]; + }), + ); + + const enumTypes = Object.values(schema.enums).reduce( + (acc, cur) => { + acc.add(`${cur.schema}.${cur.name}`); + return acc; + }, + new Set(), + ); + + const imports = Object.values(schema.tables).reduce( + (res, it) => { + const idxImports = Object.values(it.indexes).map((idx) => idx.isUnique ? 'uniqueIndex' : 'index'); + const fkImpots = Object.values(it.foreignKeys).map((it) => 'foreignKey'); + if ( + Object.values(it.foreignKeys).some((it) => isCyclic(it) && !isSelf(it)) + ) { + res.pg.push('type AnyPgColumn'); + } + const pkImports = Object.values(it.compositePrimaryKeys).map( + (it) => 'primaryKey', + ); + const uniqueImports = Object.values(it.uniqueConstraints).map( + (it) => 'unique', + ); + + if (it.schema && it.schema !== 'public' && it.schema !== '') { + res.pg.push('pgSchema'); + } + + res.pg.push(...idxImports); + res.pg.push(...fkImpots); + res.pg.push(...pkImports); + res.pg.push(...uniqueImports); + + const columnImports = Object.values(it.columns) + .map((col) => { + let patched: string = (importsPatch[col.type] || col.type).replace('[]', ''); + patched = patched === 'double precision' ? 'doublePrecision' : patched; + patched = patched.startsWith('varchar(') ? 'varchar' : patched; + patched = patched.startsWith('char(') ? 'char' : patched; + patched = patched.startsWith('numeric(') ? 'numeric' : patched; + patched = patched.startsWith('time(') ? 'time' : patched; + patched = patched.startsWith('timestamp(') ? 
'timestamp' : patched; + patched = patched.startsWith('vector(') ? 'vector' : patched; + patched = patched.startsWith('geometry(') ? 'geometry' : patched; + return patched; + }) + .filter((type) => { + return pgImportsList.has(type); + }); + + res.pg.push(...columnImports); + return res; + }, + { pg: [] as string[] }, + ); + + Object.values(schema.sequences).forEach((it) => { + if (it.schema && it.schema !== 'public' && it.schema !== '') { + imports.pg.push('pgSchema'); + } else if (it.schema === 'public') { + imports.pg.push('pgSequence'); + } + }); + + Object.values(schema.enums).forEach((it) => { + if (it.schema && it.schema !== 'public' && it.schema !== '') { + imports.pg.push('pgSchema'); + } else if (it.schema === 'public') { + imports.pg.push('pgEnum'); + } + }); + + const enumStatements = Object.values(schema.enums) + .map((it) => { + const enumSchema = schemas[it.schema]; + // const func = schema || schema === "public" ? "pgTable" : schema; + const paramName = paramNameFor(it.name, enumSchema); + + const func = enumSchema ? `${enumSchema}.enum` : 'pgEnum'; + + const values = Object.values(it.values) + .map((it) => `'${it}'`) + .join(', '); + return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", [${values}])\n`; + }) + .join('') + .concat('\n'); + + const sequencesStatements = Object.values(schema.sequences) + .map((it) => { + const seqSchema = schemas[it.schema]; + const paramName = paramNameFor(it.name, seqSchema); + + const func = seqSchema ? 
`${seqSchema}.sequence` : 'pgSequence'; + + let params = ''; + + if (it.startWith) { + params += `, startWith: "${it.startWith}"`; + } + if (it.increment) { + params += `, increment: "${it.increment}"`; + } + if (it.minValue) { + params += `, minValue: "${it.minValue}"`; + } + if (it.maxValue) { + params += `, maxValue: "${it.maxValue}"`; + } + if (it.cache) { + params += `, cache: "${it.cache}"`; + } + if (it.cycle) { + params += `, cycle: true`; + } else { + params += `, cycle: false`; + } + + return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}"${ + params ? `, { ${params.trimChar(',')} }` : '' + })\n`; + }) + .join('') + .concat('\n'); + + const schemaStatements = Object.entries(schemas) + // .filter((it) => it[0] !== "public") + .map((it) => { + return `export const ${it[1]} = pgSchema("${it[0]}");\n`; + }) + .join(''); + + const tableStatements = Object.values(schema.tables).map((table) => { + const tableSchema = schemas[table.schema]; + const paramName = paramNameFor(table.name, tableSchema); + + const func = tableSchema ? 
`${tableSchema}.table` : 'pgTable'; + let statement = `export const ${withCasing(paramName, casing)} = ${func}("${table.name}", {\n`; + statement += createTableColumns( + table.name, + Object.values(table.columns), + Object.values(table.foreignKeys), + enumTypes, + schemas, + casing, + schema.internal, + ); + statement += '}'; + + // more than 2 fields or self reference or cyclic + // Andrii: I switched this one off until we will get custom names in .references() + // const filteredFKs = Object.values(table.foreignKeys).filter((it) => { + // return it.columnsFrom.length > 1 || isSelf(it); + // }); + + if ( + Object.keys(table.indexes).length > 0 + || Object.values(table.foreignKeys).length > 0 + || Object.keys(table.compositePrimaryKeys).length > 0 + || Object.keys(table.uniqueConstraints).length > 0 + ) { + statement += ',\n'; + statement += '(table) => {\n'; + statement += '\treturn {\n'; + statement += createTableIndexes( + table.name, + Object.values(table.indexes), + casing, + ); + statement += createTableFKs(Object.values(table.foreignKeys), schemas, casing); + statement += createTablePKs( + Object.values(table.compositePrimaryKeys), + casing, + ); + statement += createTableUniques( + Object.values(table.uniqueConstraints), + casing, + ); + statement += '\t}\n'; + statement += '}'; + } + + statement += ');'; + return statement; + }); + + const uniquePgImports = ['pgTable', ...new Set(imports.pg)]; + + const importsTs = `import { ${ + uniquePgImports.join( + ', ', + ) + } } from "drizzle-orm/pg-core" + import { sql } from "drizzle-orm"\n\n`; + + let decalrations = schemaStatements; + decalrations += enumStatements; + decalrations += sequencesStatements; + decalrations += '\n'; + decalrations += tableStatements.join('\n\n'); + + const file = importsTs + decalrations; + + // for drizzle studio query runner + const schemaEntry = ` + { + ${ + Object.values(schema.tables) + .map((it) => withCasing(it.name, casing)) + .join(',\n') + } + } + `; + + return { file, 
imports: importsTs, decalrations, schemaEntry }; +}; + +const isCyclic = (fk: ForeignKey) => { + const key = `${fk.tableFrom}-${fk.tableTo}`; + const reverse = `${fk.tableTo}-${fk.tableFrom}`; + return relations.has(key) && relations.has(reverse); +}; + +const isSelf = (fk: ForeignKey) => { + return fk.tableFrom === fk.tableTo; +}; + +const buildArrayDefault = (defaultValue: string, typeName: string): string => { + if (typeof defaultValue === 'string' && !(defaultValue.startsWith('{') || defaultValue.startsWith("'{"))) { + return `sql\`${defaultValue}\``; + } + defaultValue = defaultValue.substring(2, defaultValue.length - 2); + return `[${ + defaultValue + .split(/\s*,\s*/g) + .map((value) => { + // if (['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(typeName)) { + // return value; + // } else if (typeName === 'interval') { + // return value.replaceAll('"', "'"); + // } else if (typeName === 'boolean') { + // return value === 't' ? 'true' : 'false'; + if (typeName === 'json' || typeName === 'jsonb') { + return value + .substring(1, value.length - 1) + .replaceAll('\\', ''); + } + return value; + // } + }) + .join(', ') + }]`; +}; + +const mapDefault = ( + tableName: string, + type: string, + name: string, + enumTypes: Set, + typeSchema: string, + defaultValue?: any, + internals?: PgKitInternals, +) => { + const isExpression = internals?.tables[tableName]?.columns[name]?.isDefaultAnExpression ?? false; + const isArray = internals?.tables[tableName]?.columns[name]?.isArray ?? false; + const lowered = type.toLowerCase().replace('[]', ''); + + if (isArray) { + return typeof defaultValue !== 'undefined' ? `.default(${buildArrayDefault(defaultValue, lowered)})` : ''; + } + + if (enumTypes.has(`${typeSchema}.${type.replace('[]', '')}`)) { + return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; + } + + if (lowered.startsWith('integer')) { + return typeof defaultValue !== 'undefined' ? 
`.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; + } + + if (lowered.startsWith('smallint')) { + return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; + } + + if (lowered.startsWith('bigint')) { + return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; + } + + if (lowered.startsWith('boolean')) { + return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; + } + + if (lowered.startsWith('double precision')) { + return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; + } + + if (lowered.startsWith('real')) { + return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; + } + + if (lowered.startsWith('uuid')) { + return defaultValue === 'gen_random_uuid()' + ? '.defaultRandom()' + : defaultValue + ? `.default(sql\`${defaultValue}\`)` + : ''; + } + + if (lowered.startsWith('numeric')) { + defaultValue = defaultValue + ? defaultValue.startsWith(`'`) && defaultValue.endsWith(`'`) + ? defaultValue.substring(1, defaultValue.length - 1) + : defaultValue + : undefined; + return defaultValue ? `.default('${mapColumnDefault(defaultValue, isExpression)}')` : ''; + } + + if (lowered.startsWith('timestamp')) { + return defaultValue === 'now()' + ? '.defaultNow()' + : defaultValue === 'CURRENT_TIMESTAMP' + ? '.default(sql\`CURRENT_TIMESTAMP\`)' + : defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + } + + if (lowered.startsWith('time')) { + return defaultValue === 'now()' + ? '.defaultNow()' + : defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + } + + if (lowered.startsWith('interval')) { + return defaultValue ? 
`.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; + } + + if (lowered === 'date') { + return defaultValue === 'now()' + ? '.defaultNow()' + : defaultValue === 'CURRENT_DATE' + ? `.default(sql\`${defaultValue}\`)` + : defaultValue + ? `.default(${defaultValue})` + : ''; + } + + if (lowered.startsWith('text')) { + return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; + } + + if (lowered.startsWith('jsonb')) { + const def = typeof defaultValue !== 'undefined' + ? defaultValue.replace(/::(.*?)(?, + typeSchema: string, + casing: Casing, + defaultValue?: any, + internals?: PgKitInternals, +) => { + const isExpression = internals?.tables[tableName]?.columns[name]?.isDefaultAnExpression ?? false; + const lowered = type.toLowerCase().replace('[]', ''); + + if (enumTypes.has(`${typeSchema}.${type.replace('[]', '')}`)) { + let out = `${withCasing(name, casing)}: ${ + withCasing( + paramNameFor(type.replace('[]', ''), typeSchema), + casing, + ) + }("${name}")`; + return out; + } + + if (lowered.startsWith('serial')) { + return `${withCasing(name, casing)}: serial("${name}")`; + } + + if (lowered.startsWith('smallserial')) { + return `${withCasing(name, casing)}: smallserial("${name}")`; + } + + if (lowered.startsWith('bigserial')) { + return `${ + withCasing( + name, + casing, + ) + }: bigserial("${name}", { mode: "bigint" })`; + } + + if (lowered.startsWith('integer')) { + let out = `${withCasing(name, casing)}: integer("${name}")`; + return out; + } + + if (lowered.startsWith('smallint')) { + let out = `${withCasing(name, casing)}: smallint("${name}")`; + return out; + } + + if (lowered.startsWith('bigint')) { + let out = `// You can use { mode: "bigint" } if numbers are exceeding js number limitations\n\t`; + out += `${withCasing(name, casing)}: bigint("${name}", { mode: "number" })`; + return out; + } + + if (lowered.startsWith('boolean')) { + let out = `${withCasing(name, casing)}: 
boolean("${name}")`; + return out; + } + + if (lowered.startsWith('double precision')) { + let out = `${withCasing(name, casing)}: doublePrecision("${name}")`; + return out; + } + + if (lowered.startsWith('real')) { + let out = `${withCasing(name, casing)}: real("${name}")`; + return out; + } + + if (lowered.startsWith('uuid')) { + let out = `${withCasing(name, casing)}: uuid("${name}")`; + + return out; + } + + if (lowered.startsWith('numeric')) { + let params: + | { precision: string | undefined; scale: string | undefined } + | undefined; + + if (lowered.length > 7) { + const [precision, scale] = lowered + .slice(8, lowered.length - 1) + .split(','); + params = { precision, scale }; + } + + let out = params + ? `${withCasing(name, casing)}: numeric("${name}", ${timeConfig(params)})` + : `${withCasing(name, casing)}: numeric("${name}")`; + + return out; + } + + if (lowered.startsWith('timestamp')) { + const withTimezone = lowered.includes('with time zone'); + // const split = lowered.split(" "); + let precision = lowered.startsWith('timestamp(') + ? Number( + lowered + .split(' ')[0] + .substring('timestamp('.length, lowered.split(' ')[0].length - 1), + ) + : null; + precision = precision ? precision : null; + + const params = timeConfig({ + precision, + withTimezone, + mode: "'string'", + }); + + let out = params + ? `${withCasing(name, casing)}: timestamp("${name}", ${params})` + : `${withCasing(name, casing)}: timestamp("${name}")`; + + return out; + } + + if (lowered.startsWith('time')) { + const withTimezone = lowered.includes('with time zone'); + + let precision = lowered.startsWith('time(') + ? Number( + lowered + .split(' ')[0] + .substring('time('.length, lowered.split(' ')[0].length - 1), + ) + : null; + precision = precision ? precision : null; + + const params = timeConfig({ precision, withTimezone }); + + let out = params + ? 
`${withCasing(name, casing)}: time("${name}", ${params})` + : `${withCasing(name, casing)}: time("${name}")`; + + return out; + } + + if (lowered.startsWith('interval')) { + // const withTimezone = lowered.includes("with time zone"); + // const split = lowered.split(" "); + // let precision = split.length >= 2 ? Number(split[1].substring(1, 2)) : null; + // precision = precision ? precision : null; + + const params = intervalConfig(lowered); + + let out = params + ? `${withCasing(name, casing)}: interval("${name}", ${params})` + : `${withCasing(name, casing)}: interval("${name}")`; + + return out; + } + + if (lowered === 'date') { + let out = `${withCasing(name, casing)}: date("${name}")`; + + return out; + } + + if (lowered.startsWith('text')) { + let out = `${withCasing(name, casing)}: text("${name}")`; + return out; + } + + if (lowered.startsWith('jsonb')) { + let out = `${withCasing(name, casing)}: jsonb("${name}")`; + return out; + } + + if (lowered.startsWith('json')) { + let out = `${withCasing(name, casing)}: json("${name}")`; + return out; + } + + if (lowered.startsWith('inet')) { + let out = `${withCasing(name, casing)}: inet("${name}")`; + return out; + } + + if (lowered.startsWith('cidr')) { + let out = `${withCasing(name, casing)}: cidr("${name}")`; + return out; + } + + if (lowered.startsWith('macaddr8')) { + let out = `${withCasing(name, casing)}: macaddr8("${name}")`; + return out; + } + + if (lowered.startsWith('macaddr')) { + let out = `${withCasing(name, casing)}: macaddr("${name}")`; + return out; + } + + if (lowered.startsWith('varchar')) { + let out: string; + if (lowered.length !== 7) { + out = `${ + withCasing( + name, + casing, + ) + }: varchar("${name}", { length: ${ + lowered.substring( + 8, + lowered.length - 1, + ) + } })`; + } else { + out = `${withCasing(name, casing)}: varchar("${name}")`; + } + + return out; + } + + if (lowered.startsWith('point')) { + let out: string = `${withCasing(name, casing)}: point("${name}")`; + return out; 
+ } + + if (lowered.startsWith('line')) { + let out: string = `${withCasing(name, casing)}: point("${name}")`; + return out; + } + + if (lowered.startsWith('geometry')) { + let out: string = ''; + + let isGeoUnknown = false; + + if (lowered.length !== 8) { + const geometryOptions = lowered.slice(9, -1).split(','); + if (geometryOptions.length === 1 && geometryOptions[0] !== '') { + out = `${withCasing(name, casing)}: geometry("${name}", { type: "${geometryOptions[0]}" })`; + } else if (geometryOptions.length === 2) { + out = `${withCasing(name, casing)}: geometry("${name}", { type: "${geometryOptions[0]}", srid: ${ + geometryOptions[1] + } })`; + } else { + isGeoUnknown = true; + } + } else { + out = `${withCasing(name, casing)}: geometry("${name}")`; + } + + if (isGeoUnknown) { + let unknown = + `// TODO: failed to parse geometry type because found more than 2 options inside geometry function '${type}'\n// Introspect is currently supporting only type and srid options\n`; + unknown += `\t${withCasing(name, casing)}: unknown("${name}")`; + return unknown; + } + return out; + } + + if (lowered.startsWith('vector')) { + let out: string; + if (lowered.length !== 6) { + out = `${ + withCasing( + name, + casing, + ) + }: vector("${name}", { dimensions: ${ + lowered.substring( + 7, + lowered.length - 1, + ) + } })`; + } else { + out = `${withCasing(name, casing)}: vector("${name}")`; + } + + return out; + } + + if (lowered.startsWith('char')) { + let out: string; + if (lowered.length !== 4) { + out = `${ + withCasing( + name, + casing, + ) + }: char("${name}", { length: ${ + lowered.substring( + 5, + lowered.length - 1, + ) + } })`; + } else { + out = `${withCasing(name, casing)}: char("${name}")`; + } + + return out; + } + + let unknown = `// TODO: failed to parse database type '${type}'\n`; + unknown += `\t${withCasing(name, casing)}: unknown("${name}")`; + return unknown; +}; + +const dimensionsInArray = (size?: number): string => { + let res = ''; + if (typeof size 
=== 'undefined') return res; + for (let i = 0; i < size; i++) { + res += '.array()'; + } + return res; +}; + +const createTableColumns = ( + tableName: string, + columns: Column[], + fks: ForeignKey[], + enumTypes: Set, + schemas: Record, + casing: Casing, + internals: PgKitInternals, +): string => { + let statement = ''; + + // no self refs and no cyclic + const oneColumnsFKs = Object.values(fks) + .filter((it) => { + return !isSelf(it); + }) + .filter((it) => it.columnsFrom.length === 1); + + const fkByColumnName = oneColumnsFKs.reduce((res, it) => { + const arr = res[it.columnsFrom[0]] || []; + arr.push(it); + res[it.columnsFrom[0]] = arr; + return res; + }, {} as Record); + + columns.forEach((it) => { + const columnStatement = column( + tableName, + it.type, + it.name, + enumTypes, + it.typeSchema ?? 'public', + casing, + it.default, + internals, + ); + statement += '\t'; + statement += columnStatement; + // Provide just this in column function + if (internals?.tables[tableName]?.columns[it.name]?.isArray) { + statement += dimensionsInArray( + internals?.tables[tableName]?.columns[it.name]?.dimensions, + ); + } + statement += mapDefault( + tableName, + it.type, + it.name, + enumTypes, + it.typeSchema ?? 'public', + it.default, + internals, + ); + statement += it.primaryKey ? '.primaryKey()' : ''; + statement += it.notNull && !it.identity ? '.notNull()' : ''; + + statement += it.identity ? generateIdentityParams(it.identity) : ''; + + statement += it.generated + ? `.generatedAlwaysAs(sql\`${it.generated.as}\`)` + : ''; + + // const fks = fkByColumnName[it.name]; + // Andrii: I switched it off until we will get a custom naem setting in references + // if (fks) { + // const fksStatement = fks + // .map((it) => { + // const onDelete = it.onDelete && it.onDelete !== 'no action' ? it.onDelete : null; + // const onUpdate = it.onUpdate && it.onUpdate !== 'no action' ? 
it.onUpdate : null; + // const params = { onDelete, onUpdate }; + + // const typeSuffix = isCyclic(it) ? ': AnyPgColumn' : ''; + + // const paramsStr = objToStatement2(params); + // const tableSchema = schemas[it.schemaTo || '']; + // const paramName = paramNameFor(it.tableTo, tableSchema); + // if (paramsStr) { + // return `.references(()${typeSuffix} => ${ + // withCasing( + // paramName, + // casing, + // ) + // }.${withCasing(it.columnsTo[0], casing)}, ${paramsStr} )`; + // } + // return `.references(()${typeSuffix} => ${ + // withCasing( + // paramName, + // casing, + // ) + // }.${withCasing(it.columnsTo[0], casing)})`; + // }) + // .join(''); + // statement += fksStatement; + // } + + statement += ',\n'; + }); + + return statement; +}; + +const createTableIndexes = ( + tableName: string, + idxs: Index[], + casing: Casing, +): string => { + let statement = ''; + + idxs.forEach((it) => { + // we have issue when index is called as table called + let idxKey = it.name.startsWith(tableName) && it.name !== tableName + ? it.name.slice(tableName.length + 1) + : it.name; + idxKey = idxKey.endsWith('_index') + ? idxKey.slice(0, -'_index'.length) + '_idx' + : idxKey; + + idxKey = withCasing(idxKey, casing); + + const indexGeneratedName = indexName( + tableName, + it.columns.map((it) => it.expression), + ); + const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; + + statement += `\t\t${idxKey}: `; + statement += it.isUnique ? 'uniqueIndex(' : 'index('; + statement += `${escapedIndexName})`; + statement += `${it.concurrently ? `.concurrently()` : ''}`; + + statement += `.using("${it.method}", ${ + it.columns + .map((it) => { + if (it.isExpression) { + return `sql\`${it.expression}\``; + } else { + return `table.${withCasing(it.expression, casing)}${it.asc ? '.asc()' : '.desc()'}${ + it.nulls === 'first' ? '.nullsFirst()' : '.nullsLast()' + }${ + it.opclass && vectorOps.includes(it.opclass) + ? 
`.op("${it.opclass}")` + : '' + }`; + } + }) + .join(', ') + })`; + statement += it.where ? `.where(sql\`${it.where}\`)` : ''; + + function reverseLogic(mappedWith: Record): string { + let reversedString = '{'; + for (const key in mappedWith) { + if (mappedWith.hasOwnProperty(key)) { + reversedString += `${key}: "${mappedWith[key]}",`; + } + } + reversedString = reversedString.length > 1 + ? reversedString.slice(0, reversedString.length - 1) + : reversedString; + return `${reversedString}}`; + } + + statement += it.with && Object.keys(it.with).length > 0 + ? `.with(${reverseLogic(it.with)})` + : ''; + statement += `,\n`; + }); + + return statement; +}; + +const createTablePKs = (pks: PrimaryKey[], casing: Casing): string => { + let statement = ''; + + pks.forEach((it) => { + let idxKey = withCasing(it.name, casing); + + statement += `\t\t${idxKey}: `; + statement += 'primaryKey({ columns: ['; + statement += `${ + it.columns + .map((c) => { + return `table.${withCasing(c, casing)}`; + }) + .join(', ') + }]${it.name ? `, name: "${it.name}"` : ''}}`; + statement += ')'; + statement += `,\n`; + }); + + return statement; +}; + +const createTableUniques = ( + unqs: UniqueConstraint[], + casing: Casing, +): string => { + let statement = ''; + + unqs.forEach((it) => { + const idxKey = withCasing(it.name, casing); + + statement += `\t\t${idxKey}: `; + statement += 'unique('; + statement += `"${it.name}")`; + statement += `.on(${ + it.columns + .map((it) => `table.${withCasing(it, casing)}`) + .join(', ') + })`; + statement += it.nullsNotDistinct ? `.nullsNotDistinct()` : ''; + statement += `,\n`; + }); + + return statement; +}; + +const createTableFKs = ( + fks: ForeignKey[], + schemas: Record, + casing: Casing, +): string => { + let statement = ''; + + fks.forEach((it) => { + const tableSchema = schemas[it.schemaTo || '']; + const paramName = paramNameFor(it.tableTo, tableSchema); + + const isSelf = it.tableTo === it.tableFrom; + const tableTo = isSelf ? 
'table' : `${withCasing(paramName, casing)}`; + statement += `\t\t${withCasing(it.name, casing)}: foreignKey({\n`; + statement += `\t\t\tcolumns: [${ + it.columnsFrom + .map((i) => `table.${withCasing(i, casing)}`) + .join(', ') + }],\n`; + statement += `\t\t\tforeignColumns: [${ + it.columnsTo + .map((i) => `${tableTo}.${withCasing(i, casing)}`) + .join(', ') + }],\n`; + statement += `\t\t\tname: "${it.name}"\n`; + statement += `\t\t})`; + + statement += it.onUpdate && it.onUpdate !== 'no action' + ? `.onUpdate("${it.onUpdate}")` + : ''; + + statement += it.onDelete && it.onDelete !== 'no action' + ? `.onDelete("${it.onDelete}")` + : ''; + + statement += `,\n`; + }); + + return statement; +}; diff --git a/drizzle-kit/src/introspect-singlestore.ts b/drizzle-kit/src/introspect-singlestore.ts new file mode 100644 index 000000000..8aa6e3dd7 --- /dev/null +++ b/drizzle-kit/src/introspect-singlestore.ts @@ -0,0 +1,780 @@ +/* eslint-disable @typescript-eslint/no-unsafe-argument */ +import './@types/utils'; +import type { Casing } from './cli/validations/common'; +import { Column, Index, PrimaryKey, SingleStoreSchemaInternal, UniqueConstraint } from './serializer/singlestoreSchema'; +import { indexName } from './serializer/singlestoreSerializer'; + +// time precision to fsp +// {mode: "string"} for timestamp by default + +const singlestoreImportsList = new Set([ + 'singlestoreTable', + 'singlestoreEnum', + 'bigint', + 'binary', + 'boolean', + 'char', + 'date', + 'datetime', + 'decimal', + 'double', + 'float', + 'int', + 'json', + // TODO: add new type BSON + // TODO: add new type Blob + // TODO: add new type UUID + // TODO: add new type GUID + // TODO: add new type Vector + // TODO: add new type GeoPoint + 'mediumint', + 'real', + 'serial', + 'smallint', + 'text', + 'tinytext', + 'mediumtext', + 'longtext', + 'time', + 'timestamp', + 'tinyint', + 'varbinary', + 'varchar', + 'year', + 'enum', +]); + +const objToStatement = (json: any) => { + json = 
Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `"${it}": "${json[it]}"`).join(', '); + statement += ' }'; + return statement; +}; + +const objToStatement2 = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys + statement += ' }'; + return statement; +}; + +const timeConfig = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); + statement += ' }'; + return statement; +}; + +const binaryConfig = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); + statement += ' }'; + return statement; +}; + +const importsPatch = { + 'double precision': 'doublePrecision', + 'timestamp without time zone': 'timestamp', +} as Record; + +const relations = new Set(); + +const prepareCasing = (casing?: Casing) => (value: string) => { + if (typeof casing === 'undefined') { + return value; + } + if (casing === 'camel') { + return value.camelCase(); + } + + return value; +}; + +export const schemaToTypeScript = ( + schema: SingleStoreSchemaInternal, + casing: Casing, +) => { + const withCasing = prepareCasing(casing); + + const imports = Object.values(schema.tables).reduce( + (res, it) => { + const idxImports = Object.values(it.indexes).map((idx) => idx.isUnique ? 
'uniqueIndex' : 'index'); + const pkImports = Object.values(it.compositePrimaryKeys).map( + (it) => 'primaryKey', + ); + const uniqueImports = Object.values(it.uniqueConstraints).map( + (it) => 'unique', + ); + + res.singlestore.push(...idxImports); + res.singlestore.push(...pkImports); + res.singlestore.push(...uniqueImports); + + const columnImports = Object.values(it.columns) + .map((col) => { + let patched = importsPatch[col.type] ?? col.type; + patched = patched.startsWith('varchar(') ? 'varchar' : patched; + patched = patched.startsWith('char(') ? 'char' : patched; + patched = patched.startsWith('binary(') ? 'binary' : patched; + patched = patched.startsWith('decimal(') ? 'decimal' : patched; + patched = patched.startsWith('smallint(') ? 'smallint' : patched; + patched = patched.startsWith('enum(') ? 'singlestoreEnum' : patched; + patched = patched.startsWith('datetime(') ? 'datetime' : patched; + patched = patched.startsWith('varbinary(') ? 'varbinary' : patched; + patched = patched.startsWith('int(') ? 
'int' : patched; + return patched; + }) + .filter((type) => { + return singlestoreImportsList.has(type); + }); + + res.singlestore.push(...columnImports); + return res; + }, + { singlestore: [] as string[] }, + ); + + const tableStatements = Object.values(schema.tables).map((table) => { + const func = 'singlestoreTable'; + let statement = ''; + if (imports.singlestore.includes(withCasing(table.name))) { + statement = `// Table name is in conflict with ${ + withCasing( + table.name, + ) + } import.\n// Please change to any other name, that is not in imports list\n`; + } + statement += `export const ${withCasing(table.name)} = ${func}("${table.name}", {\n`; + statement += createTableColumns( + Object.values(table.columns), + withCasing, + table.name, + schema, + ); + statement += '}'; + + if ( + Object.keys(table.indexes).length > 0 + || Object.keys(table.compositePrimaryKeys).length > 0 + || Object.keys(table.uniqueConstraints).length > 0 + ) { + statement += ',\n'; + statement += '(table) => {\n'; + statement += '\treturn {\n'; + statement += createTableIndexes( + table.name, + Object.values(table.indexes), + withCasing, + ); + statement += createTablePKs( + Object.values(table.compositePrimaryKeys), + withCasing, + ); + statement += createTableUniques( + Object.values(table.uniqueConstraints), + withCasing, + ); + statement += '\t}\n'; + statement += '}'; + } + + statement += ');'; + return statement; + }); + + const uniqueSingleStoreImports = [ + 'singlestoreTable', + 'singlestoreSchema', + 'AnySingleStoreColumn', + ...new Set(imports.singlestore), + ]; + const importsTs = `import { ${ + uniqueSingleStoreImports.join( + ', ', + ) + } } from "drizzle-orm/singlestore-core"\nimport { sql } from "drizzle-orm"\n\n`; + + let decalrations = ''; + decalrations += tableStatements.join('\n\n'); + + const file = importsTs + decalrations; + + const schemaEntry = ` + { + ${ + Object.values(schema.tables) + .map((it) => withCasing(it.name)) + .join(',') + } + } + `; + + return 
{ + file, // backward compatible, print to file + imports: importsTs, + decalrations, + schemaEntry, + }; +}; + +const mapColumnDefault = (defaultValue: any, isExpression?: boolean) => { + if (isExpression) { + return `sql\`${defaultValue}\``; + } + + return defaultValue; +}; + +const mapColumnDefaultForJson = (defaultValue: any) => { + if ( + typeof defaultValue === 'string' + && defaultValue.startsWith("('") + && defaultValue.endsWith("')") + ) { + return defaultValue.substring(2, defaultValue.length - 2); + } + + return defaultValue; +}; + +const column = ( + type: string, + name: string, + casing: (value: string) => string, + defaultValue?: any, + autoincrement?: boolean, + onUpdate?: boolean, + isExpression?: boolean, +) => { + let lowered = type; + if (!type.startsWith('enum(')) { + lowered = type.toLowerCase(); + } + + if (lowered === 'serial') { + return `${casing(name)}: serial("${name}")`; + } + + if (lowered.startsWith('int')) { + const isUnsigned = lowered.startsWith('int unsigned'); + let out = `${casing(name)}: int("${name}"${isUnsigned ? ', { unsigned: true }' : ''})`; + out += autoincrement ? `.autoincrement()` : ''; + out += typeof defaultValue !== 'undefined' + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('tinyint')) { + const isUnsigned = lowered.startsWith('tinyint unsigned'); + // let out = `${name.camelCase()}: tinyint("${name}")`; + let out: string = `${casing(name)}: tinyint("${name}"${isUnsigned ? ', { unsigned: true }' : ''})`; + out += autoincrement ? `.autoincrement()` : ''; + out += typeof defaultValue !== 'undefined' + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('smallint')) { + const isUnsigned = lowered.startsWith('smallint unsigned'); + let out = `${casing(name)}: smallint("${name}"${isUnsigned ? ', { unsigned: true }' : ''})`; + out += autoincrement ? 
`.autoincrement()` : ''; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('mediumint')) { + const isUnsigned = lowered.startsWith('mediumint unsigned'); + let out = `${casing(name)}: mediumint("${name}"${isUnsigned ? ', { unsigned: true }' : ''})`; + out += autoincrement ? `.autoincrement()` : ''; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('bigint')) { + const isUnsigned = lowered.startsWith('bigint unsigned'); + let out = `${casing(name)}: bigint("${name}", { mode: "number"${isUnsigned ? ', unsigned: true' : ''} })`; + out += autoincrement ? `.autoincrement()` : ''; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered === 'boolean') { + let out = `${casing(name)}: boolean("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('double')) { + let params: + | { precision: string | undefined; scale: string | undefined } + | undefined; + + if (lowered.length > 6) { + const [precision, scale] = lowered + .slice(7, lowered.length - 1) + .split(','); + params = { precision, scale }; + } + + let out = params + ? `${casing(name)}: double("${name}", ${timeConfig(params)})` + : `${casing(name)}: double("${name}")`; + + // let out = `${name.camelCase()}: double("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered === 'float') { + let out = `${casing(name)}: float("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered === 'real') { + let out = `${casing(name)}: real("${name}")`; + out += defaultValue + ? 
`.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('timestamp')) { + const keyLength = 'timestamp'.length + 1; + let fsp = lowered.length > keyLength + ? Number(lowered.substring(keyLength, lowered.length - 1)) + : null; + fsp = fsp ? fsp : null; + + const params = timeConfig({ fsp, mode: "'string'" }); + + let out = params + ? `${casing(name)}: timestamp("${name}", ${params})` + : `${casing(name)}: timestamp("${name}")`; + + // TODO: check if SingleStore has defaultNow() or now() + defaultValue = defaultValue === 'now()' || defaultValue === '(CURRENT_TIMESTAMP)' + ? '.defaultNow()' + : defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + + // TODO: check if SingleStore has onUpdateNow() + let onUpdateNow = onUpdate ? '.onUpdateNow()' : ''; + out += onUpdateNow; + + return out; + } + + if (lowered.startsWith('time')) { + const keyLength = 'time'.length + 1; + let fsp = lowered.length > keyLength + ? Number(lowered.substring(keyLength, lowered.length - 1)) + : null; + fsp = fsp ? fsp : null; + + const params = timeConfig({ fsp }); + + let out = params + ? `${casing(name)}: time("${name}", ${params})` + : `${casing(name)}: time("${name}")`; + + defaultValue = defaultValue === 'now()' + ? '.defaultNow()' + : defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + if (lowered === 'date') { + let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t${ + casing( + name, + ) + }: date("${name}", { mode: 'string' })`; + + defaultValue = defaultValue === 'now()' + ? '.defaultNow()' + : defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + // in mysql text can't have default value. 
Will leave it in case smth ;) + // TODO: check if SingleStore has text can't have default value + if (lowered === 'text') { + let out = `${casing(name)}: text("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + // in mysql text can't have default value. Will leave it in case smth ;) + // TODO: check if SingleStore has tinytext can't have default value + if (lowered === 'tinytext') { + let out = `${casing(name)}: tinytext("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + // in mysql text can't have default value. Will leave it in case smth ;) + // TODO: check if SingleStore has mediumtext can't have default value + if (lowered === 'mediumtext') { + let out = `${casing(name)}: mediumtext("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + // in mysql text can't have default value. Will leave it in case smth ;) + // TODO: check if SingleStore has longtext can't have default value + if (lowered === 'longtext') { + let out = `${casing(name)}: longtext("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered === 'year') { + let out = `${casing(name)}: year("${name}")`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + // in mysql json can't have default value. Will leave it in case smth ;) + // TODO: check if SingleStore has json can't have default value + if (lowered === 'json') { + let out = `${casing(name)}: json("${name}")`; + + out += defaultValue + ? 
`.default(${mapColumnDefaultForJson(defaultValue)})` + : ''; + + return out; + } + + // TODO: add new type BSON + + // TODO: add new type Blob + + // TODO: add new type UUID + + // TODO: add new type GUID + + // TODO: add new type Vector + + // TODO: add new type GeoPoint + + if (lowered.startsWith('varchar')) { + let out: string = `${ + casing( + name, + ) + }: varchar("${name}", { length: ${ + lowered.substring( + 'varchar'.length + 1, + lowered.length - 1, + ) + } })`; + + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('char')) { + let out: string = `${ + casing( + name, + ) + }: char("${name}", { length: ${ + lowered.substring( + 'char'.length + 1, + lowered.length - 1, + ) + } })`; + + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('datetime')) { + let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t`; + + const fsp = lowered.startsWith('datetime(') + ? lowered.substring('datetime'.length + 1, lowered.length - 1) + : undefined; + + out = fsp + ? `${ + casing( + name, + ) + }: datetime("${name}", { mode: 'string', fsp: ${ + lowered.substring( + 'datetime'.length + 1, + lowered.length - 1, + ) + } })` + : `${casing(name)}: datetime("${name}", { mode: 'string'})`; + + defaultValue = defaultValue === 'now()' + ? '.defaultNow()' + : defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + if (lowered.startsWith('decimal')) { + let params: + | { precision: string | undefined; scale: string | undefined } + | undefined; + + if (lowered.length > 7) { + const [precision, scale] = lowered + .slice(8, lowered.length - 1) + .split(','); + params = { precision, scale }; + } + + let out = params + ? 
`${casing(name)}: decimal("${name}", ${timeConfig(params)})` + : `${casing(name)}: decimal("${name}")`; + + defaultValue = typeof defaultValue !== 'undefined' + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + if (lowered.startsWith('binary')) { + const keyLength = 'binary'.length + 1; + let length = lowered.length > keyLength + ? Number(lowered.substring(keyLength, lowered.length - 1)) + : null; + length = length ? length : null; + + const params = binaryConfig({ length }); + + let out = params + ? `${casing(name)}: binary("${name}", ${params})` + : `${casing(name)}: binary("${name}")`; + + defaultValue = defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + if (lowered.startsWith('enum')) { + const values = lowered.substring('enum'.length + 1, lowered.length - 1); + let out = `${casing(name)}: singlestoreEnum("${name}", [${values}])`; + out += defaultValue + ? `.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + return out; + } + + if (lowered.startsWith('varbinary')) { + const keyLength = 'varbinary'.length + 1; + let length = lowered.length > keyLength + ? Number(lowered.substring(keyLength, lowered.length - 1)) + : null; + length = length ? length : null; + + const params = binaryConfig({ length }); + + let out = params + ? `${casing(name)}: varbinary("${name}", ${params})` + : `${casing(name)}: varbinary("${name}")`; + + defaultValue = defaultValue + ? 
`.default(${mapColumnDefault(defaultValue, isExpression)})` + : ''; + + out += defaultValue; + return out; + } + + console.log('uknown', type); + return `// Warning: Can't parse ${type} from database\n\t// ${type}Type: ${type}("${name}")`; +}; + +const createTableColumns = ( + columns: Column[], + casing: (val: string) => string, + tableName: string, + schema: SingleStoreSchemaInternal, +): string => { + let statement = ''; + + columns.forEach((it) => { + statement += '\t'; + statement += column( + it.type, + it.name, + casing, + it.default, + it.autoincrement, + it.onUpdate, + schema.internal?.tables![tableName]?.columns[it.name] + ?.isDefaultAnExpression ?? false, + ); + statement += it.primaryKey ? '.primaryKey()' : ''; + statement += it.notNull ? '.notNull()' : ''; + + statement += it.generated + ? `.generatedAlwaysAs(sql\`${ + it.generated.as.replace( + /`/g, + '\\`', + ) + }\`, { mode: "${it.generated.type}" })` + : ''; + + statement += ',\n'; + }); + + return statement; +}; + +const createTableIndexes = ( + tableName: string, + idxs: Index[], + casing: (value: string) => string, +): string => { + let statement = ''; + + idxs.forEach((it) => { + let idxKey = it.name.startsWith(tableName) && it.name !== tableName + ? it.name.slice(tableName.length + 1) + : it.name; + idxKey = idxKey.endsWith('_index') + ? idxKey.slice(0, -'_index'.length) + '_idx' + : idxKey; + + idxKey = casing(idxKey); + + const indexGeneratedName = indexName(tableName, it.columns); + const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; + + statement += `\t\t${idxKey}: `; + statement += it.isUnique ? 
'uniqueIndex(' : 'index('; + statement += `${escapedIndexName})`; + statement += `.on(${ + it.columns + .map((it) => `table.${casing(it)}`) + .join(', ') + }),`; + statement += `\n`; + }); + + return statement; +}; + +const createTableUniques = ( + unqs: UniqueConstraint[], + casing: (value: string) => string, +): string => { + let statement = ''; + + unqs.forEach((it) => { + const idxKey = casing(it.name); + + statement += `\t\t${idxKey}: `; + statement += 'unique('; + statement += `"${it.name}")`; + statement += `.on(${ + it.columns + .map((it) => `table.${casing(it)}`) + .join(', ') + }),`; + statement += `\n`; + }); + + return statement; +}; + +const createTablePKs = ( + pks: PrimaryKey[], + casing: (value: string) => string, +): string => { + let statement = ''; + + pks.forEach((it) => { + let idxKey = casing(it.name); + + statement += `\t\t${idxKey}: `; + statement += 'primaryKey({ columns: ['; + statement += `${ + it.columns + .map((c) => { + return `table.${casing(c)}`; + }) + .join(', ') + }]${it.name ? 
`, name: "${it.name}"` : ''}}`; + statement += '),'; + statement += `\n`; + }); + + return statement; +}; diff --git a/drizzle-kit/src/introspect-sqlite.ts b/drizzle-kit/src/introspect-sqlite.ts new file mode 100644 index 000000000..b4a729f4c --- /dev/null +++ b/drizzle-kit/src/introspect-sqlite.ts @@ -0,0 +1,457 @@ +/* eslint-disable @typescript-eslint/no-unsafe-argument */ +import './@types/utils'; +import type { Casing } from './cli/validations/common'; +import type { + Column, + ForeignKey, + Index, + PrimaryKey, + SQLiteSchema, + SQLiteSchemaInternal, + UniqueConstraint, +} from './serializer/sqliteSchema'; + +const sqliteImportsList = new Set([ + 'sqliteTable', + 'integer', + 'real', + 'text', + 'numeric', + 'blob', +]); + +export const indexName = (tableName: string, columns: string[]) => { + return `${tableName}_${columns.join('_')}_index`; +}; + +const objToStatement2 = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys + statement += ' }'; + return statement; +}; + +const relations = new Set(); + +const escapeColumnKey = (value: string) => { + if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { + return `"${value}"`; + } + return value; +}; + +const withCasing = (value: string, casing?: Casing) => { + if (casing === 'preserve') { + return escapeColumnKey(value); + } + if (casing === 'camel') { + return escapeColumnKey(value.camelCase()); + } + + return value; +}; + +export const schemaToTypeScript = ( + schema: SQLiteSchemaInternal, + casing: Casing, +) => { + // collectFKs + Object.values(schema.tables).forEach((table) => { + Object.values(table.foreignKeys).forEach((fk) => { + const relation = `${fk.tableFrom}-${fk.tableTo}`; + relations.add(relation); + }); + }); + + const imports = Object.values(schema.tables).reduce( + (res, it) => 
{ + const idxImports = Object.values(it.indexes).map((idx) => idx.isUnique ? 'uniqueIndex' : 'index'); + const fkImpots = Object.values(it.foreignKeys).map((it) => 'foreignKey'); + const pkImports = Object.values(it.compositePrimaryKeys).map( + (it) => 'primaryKey', + ); + const uniqueImports = Object.values(it.uniqueConstraints).map( + (it) => 'unique', + ); + + res.sqlite.push(...idxImports); + res.sqlite.push(...fkImpots); + res.sqlite.push(...pkImports); + res.sqlite.push(...uniqueImports); + + const columnImports = Object.values(it.columns) + .map((col) => { + return col.type; + }) + .filter((type) => { + return sqliteImportsList.has(type); + }); + + res.sqlite.push(...columnImports); + return res; + }, + { sqlite: [] as string[] }, + ); + + const tableStatements = Object.values(schema.tables).map((table) => { + const func = 'sqliteTable'; + let statement = ''; + if (imports.sqlite.includes(withCasing(table.name, casing))) { + statement = `// Table name is in conflict with ${ + withCasing( + table.name, + casing, + ) + } import.\n// Please change to any other name, that is not in imports list\n`; + } + statement += `export const ${withCasing(table.name, casing)} = ${func}("${table.name}", {\n`; + statement += createTableColumns( + Object.values(table.columns), + Object.values(table.foreignKeys), + casing, + ); + statement += '}'; + + // more than 2 fields or self reference or cyclic + const filteredFKs = Object.values(table.foreignKeys).filter((it) => { + return it.columnsFrom.length > 1 || isSelf(it); + }); + + if ( + Object.keys(table.indexes).length > 0 + || filteredFKs.length > 0 + || Object.keys(table.compositePrimaryKeys).length > 0 + || Object.keys(table.uniqueConstraints).length > 0 + ) { + statement += ',\n'; + statement += '(table) => {\n'; + statement += '\treturn {\n'; + statement += createTableIndexes( + table.name, + Object.values(table.indexes), + casing, + ); + statement += createTableFKs(Object.values(filteredFKs), casing); + statement += 
createTablePKs( + Object.values(table.compositePrimaryKeys), + casing, + ); + statement += createTableUniques( + Object.values(table.uniqueConstraints), + casing, + ); + statement += '\t}\n'; + statement += '}'; + } + + statement += ');'; + return statement; + }); + + const uniqueSqliteImports = [ + 'sqliteTable', + 'AnySQLiteColumn', + ...new Set(imports.sqlite), + ]; + + const importsTs = `import { ${ + uniqueSqliteImports.join( + ', ', + ) + } } from "drizzle-orm/sqlite-core" + import { sql } from "drizzle-orm"\n\n`; + + const decalrations = tableStatements.join('\n\n'); + + const file = importsTs + decalrations; + + // for drizzle studio query runner + const schemaEntry = ` + { + ${ + Object.values(schema.tables) + .map((it) => withCasing(it.name, casing)) + .join(',') + } + } + `; + + return { file, imports: importsTs, decalrations, schemaEntry }; +}; + +const isCyclic = (fk: ForeignKey) => { + const key = `${fk.tableFrom}-${fk.tableTo}`; + const reverse = `${fk.tableTo}-${fk.tableFrom}`; + return relations.has(key) && relations.has(reverse); +}; + +const isSelf = (fk: ForeignKey) => { + return fk.tableFrom === fk.tableTo; +}; + +const mapColumnDefault = (defaultValue: any) => { + if ( + typeof defaultValue === 'string' + && defaultValue.startsWith('(') + && defaultValue.endsWith(')') + ) { + return `sql\`${defaultValue}\``; + } + // If default value is NULL as string it will come back from db as "'NULL'" and not just "NULL" + if (defaultValue === 'NULL') { + return `sql\`NULL\``; + } + + if ( + typeof defaultValue === 'string' + && defaultValue.startsWith("'") + && defaultValue.endsWith("'") + ) { + return defaultValue.substring(1, defaultValue.length - 1); + } + + return defaultValue; +}; + +const column = ( + type: string, + name: string, + defaultValue?: any, + autoincrement?: boolean, + casing?: Casing, +) => { + let lowered = type; + + if (lowered === 'integer') { + let out = `${withCasing(name, casing)}: integer("${name}")`; + // out += autoincrement ? 
`.autoincrement()` : ""; + out += typeof defaultValue !== 'undefined' + ? `.default(${mapColumnDefault(defaultValue)})` + : ''; + return out; + } + + if (lowered === 'real') { + let out = `${withCasing(name, casing)}: real("${name}")`; + out += defaultValue ? `.default(${mapColumnDefault(defaultValue)})` : ''; + return out; + } + + if (lowered.startsWith('text')) { + const match = lowered.match(/\d+/); + let out: string; + + if (match) { + out = `${withCasing(name, casing)}: text("${name}", { length: ${match[0]} })`; + } else { + out = `${withCasing(name, casing)}: text("${name}")`; + } + + out += defaultValue ? `.default("${mapColumnDefault(defaultValue)}")` : ''; + return out; + } + + if (lowered === 'blob') { + let out = `${withCasing(name, casing)}: blob("${name}")`; + out += defaultValue ? `.default(${mapColumnDefault(defaultValue)})` : ''; + return out; + } + + if (lowered === 'numeric') { + let out = `${withCasing(name, casing)}: numeric("${name}")`; + out += defaultValue ? `.default(${mapColumnDefault(defaultValue)})` : ''; + return out; + } + + // console.log("uknown", type); + return `// Warning: Can't parse ${type} from database\n\t// ${type}Type: ${type}("${name}")`; +}; + +const createTableColumns = ( + columns: Column[], + fks: ForeignKey[], + casing: Casing, +): string => { + let statement = ''; + + // no self refs and no cyclic + const oneColumnsFKs = Object.values(fks) + .filter((it) => { + return !isSelf(it); + }) + .filter((it) => it.columnsFrom.length === 1); + + const fkByColumnName = oneColumnsFKs.reduce((res, it) => { + const arr = res[it.columnsFrom[0]] || []; + arr.push(it); + res[it.columnsFrom[0]] = arr; + return res; + }, {} as Record); + + columns.forEach((it) => { + statement += '\t'; + statement += column(it.type, it.name, it.default, it.autoincrement, casing); + statement += it.primaryKey + ? `.primaryKey(${it.autoincrement ? '{ autoIncrement: true }' : ''})` + : ''; + statement += it.notNull ? 
'.notNull()' : ''; + + statement += it.generated + ? `.generatedAlwaysAs(sql\`${ + it.generated.as + .replace(/`/g, '\\`') + .slice(1, -1) + }\`, { mode: "${it.generated.type}" })` + : ''; + + const fks = fkByColumnName[it.name]; + if (fks) { + const fksStatement = fks + .map((it) => { + const onDelete = it.onDelete && it.onDelete !== 'no action' ? it.onDelete : null; + const onUpdate = it.onUpdate && it.onUpdate !== 'no action' ? it.onUpdate : null; + const params = { onDelete, onUpdate }; + + const typeSuffix = isCyclic(it) ? ': AnySQLiteColumn' : ''; + + const paramsStr = objToStatement2(params); + if (paramsStr) { + return `.references(()${typeSuffix} => ${ + withCasing( + it.tableTo, + casing, + ) + }.${withCasing(it.columnsTo[0], casing)}, ${paramsStr} )`; + } + return `.references(()${typeSuffix} => ${ + withCasing( + it.tableTo, + casing, + ) + }.${withCasing(it.columnsTo[0], casing)})`; + }) + .join(''); + statement += fksStatement; + } + + statement += ',\n'; + }); + + return statement; +}; + +const createTableIndexes = ( + tableName: string, + idxs: Index[], + casing: Casing, +): string => { + let statement = ''; + + idxs.forEach((it) => { + let idxKey = it.name.startsWith(tableName) && it.name !== tableName + ? it.name.slice(tableName.length + 1) + : it.name; + idxKey = idxKey.endsWith('_index') + ? idxKey.slice(0, -'_index'.length) + '_idx' + : idxKey; + + idxKey = withCasing(idxKey, casing); + + const indexGeneratedName = indexName(tableName, it.columns); + const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; + + statement += `\t\t${idxKey}: `; + statement += it.isUnique ? 
'uniqueIndex(' : 'index('; + statement += `${escapedIndexName})`; + statement += `.on(${ + it.columns + .map((it) => `table.${withCasing(it, casing)}`) + .join(', ') + }),`; + statement += `\n`; + }); + + return statement; +}; + +const createTableUniques = ( + unqs: UniqueConstraint[], + casing: Casing, +): string => { + let statement = ''; + + unqs.forEach((it) => { + const idxKey = withCasing(it.name, casing); + + statement += `\t\t${idxKey}: `; + statement += 'unique('; + statement += `"${it.name}")`; + statement += `.on(${ + it.columns + .map((it) => `table.${withCasing(it, casing)}`) + .join(', ') + }),`; + statement += `\n`; + }); + + return statement; +}; + +const createTablePKs = (pks: PrimaryKey[], casing: Casing): string => { + let statement = ''; + + pks.forEach((it, i) => { + statement += `\t\tpk${i}: `; + statement += 'primaryKey({ columns: ['; + statement += `${ + it.columns + .map((c) => { + return `table.${withCasing(c, casing)}`; + }) + .join(', ') + }]${it.name ? `, name: "${it.name}"` : ''}}`; + statement += ')'; + statement += `\n`; + }); + + return statement; +}; + +const createTableFKs = (fks: ForeignKey[], casing: Casing): string => { + let statement = ''; + + fks.forEach((it) => { + const isSelf = it.tableTo === it.tableFrom; + const tableTo = isSelf ? 'table' : `${withCasing(it.tableTo, casing)}`; + statement += `\t\t${withCasing(it.name, casing)}: foreignKey(() => ({\n`; + statement += `\t\t\tcolumns: [${ + it.columnsFrom + .map((i) => `table.${withCasing(i, casing)}`) + .join(', ') + }],\n`; + statement += `\t\t\tforeignColumns: [${ + it.columnsTo + .map((i) => `${tableTo}.${withCasing(i, casing)}`) + .join(', ') + }],\n`; + statement += `\t\t\tname: "${it.name}"\n`; + statement += `\t\t}))`; + + statement += it.onUpdate && it.onUpdate !== 'no action' + ? `.onUpdate("${it.onUpdate}")` + : ''; + + statement += it.onDelete && it.onDelete !== 'no action' + ? 
`.onDelete("${it.onDelete}")` + : ''; + + statement += `,\n`; + }); + + return statement; +}; diff --git a/drizzle-kit/src/jsonDiffer.js b/drizzle-kit/src/jsonDiffer.js new file mode 100644 index 000000000..113d7e0a4 --- /dev/null +++ b/drizzle-kit/src/jsonDiffer.js @@ -0,0 +1,638 @@ +'use-strict'; +import { diff } from 'json-diff'; + +export function diffForRenamedTables(pairs) { + // raname table1 to name of table2, so we can apply diffs + const renamed = pairs.map((it) => { + const from = it.from; + const to = it.to; + const newFrom = { ...from, name: to.name }; + return [newFrom, to]; + }); + + // find any alternations made to a renamed table + const altered = renamed.map((pair) => { + return diffForRenamedTable(pair[0], pair[1]); + }); + + return altered; +} + +function diffForRenamedTable(t1, t2) { + t1.name = t2.name; + const diffed = diff(t1, t2) || {}; + diffed.name = t2.name; + + return findAlternationsInTable(diffed, t2.schema); +} + +export function diffForRenamedColumn(t1, t2) { + const renamed = { ...t1, name: t2.name }; + const diffed = diff(renamed, t2) || {}; + diffed.name = t2.name; + + return alternationsInColumn(diffed); +} + +const update1to2 = (json) => { + Object.entries(json).forEach(([key, val]) => { + if ('object' !== typeof val) return; + + if (val.hasOwnProperty('references')) { + const ref = val['references']; + const fkName = ref['foreignKeyName']; + const table = ref['table']; + const column = ref['column']; + const onDelete = ref['onDelete']; + const onUpdate = ref['onUpdate']; + const newRef = `${fkName};${table};${column};${onDelete ?? ''};${onUpdate ?? 
''}`; + val['references'] = newRef; + } else { + update1to2(val); + } + }); +}; + +const mapArraysDiff = (source, diff) => { + const sequence = []; + let sourceIndex = 0; + for (let i = 0; i < diff.length; i++) { + const it = diff[i]; + if (it.length === 1) { + sequence.push({ type: 'same', value: source[sourceIndex] }); + sourceIndex += 1; + } else { + if (it[0] === '-') { + sequence.push({ type: 'removed', value: it[1] }); + } else { + sequence.push({ type: 'added', value: it[1], before: '' }); + } + } + } + const result = sequence.reverse().reduce( + (acc, it) => { + if (it.type === 'same') { + acc.prev = it.value; + } + + if (it.type === 'added' && acc.prev) { + it.before = acc.prev; + } + acc.result.push(it); + return acc; + }, + { result: [] }, + ); + + return result.result.reverse(); +}; + +export function diffSchemasOrTables(left, right) { + left = JSON.parse(JSON.stringify(left)); + right = JSON.parse(JSON.stringify(right)); + + const result = Object.entries(diff(left, right) ?? {}); + + const added = result + .filter((it) => it[0].endsWith('__added')) + .map((it) => it[1]); + const deleted = result + .filter((it) => it[0].endsWith('__deleted')) + .map((it) => it[1]); + + return { added, deleted }; +} + +export function diffColumns(left, right) { + left = JSON.parse(JSON.stringify(left)); + right = JSON.parse(JSON.stringify(right)); + const result = diff(left, right) ?? {}; + + const alteredTables = Object.fromEntries( + Object.entries(result) + .filter((it) => { + return !(it[0].includes('__added') || it[0].includes('__deleted')); + }) + .map((tableEntry) => { + // const entry = { name: it, ...result[it] } + const deletedColumns = Object.entries(tableEntry[1].columns ?? {}) + .filter((it) => { + return it[0].endsWith('__deleted'); + }) + .map((it) => { + return it[1]; + }); + + const addedColumns = Object.entries(tableEntry[1].columns ?? 
{}) + .filter((it) => { + return it[0].endsWith('__added'); + }) + .map((it) => { + return it[1]; + }); + + tableEntry[1].columns = { + added: addedColumns, + deleted: deletedColumns, + }; + const table = left[tableEntry[0]]; + return [ + tableEntry[0], + { name: table.name, schema: table.schema, ...tableEntry[1] }, + ]; + }), + ); + + return alteredTables; +} + +export function applyJsonDiff(json1, json2) { + json1 = JSON.parse(JSON.stringify(json1)); + json2 = JSON.parse(JSON.stringify(json2)); + + // deep copy, needed because of the bug in diff library + const rawDiff = diff(json1, json2); + + const difference = JSON.parse(JSON.stringify(rawDiff || {})); + difference.schemas = difference.schemas || {}; + difference.tables = difference.tables || {}; + difference.enums = difference.enums || {}; + difference.sequences = difference.sequences || {}; + + // remove added/deleted schemas + const schemaKeys = Object.keys(difference.schemas); + for (let key of schemaKeys) { + if (key.endsWith('__added') || key.endsWith('__deleted')) { + delete difference.schemas[key]; + continue; + } + } + + // remove added/deleted tables + const tableKeys = Object.keys(difference.tables); + for (let key of tableKeys) { + if (key.endsWith('__added') || key.endsWith('__deleted')) { + delete difference.tables[key]; + continue; + } + + // supply table name and schema for altered tables + const table = json1.tables[key]; + difference.tables[key] = { + name: table.name, + schema: table.schema, + ...difference.tables[key], + }; + } + + for (let [tableKey, tableValue] of Object.entries(difference.tables)) { + const table = difference.tables[tableKey]; + const columns = tableValue.columns || {}; + const columnKeys = Object.keys(columns); + for (let key of columnKeys) { + if (key.endsWith('__added') || key.endsWith('__deleted')) { + delete table.columns[key]; + continue; + } + } + + if (Object.keys(columns).length === 0) { + delete table['columns']; + } + + if ( + 'name' in table + && 'schema' in 
table + && Object.keys(table).length === 2 + ) { + delete difference.tables[tableKey]; + } + } + + const enumsEntries = Object.entries(difference.enums); + const alteredEnums = enumsEntries + .filter((it) => !(it[0].includes('__added') || it[0].includes('__deleted'))) + .map((it) => { + const enumEntry = json1.enums[it[0]]; + const { name, schema, values } = enumEntry; + + const sequence = mapArraysDiff(values, it[1].values); + const addedValues = sequence + .filter((it) => it.type === 'added') + .map((it) => { + return { + before: it.before, + value: it.value, + }; + }); + const deletedValues = sequence + .filter((it) => it.type === 'removed') + .map((it) => it.value); + + return { name, schema, addedValues, deletedValues }; + }); + + const sequencesEntries = Object.entries(difference.sequences); + const alteredSequences = sequencesEntries + .filter((it) => !(it[0].includes('__added') || it[0].includes('__deleted')) && 'values' in it[1]) + .map((it) => { + return json2.sequences[it[0]]; + }); + + const alteredTablesWithColumns = Object.values(difference.tables).map( + (table) => { + return findAlternationsInTable(table); + }, + ); + + return { + alteredTablesWithColumns, + alteredEnums, + alteredSequences, + }; +} + +const findAlternationsInTable = (table) => { + // map each table to have altered, deleted or renamed columns + + // in case no columns were altered, but indexes were + const columns = table.columns ?? 
{}; + + const altered = Object.keys(columns) + .filter((it) => !(it.includes('__deleted') || it.includes('__added'))) + .map((it) => { + return { name: it, ...columns[it] }; + }); + + const deletedIndexes = Object.fromEntries( + Object.entries(table.indexes__deleted || {}) + .concat( + Object.entries(table.indexes || {}).filter((it) => it[0].includes('__deleted')), + ) + .map((entry) => [entry[0].replace('__deleted', ''), entry[1]]), + ); + + const addedIndexes = Object.fromEntries( + Object.entries(table.indexes__added || {}) + .concat( + Object.entries(table.indexes || {}).filter((it) => it[0].includes('__added')), + ) + .map((entry) => [entry[0].replace('__added', ''), entry[1]]), + ); + + const alteredIndexes = Object.fromEntries( + Object.entries(table.indexes || {}).filter((it) => { + return !it[0].endsWith('__deleted') && !it[0].endsWith('__added'); + }), + ); + + const deletedForeignKeys = Object.fromEntries( + Object.entries(table.foreignKeys__deleted || {}) + .concat( + Object.entries(table.foreignKeys || {}).filter((it) => it[0].includes('__deleted')), + ) + .map((entry) => [entry[0].replace('__deleted', ''), entry[1]]), + ); + + const addedForeignKeys = Object.fromEntries( + Object.entries(table.foreignKeys__added || {}) + .concat( + Object.entries(table.foreignKeys || {}).filter((it) => it[0].includes('__added')), + ) + .map((entry) => [entry[0].replace('__added', ''), entry[1]]), + ); + + const alteredForeignKeys = Object.fromEntries( + Object.entries(table.foreignKeys || {}) + .filter( + (it) => !it[0].endsWith('__added') && !it[0].endsWith('__deleted'), + ) + .map((entry) => [entry[0], entry[1]]), + ); + + const addedCompositePKs = Object.fromEntries( + Object.entries(table.compositePrimaryKeys || {}).filter((it) => { + return it[0].endsWith('__added'); + }), + ); + + const deletedCompositePKs = Object.fromEntries( + Object.entries(table.compositePrimaryKeys || {}).filter((it) => { + return it[0].endsWith('__deleted'); + }), + ); + + const 
alteredCompositePKs = Object.fromEntries( + Object.entries(table.compositePrimaryKeys || {}).filter((it) => { + return !it[0].endsWith('__deleted') && !it[0].endsWith('__added'); + }), + ); + + const addedUniqueConstraints = Object.fromEntries( + Object.entries(table.uniqueConstraints || {}).filter((it) => { + return it[0].endsWith('__added'); + }), + ); + + const deletedUniqueConstraints = Object.fromEntries( + Object.entries(table.uniqueConstraints || {}).filter((it) => { + return it[0].endsWith('__deleted'); + }), + ); + + const alteredUniqueConstraints = Object.fromEntries( + Object.entries(table.uniqueConstraints || {}).filter((it) => { + return !it[0].endsWith('__deleted') && !it[0].endsWith('__added'); + }), + ); + + const mappedAltered = altered.map((it) => alternationsInColumn(it)).filter(Boolean); + + return { + name: table.name, + schema: table.schema || '', + altered: mappedAltered, + addedIndexes, + deletedIndexes, + alteredIndexes, + addedForeignKeys, + deletedForeignKeys, + alteredForeignKeys, + addedCompositePKs, + deletedCompositePKs, + alteredCompositePKs, + addedUniqueConstraints, + deletedUniqueConstraints, + alteredUniqueConstraints, + }; +}; + +const alternationsInColumn = (column) => { + const altered = [column]; + const result = altered + .filter((it) => { + if ('type' in it && it.type.__old.replace(' (', '(') === it.type.__new.replace(' (', '(')) { + return false; + } + return true; + }) + .map((it) => { + if (typeof it.name !== 'string' && '__old' in it.name) { + // rename + return { + ...it, + name: { type: 'changed', old: it.name.__old, new: it.name.__new }, + }; + } + return it; + }) + .map((it) => { + if ('type' in it) { + // type change + return { + ...it, + type: { type: 'changed', old: it.type.__old, new: it.type.__new }, + }; + } + return it; + }) + .map((it) => { + if ('default' in it) { + return { + ...it, + default: { + type: 'changed', + old: it.default.__old, + new: it.default.__new, + }, + }; + } + if ('default__added' in it) 
{ + const { default__added, ...others } = it; + return { + ...others, + default: { type: 'added', value: it.default__added }, + }; + } + if ('default__deleted' in it) { + const { default__deleted, ...others } = it; + return { + ...others, + default: { type: 'deleted', value: it.default__deleted }, + }; + } + return it; + }) + .map((it) => { + if ('generated' in it) { + if ('as' in it.generated && 'type' in it.generated) { + return { + ...it, + generated: { + type: 'changed', + old: { as: it.generated.as.__old, type: it.generated.type.__old }, + new: { as: it.generated.as.__new, type: it.generated.type.__new }, + }, + }; + } else if ('as' in it.generated) { + return { + ...it, + generated: { + type: 'changed', + old: { as: it.generated.as.__old }, + new: { as: it.generated.as.__new }, + }, + }; + } else { + return { + ...it, + generated: { + type: 'changed', + old: { as: it.generated.type.__old }, + new: { as: it.generated.type.__new }, + }, + }; + } + } + if ('generated__added' in it) { + const { generated__added, ...others } = it; + return { + ...others, + generated: { type: 'added', value: it.generated__added }, + }; + } + if ('generated__deleted' in it) { + const { generated__deleted, ...others } = it; + return { + ...others, + generated: { type: 'deleted', value: it.generated__deleted }, + }; + } + return it; + }) + .map((it) => { + if ('identity' in it) { + return { + ...it, + identity: { + type: 'changed', + old: it.identity.__old, + new: it.identity.__new, + }, + }; + } + if ('identity__added' in it) { + const { identity__added, ...others } = it; + return { + ...others, + identity: { type: 'added', value: it.identity__added }, + }; + } + if ('identity__deleted' in it) { + const { identity__deleted, ...others } = it; + return { + ...others, + identity: { type: 'deleted', value: it.identity__deleted }, + }; + } + return it; + }) + .map((it) => { + if ('notNull' in it) { + return { + ...it, + notNull: { + type: 'changed', + old: it.notNull.__old, + new: 
it.notNull.__new, + }, + }; + } + if ('notNull__added' in it) { + const { notNull__added, ...others } = it; + return { + ...others, + notNull: { type: 'added', value: it.notNull__added }, + }; + } + if ('notNull__deleted' in it) { + const { notNull__deleted, ...others } = it; + return { + ...others, + notNull: { type: 'deleted', value: it.notNull__deleted }, + }; + } + return it; + }) + .map((it) => { + if ('primaryKey' in it) { + return { + ...it, + primaryKey: { + type: 'changed', + old: it.primaryKey.__old, + new: it.primaryKey.__new, + }, + }; + } + if ('primaryKey__added' in it) { + const { notNull__added, ...others } = it; + return { + ...others, + primaryKey: { type: 'added', value: it.primaryKey__added }, + }; + } + if ('primaryKey__deleted' in it) { + const { notNull__deleted, ...others } = it; + return { + ...others, + primaryKey: { type: 'deleted', value: it.primaryKey__deleted }, + }; + } + return it; + }) + .map((it) => { + if ('typeSchema' in it) { + return { + ...it, + typeSchema: { + type: 'changed', + old: it.typeSchema.__old, + new: it.typeSchema.__new, + }, + }; + } + if ('typeSchema__added' in it) { + const { typeSchema__added, ...others } = it; + return { + ...others, + typeSchema: { type: 'added', value: it.typeSchema__added }, + }; + } + if ('typeSchema__deleted' in it) { + const { typeSchema__deleted, ...others } = it; + return { + ...others, + typeSchema: { type: 'deleted', value: it.typeSchema__deleted }, + }; + } + return it; + }) + .map((it) => { + if ('onUpdate' in it) { + return { + ...it, + onUpdate: { + type: 'changed', + old: it.onUpdate.__old, + new: it.onUpdate.__new, + }, + }; + } + if ('onUpdate__added' in it) { + const { onUpdate__added, ...others } = it; + return { + ...others, + onUpdate: { type: 'added', value: it.onUpdate__added }, + }; + } + if ('onUpdate__deleted' in it) { + const { onUpdate__deleted, ...others } = it; + return { + ...others, + onUpdate: { type: 'deleted', value: it.onUpdate__deleted }, + }; + } + return 
it; + }) + .map((it) => { + if ('autoincrement' in it) { + return { + ...it, + autoincrement: { + type: 'changed', + old: it.autoincrement.__old, + new: it.autoincrement.__new, + }, + }; + } + if ('autoincrement__added' in it) { + const { autoincrement__added, ...others } = it; + return { + ...others, + autoincrement: { type: 'added', value: it.autoincrement__added }, + }; + } + if ('autoincrement__deleted' in it) { + const { autoincrement__deleted, ...others } = it; + return { + ...others, + autoincrement: { type: 'deleted', value: it.autoincrement__deleted }, + }; + } + return it; + }) + .filter(Boolean); + + return result[0]; +}; diff --git a/drizzle-kit/src/jsonStatements.ts b/drizzle-kit/src/jsonStatements.ts new file mode 100644 index 000000000..090b0cdde --- /dev/null +++ b/drizzle-kit/src/jsonStatements.ts @@ -0,0 +1,2686 @@ +import chalk from 'chalk'; +import { warning } from './cli/views'; +import { CommonSquashedSchema } from './schemaValidator'; +import { MySqlKitInternals, MySqlSchema, MySqlSquasher } from './serializer/mysqlSchema'; +import { Index, PgSchema, PgSquasher } from './serializer/pgSchema'; +import { SingleStoreKitInternals, SingleStoreSchema, SingleStoreSquasher } from './serializer/singlestoreSchema'; +import { SQLiteKitInternals, SQLiteSquasher } from './serializer/sqliteSchema'; +import { AlteredColumn, Column, Sequence, Table } from './snapshotsDiffer'; + +export interface JsonSqliteCreateTableStatement { + type: 'sqlite_create_table'; + tableName: string; + columns: Column[]; + referenceData: { + name: string; + tableFrom: string; + columnsFrom: string[]; + tableTo: string; + columnsTo: string[]; + onUpdate?: string | undefined; + onDelete?: string | undefined; + }[]; + compositePKs: string[][]; + uniqueConstraints?: string[]; +} + +export interface JsonCreateTableStatement { + type: 'create_table'; + tableName: string; + schema: string; + columns: Column[]; + compositePKs: string[]; + compositePkName?: string; + uniqueConstraints?: 
string[]; + internals?: MySqlKitInternals | SingleStoreKitInternals; +} + +export interface JsonDropTableStatement { + type: 'drop_table'; + tableName: string; + schema: string; +} + +export interface JsonRenameTableStatement { + type: 'rename_table'; + fromSchema: string; + toSchema: string; + tableNameFrom: string; + tableNameTo: string; +} + +export interface JsonCreateEnumStatement { + type: 'create_type_enum'; + name: string; + schema: string; + values: string[]; +} + +export interface JsonDropEnumStatement { + type: 'drop_type_enum'; + name: string; + schema: string; +} + +export interface JsonMoveEnumStatement { + type: 'move_type_enum'; + name: string; + schemaFrom: string; + schemaTo: string; +} + +export interface JsonRenameEnumStatement { + type: 'rename_type_enum'; + nameFrom: string; + nameTo: string; + schema: string; +} + +export interface JsonAddValueToEnumStatement { + type: 'alter_type_add_value'; + name: string; + schema: string; + value: string; + before: string; +} + +export interface JsonCreateSequenceStatement { + type: 'create_sequence'; + name: string; + schema: string; + values: { + increment?: string | undefined; + minValue?: string | undefined; + maxValue?: string | undefined; + startWith?: string | undefined; + cache?: string | undefined; + cycle?: boolean | undefined; + }; +} + +export interface JsonDropSequenceStatement { + type: 'drop_sequence'; + name: string; + schema: string; +} + +export interface JsonMoveSequenceStatement { + type: 'move_sequence'; + name: string; + schemaFrom: string; + schemaTo: string; +} + +export interface JsonRenameSequenceStatement { + type: 'rename_sequence'; + nameFrom: string; + nameTo: string; + schema: string; +} + +export interface JsonAlterSequenceStatement { + type: 'alter_sequence'; + name: string; + schema: string; + values: { + increment?: string | undefined; + minValue?: string | undefined; + maxValue?: string | undefined; + startWith?: string | undefined; + cache?: string | undefined; + 
cycle?: boolean | undefined; + }; +} + +export interface JsonDropColumnStatement { + type: 'alter_table_drop_column'; + tableName: string; + columnName: string; + schema: string; +} + +export interface JsonAddColumnStatement { + type: 'alter_table_add_column'; + tableName: string; + column: Column; + schema: string; +} + +export interface JsonSqliteAddColumnStatement { + type: 'sqlite_alter_table_add_column'; + tableName: string; + column: Column; + referenceData?: string; +} + +export interface JsonCreateIndexStatement { + type: 'create_index'; + tableName: string; + data: string; + schema: string; + internal?: MySqlKitInternals | SQLiteKitInternals | SingleStoreKitInternals; +} + +export interface JsonPgCreateIndexStatement { + type: 'create_index_pg'; + tableName: string; + data: Index; + schema: string; +} + +export interface JsonReferenceStatement { + type: 'create_reference' | 'alter_reference' | 'delete_reference'; + data: string; + schema: string; + tableName: string; + // fromTable: string; + // fromColumns: string[]; + // toTable: string; + // toColumns: string[]; + // foreignKeyName: string; + // onDelete?: string; + // onUpdate?: string; +} + +export interface JsonCreateUniqueConstraint { + type: 'create_unique_constraint'; + tableName: string; + data: string; + schema?: string; + constraintName?: string; +} + +export interface JsonDeleteUniqueConstraint { + type: 'delete_unique_constraint'; + tableName: string; + data: string; + schema?: string; + constraintName?: string; +} + +export interface JsonAlterUniqueConstraint { + type: 'alter_unique_constraint'; + tableName: string; + old: string; + new: string; + schema?: string; + oldConstraintName?: string; + newConstraintName?: string; +} + +export interface JsonCreateCompositePK { + type: 'create_composite_pk'; + tableName: string; + data: string; + schema?: string; + constraintName?: string; +} + +export interface JsonDeleteCompositePK { + type: 'delete_composite_pk'; + tableName: string; + data: 
string; + schema?: string; + constraintName?: string; +} + +export interface JsonAlterCompositePK { + type: 'alter_composite_pk'; + tableName: string; + old: string; + new: string; + schema?: string; + oldConstraintName?: string; + newConstraintName?: string; +} + +export interface JsonAlterTableSetSchema { + type: 'alter_table_set_schema'; + tableName: string; + schemaFrom: string; + schemaTo: string; +} + +export interface JsonAlterTableRemoveFromSchema { + type: 'alter_table_remove_from_schema'; + tableName: string; + schema: string; +} + +export interface JsonAlterTableSetNewSchema { + type: 'alter_table_set_new_schema'; + tableName: string; + from: string; + to: string; +} + +export interface JsonCreateReferenceStatement extends JsonReferenceStatement { + type: 'create_reference'; +} + +export interface JsonAlterReferenceStatement extends JsonReferenceStatement { + type: 'alter_reference'; + oldFkey: string; +} + +export interface JsonDeleteReferenceStatement extends JsonReferenceStatement { + type: 'delete_reference'; +} + +export interface JsonDropIndexStatement { + type: 'drop_index'; + tableName: string; + data: string; + schema: string; +} + +export interface JsonRenameColumnStatement { + type: 'alter_table_rename_column'; + tableName: string; + oldColumnName: string; + newColumnName: string; + schema: string; +} + +export interface JsonAlterColumnTypeStatement { + type: 'alter_table_alter_column_set_type'; + tableName: string; + columnName: string; + newDataType: string; + oldDataType: string; + schema: string; + columnDefault: string; + columnOnUpdate: boolean; + columnNotNull: boolean; + columnAutoIncrement: boolean; + columnPk: boolean; + columnGenerated?: { as: string; type: 'stored' | 'virtual' }; +} + +export interface JsonAlterColumnSetPrimaryKeyStatement { + type: 'alter_table_alter_column_set_pk'; + tableName: string; + schema: string; + columnName: string; +} + +export interface JsonAlterColumnDropPrimaryKeyStatement { + type: 
'alter_table_alter_column_drop_pk'; + tableName: string; + columnName: string; + schema: string; +} + +export interface JsonAlterColumnSetDefaultStatement { + type: 'alter_table_alter_column_set_default'; + tableName: string; + columnName: string; + newDefaultValue: any; + oldDefaultValue?: any; + schema: string; + newDataType: string; + columnOnUpdate: boolean; + columnNotNull: boolean; + columnAutoIncrement: boolean; + columnPk: boolean; +} + +export interface JsonAlterColumnDropDefaultStatement { + type: 'alter_table_alter_column_drop_default'; + tableName: string; + columnName: string; + schema: string; + newDataType: string; + columnDefault: string; + columnOnUpdate: boolean; + columnNotNull: boolean; + columnAutoIncrement: boolean; + columnPk: boolean; +} + +export interface JsonAlterColumnSetNotNullStatement { + type: 'alter_table_alter_column_set_notnull'; + tableName: string; + columnName: string; + schema: string; + newDataType: string; + columnDefault: string; + columnOnUpdate: boolean; + columnNotNull: boolean; + columnAutoIncrement: boolean; + columnPk: boolean; +} + +export interface JsonAlterColumnDropNotNullStatement { + type: 'alter_table_alter_column_drop_notnull'; + tableName: string; + columnName: string; + schema: string; + newDataType: string; + columnDefault: string; + columnOnUpdate: boolean; + columnNotNull: boolean; + columnAutoIncrement: boolean; + columnPk: boolean; +} + +export interface JsonAlterColumnSetGeneratedStatement { + type: 'alter_table_alter_column_set_generated'; + tableName: string; + columnName: string; + schema: string; + newDataType: string; + columnDefault: string; + columnOnUpdate: boolean; + columnNotNull: boolean; + columnAutoIncrement: boolean; + columnPk: boolean; + columnGenerated?: { as: string; type: 'stored' | 'virtual' }; +} +export interface JsonAlterColumnSetIdentityStatement { + type: 'alter_table_alter_column_set_identity'; + tableName: string; + columnName: string; + schema: string; + identity: string; +} 
+ +export interface JsonAlterColumnDropIdentityStatement { + type: 'alter_table_alter_column_drop_identity'; + tableName: string; + columnName: string; + schema: string; +} + +export interface JsonAlterColumnAlterIdentityStatement { + type: 'alter_table_alter_column_change_identity'; + tableName: string; + columnName: string; + schema: string; + identity: string; + oldIdentity: string; +} + +export interface JsonAlterColumnDropGeneratedStatement { + type: 'alter_table_alter_column_drop_generated'; + tableName: string; + columnName: string; + schema: string; + newDataType: string; + columnDefault: string; + columnOnUpdate: boolean; + columnNotNull: boolean; + columnAutoIncrement: boolean; + columnPk: boolean; + columnGenerated?: { as: string; type: 'stored' | 'virtual' }; + oldColumn?: Column; +} + +export interface JsonAlterColumnAlterGeneratedStatement { + type: 'alter_table_alter_column_alter_generated'; + tableName: string; + columnName: string; + schema: string; + newDataType: string; + columnDefault: string; + columnOnUpdate: boolean; + columnNotNull: boolean; + columnAutoIncrement: boolean; + columnPk: boolean; + columnGenerated?: { as: string; type: 'stored' | 'virtual' }; +} + +export interface JsonAlterColumnSetOnUpdateStatement { + type: 'alter_table_alter_column_set_on_update'; + tableName: string; + columnName: string; + schema: string; + newDataType: string; + columnDefault: string; + columnOnUpdate: boolean; + columnNotNull: boolean; + columnAutoIncrement: boolean; + columnPk: boolean; +} + +export interface JsonAlterColumnDropOnUpdateStatement { + type: 'alter_table_alter_column_drop_on_update'; + tableName: string; + columnName: string; + schema: string; + newDataType: string; + columnDefault: string; + columnOnUpdate: boolean; + columnNotNull: boolean; + columnAutoIncrement: boolean; + columnPk: boolean; +} + +export interface JsonAlterColumnSetAutoincrementStatement { + type: 'alter_table_alter_column_set_autoincrement'; + tableName: string; + 
columnName: string; + schema: string; + newDataType: string; + columnDefault: string; + columnOnUpdate: boolean; + columnNotNull: boolean; + columnAutoIncrement: boolean; + columnPk: boolean; +} + +export interface JsonAlterColumnDropAutoincrementStatement { + type: 'alter_table_alter_column_drop_autoincrement'; + tableName: string; + columnName: string; + schema: string; + newDataType: string; + columnDefault: string; + columnOnUpdate: boolean; + columnNotNull: boolean; + columnAutoIncrement: boolean; + columnPk: boolean; +} + +export interface JsonCreateSchema { + type: 'create_schema'; + name: string; +} + +export interface JsonDropSchema { + type: 'drop_schema'; + name: string; +} + +export interface JsonRenameSchema { + type: 'rename_schema'; + from: string; + to: string; +} + +export type JsonAlterColumnStatement = + | JsonRenameColumnStatement + | JsonAlterColumnTypeStatement + | JsonAlterColumnSetDefaultStatement + | JsonAlterColumnDropDefaultStatement + | JsonAlterColumnSetNotNullStatement + | JsonAlterColumnDropNotNullStatement + | JsonAlterColumnDropOnUpdateStatement + | JsonAlterColumnSetOnUpdateStatement + | JsonAlterColumnDropAutoincrementStatement + | JsonAlterColumnSetAutoincrementStatement + | JsonAlterColumnSetPrimaryKeyStatement + | JsonAlterColumnDropPrimaryKeyStatement + | JsonAlterColumnSetGeneratedStatement + | JsonAlterColumnDropGeneratedStatement + | JsonAlterColumnAlterGeneratedStatement + | JsonAlterColumnSetIdentityStatement + | JsonAlterColumnAlterIdentityStatement + | JsonAlterColumnDropIdentityStatement; + +export type JsonStatement = + | JsonAlterColumnStatement + | JsonCreateTableStatement + | JsonDropTableStatement + | JsonRenameTableStatement + | JsonCreateEnumStatement + | JsonDropEnumStatement + | JsonMoveEnumStatement + | JsonRenameEnumStatement + | JsonAddValueToEnumStatement + | JsonDropColumnStatement + | JsonAddColumnStatement + | JsonCreateIndexStatement + | JsonCreateReferenceStatement + | JsonAlterReferenceStatement + | 
JsonDeleteReferenceStatement + | JsonDropIndexStatement + | JsonReferenceStatement + | JsonSqliteCreateTableStatement + | JsonSqliteAddColumnStatement + | JsonCreateCompositePK + | JsonDeleteCompositePK + | JsonAlterCompositePK + | JsonCreateUniqueConstraint + | JsonDeleteUniqueConstraint + | JsonAlterUniqueConstraint + | JsonCreateSchema + | JsonDropSchema + | JsonRenameSchema + | JsonAlterTableSetSchema + | JsonAlterTableRemoveFromSchema + | JsonAlterTableSetNewSchema + | JsonPgCreateIndexStatement + | JsonAlterSequenceStatement + | JsonDropSequenceStatement + | JsonCreateSequenceStatement + | JsonMoveSequenceStatement + | JsonRenameSequenceStatement; + +export const preparePgCreateTableJson = ( + table: Table, + // TODO: remove? + json2: PgSchema, +): JsonCreateTableStatement => { + const { name, schema, columns, compositePrimaryKeys, uniqueConstraints } = table; + const tableKey = `${schema || 'public'}.${name}`; + + // TODO: @AndriiSherman. We need this, will add test cases + const compositePkName = Object.values(compositePrimaryKeys).length > 0 + ? json2.tables[tableKey].compositePrimaryKeys[ + `${PgSquasher.unsquashPK(Object.values(compositePrimaryKeys)[0]).name}` + ].name + : ''; + + return { + type: 'create_table', + tableName: name, + schema, + columns: Object.values(columns), + compositePKs: Object.values(compositePrimaryKeys), + compositePkName: compositePkName, + uniqueConstraints: Object.values(uniqueConstraints), + }; +}; + +export const prepareMySqlCreateTableJson = ( + table: Table, + // TODO: remove? 
+ json2: MySqlSchema, + // we need it to know if some of the indexes(and in future other parts) are expressions or columns + // didn't change mysqlserialaizer, because it will break snapshots and diffs and it's hard to detect + // if previously it was an expression or column + internals: MySqlKitInternals, +): JsonCreateTableStatement => { + const { name, schema, columns, compositePrimaryKeys, uniqueConstraints } = table; + + return { + type: 'create_table', + tableName: name, + schema, + columns: Object.values(columns), + compositePKs: Object.values(compositePrimaryKeys), + compositePkName: Object.values(compositePrimaryKeys).length > 0 + ? json2.tables[name].compositePrimaryKeys[ + MySqlSquasher.unsquashPK(Object.values(compositePrimaryKeys)[0]) + .name + ].name + : '', + uniqueConstraints: Object.values(uniqueConstraints), + internals, + }; +}; + +export const prepareSingleStoreCreateTableJson = ( + table: Table, + // TODO: remove? + json2: SingleStoreSchema, + // we need it to know if some of the indexes(and in future other parts) are expressions or columns + // didn't change singlestoreserialaizer, because it will break snapshots and diffs and it's hard to detect + // if previously it was an expression or column + internals: SingleStoreKitInternals, +): JsonCreateTableStatement => { + const { name, schema, columns, compositePrimaryKeys, uniqueConstraints } = table; + + return { + type: 'create_table', + tableName: name, + schema, + columns: Object.values(columns), + compositePKs: Object.values(compositePrimaryKeys), + compositePkName: Object.values(compositePrimaryKeys).length > 0 + ? 
json2.tables[name].compositePrimaryKeys[ + SingleStoreSquasher.unsquashPK(Object.values(compositePrimaryKeys)[0]) + .name + ].name + : '', + uniqueConstraints: Object.values(uniqueConstraints), + internals, + }; +}; + +export const prepareSQLiteCreateTable = ( + table: Table, + action?: 'push' | undefined, +): JsonSqliteCreateTableStatement => { + const { name, columns, uniqueConstraints } = table; + + const references: string[] = Object.values(table.foreignKeys); + + const composites: string[][] = Object.values(table.compositePrimaryKeys).map( + (it) => SQLiteSquasher.unsquashPK(it), + ); + + const fks = references.map((it) => + action === 'push' + ? SQLiteSquasher.unsquashPushFK(it) + : SQLiteSquasher.unsquashFK(it) + ); + + return { + type: 'sqlite_create_table', + tableName: name, + columns: Object.values(columns), + referenceData: fks, + compositePKs: composites, + uniqueConstraints: Object.values(uniqueConstraints), + }; +}; + +export const prepareDropTableJson = (table: Table): JsonDropTableStatement => { + return { + type: 'drop_table', + tableName: table.name, + schema: table.schema, + }; +}; + +export const prepareRenameTableJson = ( + tableFrom: Table, + tableTo: Table, +): JsonRenameTableStatement => { + return { + type: 'rename_table', + fromSchema: tableTo.schema, + toSchema: tableTo.schema, + tableNameFrom: tableFrom.name, + tableNameTo: tableTo.name, + }; +}; + +export const prepareCreateEnumJson = ( + name: string, + schema: string, + values: string[], +): JsonCreateEnumStatement => { + return { + type: 'create_type_enum', + name: name, + schema: schema, + values, + }; +}; + +// https://blog.yo1.dog/updating-enum-values-in-postgresql-the-safe-and-easy-way/ +export const prepareAddValuesToEnumJson = ( + name: string, + schema: string, + values: { value: string; before: string }[], +): JsonAddValueToEnumStatement[] => { + return values.map((it) => { + return { + type: 'alter_type_add_value', + name: name, + schema: schema, + value: it.value, + 
// NOTE(review): this chunk is the whitespace-mangled payload of a git diff that
// adds drizzle-kit's JSON-statement builders. The code below reconstructs the
// added lines with conventional formatting. Generic type arguments (e.g.
// `Record<string, string>`) were stripped by the mangling and have been
// restored from usage — confirm against upstream before merging.

      // Tail of a builder whose head lies above this chunk.
      before: it.before,
    };
  });
};

/** Builds the JSON statement that drops a Postgres enum type. */
export const prepareDropEnumJson = (
  name: string,
  schema: string,
): JsonDropEnumStatement => {
  return {
    type: 'drop_type_enum',
    name: name,
    schema: schema,
  };
};

/** Builds the JSON statement that moves an enum type between schemas. */
export const prepareMoveEnumJson = (
  name: string,
  schemaFrom: string,
  schemaTo: string,
): JsonMoveEnumStatement => {
  return {
    type: 'move_type_enum',
    name: name,
    schemaFrom,
    schemaTo,
  };
};

/** Builds the JSON statement that renames an enum type within a schema. */
export const prepareRenameEnumJson = (
  nameFrom: string,
  nameTo: string,
  schema: string,
): JsonRenameEnumStatement => {
  return {
    type: 'rename_type_enum',
    nameFrom,
    nameTo,
    schema,
  };
};

////////////

/** Builds the JSON statement that creates a sequence; squashed values are expanded. */
export const prepareCreateSequenceJson = (
  seq: Sequence,
): JsonCreateSequenceStatement => {
  const values = PgSquasher.unsquashSequence(seq.values);
  return {
    type: 'create_sequence',
    name: seq.name,
    schema: seq.schema,
    values,
  };
};

/** Builds the JSON statement(s) that alter a sequence's parameters. */
export const prepareAlterSequenceJson = (
  seq: Sequence,
): JsonAlterSequenceStatement[] => {
  const values = PgSquasher.unsquashSequence(seq.values);
  return [
    {
      type: 'alter_sequence',
      schema: seq.schema,
      name: seq.name,
      values,
    },
  ];
};

export const prepareDropSequenceJson = (
  name: string,
  schema: string,
): JsonDropSequenceStatement => {
  return {
    type: 'drop_sequence',
    name: name,
    schema: schema,
  };
};

export const prepareMoveSequenceJson = (
  name: string,
  schemaFrom: string,
  schemaTo: string,
): JsonMoveSequenceStatement => {
  return {
    type: 'move_sequence',
    name: name,
    schemaFrom,
    schemaTo,
  };
};

export const prepareRenameSequenceJson = (
  nameFrom: string,
  nameTo: string,
  schema: string,
): JsonRenameSequenceStatement => {
  return {
    type: 'rename_sequence',
    nameFrom,
    nameTo,
    schema,
  };
};

////////////

export const prepareCreateSchemasJson = (
  values: string[],
): JsonCreateSchema[] => {
  return values.map((it) => {
    return {
      type: 'create_schema',
      name: it,
    } as JsonCreateSchema;
  });
};

export const prepareRenameSchemasJson = (
  values: { from: string; to: string }[],
): JsonRenameSchema[] => {
  return values.map((it) => {
    return {
      type: 'rename_schema',
      from: it.from,
      to: it.to,
    } as JsonRenameSchema;
  });
};

export const prepareDeleteSchemasJson = (
  values: string[],
): JsonDropSchema[] => {
  return values.map((it) => {
    return {
      type: 'drop_schema',
      name: it,
    } as JsonDropSchema;
  });
};

/** One rename statement per (from, to) column pair of a single table. */
export const prepareRenameColumns = (
  tableName: string,
  // TODO: split for pg and mysql+sqlite and singlestore without schema
  schema: string,
  pairs: { from: Column; to: Column }[],
): JsonRenameColumnStatement[] => {
  return pairs.map((it) => {
    return {
      type: 'alter_table_rename_column',
      tableName: tableName,
      oldColumnName: it.from.name,
      newColumnName: it.to.name,
      schema,
    };
  });
};

export const _prepareDropColumns = (
  tableName: string, // fixed typo: parameter was named `taleName`
  schema: string,
  columns: Column[],
): JsonDropColumnStatement[] => {
  return columns.map((it) => {
    return {
      type: 'alter_table_drop_column',
      tableName: tableName,
      columnName: it.name,
      schema,
    };
  });
};

export const _prepareAddColumns = (
  tableName: string,
  schema: string,
  columns: Column[],
): JsonAddColumnStatement[] => {
  return columns.map((it) => {
    return {
      type: 'alter_table_add_column',
      tableName: tableName,
      column: it,
      schema,
    };
  });
};

/**
 * SQLite ADD COLUMN statements. STORED generated columns are skipped with a
 * warning (SQLite cannot ALTER TABLE ADD a STORED column); columns that are
 * the source of a foreign key carry the squashed FK as `referenceData`.
 */
export const _prepareSqliteAddColumns = (
  tableName: string,
  columns: Column[],
  referenceData: string[],
): JsonSqliteAddColumnStatement[] => {
  const unsquashed = referenceData.map((addedFkValue) => SQLiteSquasher.unsquashFK(addedFkValue));

  return columns
    .map((it) => {
      const columnsWithReference = unsquashed.find((t) => t.columnsFrom.includes(it.name));

      if (it.generated?.type === 'stored') {
        warning(
          `As SQLite docs mention: "It is not possible to ALTER TABLE ADD COLUMN a STORED column. One can add a VIRTUAL column, however", source: "https://www.sqlite.org/gencol.html"`,
        );
        return undefined;
      }

      return {
        type: 'sqlite_alter_table_add_column',
        tableName: tableName,
        column: it,
        referenceData: columnsWithReference
          ? SQLiteSquasher.squashFK(columnsWithReference)
          : undefined,
      };
    })
    .filter(Boolean) as JsonSqliteAddColumnStatement[];
};

/**
 * Diffs one MySQL table's altered columns into JSON ALTER statements.
 * Autoincrement changes are handled in a first pass; the result is ordered
 * drop-pk statements first, set-pk statements second, everything else last.
 * Generated-column "changed" statements are suppressed for `action === 'push'`.
 */
export const prepareAlterColumnsMysql = (
  tableName: string,
  schema: string,
  columns: AlteredColumn[],
  // TODO: remove?
  json1: CommonSquashedSchema,
  json2: CommonSquashedSchema,
  action?: 'push' | undefined,
): JsonAlterColumnStatement[] => {
  let statements: JsonAlterColumnStatement[] = [];
  let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = [];
  let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = [];

  for (const column of columns) {
    const columnName = typeof column.name !== 'string' ? column.name.new : column.name;

    const table = json2.tables[tableName];
    const snapshotColumn = table.columns[columnName];

    const columnType = snapshotColumn.type;
    const columnDefault = snapshotColumn.default;
    const columnOnUpdate = 'onUpdate' in snapshotColumn ? snapshotColumn.onUpdate : undefined;
    const columnNotNull = table.columns[columnName].notNull;

    const columnAutoIncrement = 'autoincrement' in snapshotColumn
      ? snapshotColumn.autoincrement ?? false
      : false;

    const columnPk = table.columns[columnName].primaryKey;

    if (column.autoincrement?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_autoincrement',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.autoincrement?.type === 'changed') {
      const type = column.autoincrement.new
        ? 'alter_table_alter_column_set_autoincrement'
        : 'alter_table_alter_column_drop_autoincrement';

      statements.push({
        type,
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.autoincrement?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_autoincrement',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }
  }

  for (const column of columns) {
    const columnName = typeof column.name !== 'string' ? column.name.new : column.name;

    // I used any, because those fields are available only for mysql dialect
    // For other dialects it will become undefined, that is fine for json statements
    const columnType = json2.tables[tableName].columns[columnName].type;
    const columnDefault = json2.tables[tableName].columns[columnName].default;
    const columnGenerated = json2.tables[tableName].columns[columnName].generated;
    const columnOnUpdate = (json2.tables[tableName].columns[columnName] as any)
      .onUpdate;
    const columnNotNull = json2.tables[tableName].columns[columnName].notNull;
    const columnAutoIncrement = (
      json2.tables[tableName].columns[columnName] as any
    ).autoincrement;
    const columnPk = (json2.tables[tableName].columns[columnName] as any)
      .primaryKey;

    const compositePk = json2.tables[tableName].compositePrimaryKeys[
      `${tableName}_${columnName}`
    ];

    if (typeof column.name !== 'string') {
      statements.push({
        type: 'alter_table_rename_column',
        tableName,
        oldColumnName: column.name.old,
        newColumnName: column.name.new,
        schema,
      });
    }

    if (column.type?.type === 'changed') {
      statements.push({
        type: 'alter_table_alter_column_set_type',
        tableName,
        columnName,
        newDataType: column.type.new,
        oldDataType: column.type.old,
        schema,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
        columnGenerated,
      });
    }

    if (
      column.primaryKey?.type === 'deleted'
      || (column.primaryKey?.type === 'changed'
        && !column.primaryKey.new
        && typeof compositePk === 'undefined')
    ) {
      dropPkStatements.push({
        ////
        type: 'alter_table_alter_column_drop_pk',
        tableName,
        columnName,
        schema,
      });
    }

    if (column.default?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_default',
        tableName,
        columnName,
        newDefaultValue: column.default.value,
        schema,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        newDataType: columnType,
        columnPk,
      });
    }

    if (column.default?.type === 'changed') {
      statements.push({
        type: 'alter_table_alter_column_set_default',
        tableName,
        columnName,
        newDefaultValue: column.default.new,
        oldDefaultValue: column.default.old,
        schema,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        newDataType: columnType,
        columnPk,
      });
    }

    if (column.default?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_default',
        tableName,
        columnName,
        schema,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        newDataType: columnType,
        columnPk,
      });
    }

    if (column.notNull?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_notnull',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.notNull?.type === 'changed') {
      const type = column.notNull.new
        ? 'alter_table_alter_column_set_notnull'
        : 'alter_table_alter_column_drop_notnull';
      statements.push({
        type: type,
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.notNull?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_notnull',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.generated?.type === 'added') {
      if (columnGenerated?.type === 'virtual') {
        warning(
          `You are trying to add virtual generated constraint to ${
            chalk.blue(
              columnName,
            )
          } column. As MySQL docs mention: "Nongenerated columns can be altered to stored but not virtual generated columns". We will drop an existing column and add it with a virtual generated statement. This means that the data previously stored in this column will be wiped, and new data will be generated on each read for this column\n`,
        );
      }
      statements.push({
        type: 'alter_table_alter_column_set_generated',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
        columnGenerated,
      });
    }

    if (column.generated?.type === 'changed' && action !== 'push') {
      statements.push({
        type: 'alter_table_alter_column_alter_generated',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
        columnGenerated,
      });
    }

    if (column.generated?.type === 'deleted') {
      if (columnGenerated?.type === 'virtual') {
        warning(
          `You are trying to remove virtual generated constraint from ${
            chalk.blue(
              columnName,
            )
          } column. As MySQL docs mention: "Stored but not virtual generated columns can be altered to nongenerated columns. The stored generated values become the values of the nongenerated column". We will drop an existing column and add it without a virtual generated statement. This means that this column will have no data after migration\n`,
        );
      }
      statements.push({
        type: 'alter_table_alter_column_drop_generated',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
        columnGenerated,
        oldColumn: json1.tables[tableName].columns[columnName],
      });
    }

    if (
      column.primaryKey?.type === 'added'
      || (column.primaryKey?.type === 'changed' && column.primaryKey.new)
    ) {
      // MySQL's AUTO_INCREMENT path already implies a key; only emit an explicit
      // set-pk when no set-autoincrement statement was produced above.
      const wasAutoincrement = statements.filter(
        (it) => it.type === 'alter_table_alter_column_set_autoincrement',
      );
      if (wasAutoincrement.length === 0) {
        setPkStatements.push({
          type: 'alter_table_alter_column_set_pk',
          tableName,
          schema,
          columnName,
        });
      }
    }

    if (column.onUpdate?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_on_update',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.onUpdate?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_on_update',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }
  }

  return [...dropPkStatements, ...setPkStatements, ...statements];
};

/**
 * SingleStore variant of `prepareAlterColumnsMysql`; logic is currently
 * identical apart from the dialect-specific warning TODOs.
 */
export const prepareAlterColumnsSingleStore = (
  tableName: string,
  schema: string,
  columns: AlteredColumn[],
  // TODO: remove?
  json1: CommonSquashedSchema,
  json2: CommonSquashedSchema,
  action?: 'push' | undefined,
): JsonAlterColumnStatement[] => {
  let statements: JsonAlterColumnStatement[] = [];
  let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = [];
  let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = [];

  for (const column of columns) {
    const columnName = typeof column.name !== 'string' ? column.name.new : column.name;

    const table = json2.tables[tableName];
    const snapshotColumn = table.columns[columnName];

    const columnType = snapshotColumn.type;
    const columnDefault = snapshotColumn.default;
    const columnOnUpdate = 'onUpdate' in snapshotColumn ? snapshotColumn.onUpdate : undefined;
    const columnNotNull = table.columns[columnName].notNull;

    const columnAutoIncrement = 'autoincrement' in snapshotColumn
      ? snapshotColumn.autoincrement ?? false
      : false;

    const columnPk = table.columns[columnName].primaryKey;

    if (column.autoincrement?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_autoincrement',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.autoincrement?.type === 'changed') {
      const type = column.autoincrement.new
        ? 'alter_table_alter_column_set_autoincrement'
        : 'alter_table_alter_column_drop_autoincrement';

      statements.push({
        type,
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.autoincrement?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_autoincrement',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }
  }

  for (const column of columns) {
    const columnName = typeof column.name !== 'string' ? column.name.new : column.name;

    // I used any, because those fields are available only for mysql and singlestore dialect
    // For other dialects it will become undefined, that is fine for json statements
    const columnType = json2.tables[tableName].columns[columnName].type;
    const columnDefault = json2.tables[tableName].columns[columnName].default;
    const columnGenerated = json2.tables[tableName].columns[columnName].generated;
    const columnOnUpdate = (json2.tables[tableName].columns[columnName] as any)
      .onUpdate;
    const columnNotNull = json2.tables[tableName].columns[columnName].notNull;
    const columnAutoIncrement = (
      json2.tables[tableName].columns[columnName] as any
    ).autoincrement;
    const columnPk = (json2.tables[tableName].columns[columnName] as any)
      .primaryKey;

    const compositePk = json2.tables[tableName].compositePrimaryKeys[
      `${tableName}_${columnName}`
    ];

    if (typeof column.name !== 'string') {
      statements.push({
        type: 'alter_table_rename_column',
        tableName,
        oldColumnName: column.name.old,
        newColumnName: column.name.new,
        schema,
      });
    }

    if (column.type?.type === 'changed') {
      statements.push({
        type: 'alter_table_alter_column_set_type',
        tableName,
        columnName,
        newDataType: column.type.new,
        oldDataType: column.type.old,
        schema,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
        columnGenerated,
      });
    }

    if (
      column.primaryKey?.type === 'deleted'
      || (column.primaryKey?.type === 'changed'
        && !column.primaryKey.new
        && typeof compositePk === 'undefined')
    ) {
      dropPkStatements.push({
        ////
        type: 'alter_table_alter_column_drop_pk',
        tableName,
        columnName,
        schema,
      });
    }

    if (column.default?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_default',
        tableName,
        columnName,
        newDefaultValue: column.default.value,
        schema,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        newDataType: columnType,
        columnPk,
      });
    }

    if (column.default?.type === 'changed') {
      statements.push({
        type: 'alter_table_alter_column_set_default',
        tableName,
        columnName,
        newDefaultValue: column.default.new,
        oldDefaultValue: column.default.old,
        schema,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        newDataType: columnType,
        columnPk,
      });
    }

    if (column.default?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_default',
        tableName,
        columnName,
        schema,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        newDataType: columnType,
        columnPk,
      });
    }

    if (column.notNull?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_notnull',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.notNull?.type === 'changed') {
      const type = column.notNull.new
        ? 'alter_table_alter_column_set_notnull'
        : 'alter_table_alter_column_drop_notnull';
      statements.push({
        type: type,
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.notNull?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_notnull',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.generated?.type === 'added') {
      if (columnGenerated?.type === 'virtual') {
        // TODO: Change warning message according to SingleStore docs
        warning(
          `You are trying to add virtual generated constraint to ${
            chalk.blue(
              columnName,
            )
          } column. As MySQL docs mention: "Nongenerated columns can be altered to stored but not virtual generated columns". We will drop an existing column and add it with a virtual generated statement. This means that the data previously stored in this column will be wiped, and new data will be generated on each read for this column\n`,
        );
      }
      statements.push({
        type: 'alter_table_alter_column_set_generated',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
        columnGenerated,
      });
    }

    if (column.generated?.type === 'changed' && action !== 'push') {
      statements.push({
        type: 'alter_table_alter_column_alter_generated',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
        columnGenerated,
      });
    }

    if (column.generated?.type === 'deleted') {
      if (columnGenerated?.type === 'virtual') {
        // TODO: Change warning message according to SingleStore docs
        warning(
          `You are trying to remove virtual generated constraint from ${
            chalk.blue(
              columnName,
            )
          } column. As MySQL docs mention: "Stored but not virtual generated columns can be altered to nongenerated columns. The stored generated values become the values of the nongenerated column". We will drop an existing column and add it without a virtual generated statement. This means that this column will have no data after migration\n`,
        );
      }
      statements.push({
        type: 'alter_table_alter_column_drop_generated',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
        columnGenerated,
        oldColumn: json1.tables[tableName].columns[columnName],
      });
    }

    if (
      column.primaryKey?.type === 'added'
      || (column.primaryKey?.type === 'changed' && column.primaryKey.new)
    ) {
      const wasAutoincrement = statements.filter(
        (it) => it.type === 'alter_table_alter_column_set_autoincrement',
      );
      if (wasAutoincrement.length === 0) {
        setPkStatements.push({
          type: 'alter_table_alter_column_set_pk',
          tableName,
          schema,
          columnName,
        });
      }
    }

    if (column.onUpdate?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_on_update',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.onUpdate?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_on_update',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }
  }

  return [...dropPkStatements, ...setPkStatements, ...statements];
};

/**
 * Postgres variant: tables are keyed `${schema || 'public'}.${table}` in the
 * squashed snapshot, and identity-column changes are handled in addition to
 * the shared default/notnull/generated/pk cases. Result ordering matches the
 * MySQL builder: drop-pk, set-pk, then everything else.
 */
export const preparePgAlterColumns = (
  _tableName: string,
  schema: string,
  columns: AlteredColumn[],
  // TODO: remove?
  json2: CommonSquashedSchema,
  action?: 'push' | undefined,
): JsonAlterColumnStatement[] => {
  const tableKey = `${schema || 'public'}.${_tableName}`;
  let statements: JsonAlterColumnStatement[] = [];
  let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = [];
  let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = [];

  for (const column of columns) {
    const columnName = typeof column.name !== 'string' ? column.name.new : column.name;

    const tableName = json2.tables[tableKey].name;

    // I used any, because those fields are available only for mysql dialect
    // For other dialects it will become undefined, that is fine for json statements
    const columnType = json2.tables[tableKey].columns[columnName].type;
    const columnDefault = json2.tables[tableKey].columns[columnName].default;
    const columnGenerated = json2.tables[tableKey].columns[columnName].generated;
    const columnOnUpdate = (json2.tables[tableKey].columns[columnName] as any)
      .onUpdate;
    const columnNotNull = json2.tables[tableKey].columns[columnName].notNull;
    const columnAutoIncrement = (
      json2.tables[tableKey].columns[columnName] as any
    ).autoincrement;
    const columnPk = (json2.tables[tableKey].columns[columnName] as any)
      .primaryKey;

    const compositePk = json2.tables[tableKey].compositePrimaryKeys[`${tableName}_${columnName}`];

    if (typeof column.name !== 'string') {
      statements.push({
        type: 'alter_table_rename_column',
        tableName,
        oldColumnName: column.name.old,
        newColumnName: column.name.new,
        schema,
      });
    }

    if (column.type?.type === 'changed') {
      statements.push({
        type: 'alter_table_alter_column_set_type',
        tableName,
        columnName,
        newDataType: column.type.new,
        oldDataType: column.type.old,
        schema,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (
      column.primaryKey?.type === 'deleted'
      || (column.primaryKey?.type === 'changed'
        && !column.primaryKey.new
        && typeof compositePk === 'undefined')
    ) {
      dropPkStatements.push({
        ////
        type: 'alter_table_alter_column_drop_pk',
        tableName,
        columnName,
        schema,
      });
    }

    if (column.default?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_default',
        tableName,
        columnName,
        newDefaultValue: column.default.value,
        schema,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        newDataType: columnType,
        columnPk,
      });
    }

    if (column.default?.type === 'changed') {
      statements.push({
        type: 'alter_table_alter_column_set_default',
        tableName,
        columnName,
        newDefaultValue: column.default.new,
        oldDefaultValue: column.default.old,
        schema,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        newDataType: columnType,
        columnPk,
      });
    }

    if (column.default?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_default',
        tableName,
        columnName,
        schema,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        newDataType: columnType,
        columnPk,
      });
    }

    if (column.notNull?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_notnull',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.notNull?.type === 'changed') {
      const type = column.notNull.new
        ? 'alter_table_alter_column_set_notnull'
        : 'alter_table_alter_column_drop_notnull';
      statements.push({
        type: type,
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.notNull?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_notnull',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.identity?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_identity',
        tableName,
        columnName,
        schema,
        identity: column.identity.value,
      });
    }

    if (column.identity?.type === 'changed') {
      statements.push({
        type: 'alter_table_alter_column_change_identity',
        tableName,
        columnName,
        schema,
        identity: column.identity.new,
        oldIdentity: column.identity.old,
      });
    }

    if (column.identity?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_identity',
        tableName,
        columnName,
        schema,
      });
    }

    if (column.generated?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_generated',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
        columnGenerated,
      });
    }

    if (column.generated?.type === 'changed' && action !== 'push') {
      statements.push({
        type: 'alter_table_alter_column_alter_generated',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
        columnGenerated,
      });
    }

    if (column.generated?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_generated',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
        columnGenerated,
      });
    }

    if (
      column.primaryKey?.type === 'added'
      || (column.primaryKey?.type === 'changed' && column.primaryKey.new)
    ) {
      const wasAutoincrement = statements.filter(
        (it) => it.type === 'alter_table_alter_column_set_autoincrement',
      );
      if (wasAutoincrement.length === 0) {
        setPkStatements.push({
          type: 'alter_table_alter_column_set_pk',
          tableName,
          schema,
          columnName,
        });
      }
    }

    // (removed a block of dead commented-out primary-key handling that the
    // pk logic above supersedes)

    if (column.onUpdate?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_on_update',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.onUpdate?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_on_update',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }
  }

  return [...dropPkStatements, ...setPkStatements, ...statements];
};

/**
 * SQLite variant: no identity columns, and generated-column set/alter is only
 * possible for VIRTUAL columns (STORED emits a warning instead, per
 * https://www.sqlite.org/gencol.html).
 */
export const prepareSqliteAlterColumns = (
  tableName: string,
  schema: string,
  columns: AlteredColumn[],
  // TODO: remove?
  json2: CommonSquashedSchema,
): JsonAlterColumnStatement[] => {
  let statements: JsonAlterColumnStatement[] = [];
  let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = [];
  let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = [];

  for (const column of columns) {
    const columnName = typeof column.name !== 'string' ? column.name.new : column.name;

    // I used any, because those fields are available only for mysql dialect
    // For other dialects it will become undefined, that is fine for json statements
    const columnType = json2.tables[tableName].columns[columnName].type;
    const columnDefault = json2.tables[tableName].columns[columnName].default;
    const columnOnUpdate = (json2.tables[tableName].columns[columnName] as any)
      .onUpdate;
    const columnNotNull = json2.tables[tableName].columns[columnName].notNull;
    const columnAutoIncrement = (
      json2.tables[tableName].columns[columnName] as any
    ).autoincrement;
    const columnPk = (json2.tables[tableName].columns[columnName] as any)
      .primaryKey;

    const columnGenerated = json2.tables[tableName].columns[columnName].generated;

    const compositePk = json2.tables[tableName].compositePrimaryKeys[
      `${tableName}_${columnName}`
    ];

    if (typeof column.name !== 'string') {
      statements.push({
        type: 'alter_table_rename_column',
        tableName,
        oldColumnName: column.name.old,
        newColumnName: column.name.new,
        schema,
      });
    }

    if (column.type?.type === 'changed') {
      statements.push({
        type: 'alter_table_alter_column_set_type',
        tableName,
        columnName,
        newDataType: column.type.new,
        oldDataType: column.type.old,
        schema,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (
      column.primaryKey?.type === 'deleted'
      || (column.primaryKey?.type === 'changed'
        && !column.primaryKey.new
        && typeof compositePk === 'undefined')
    ) {
      dropPkStatements.push({
        ////
        type: 'alter_table_alter_column_drop_pk',
        tableName,
        columnName,
        schema,
      });
    }

    if (column.default?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_default',
        tableName,
        columnName,
        newDefaultValue: column.default.value,
        schema,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        newDataType: columnType,
        columnPk,
      });
    }

    if (column.default?.type === 'changed') {
      statements.push({
        type: 'alter_table_alter_column_set_default',
        tableName,
        columnName,
        newDefaultValue: column.default.new,
        oldDefaultValue: column.default.old,
        schema,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        newDataType: columnType,
        columnPk,
      });
    }

    if (column.default?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_default',
        tableName,
        columnName,
        schema,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        newDataType: columnType,
        columnPk,
      });
    }

    if (column.notNull?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_notnull',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.notNull?.type === 'changed') {
      const type = column.notNull.new
        ? 'alter_table_alter_column_set_notnull'
        : 'alter_table_alter_column_drop_notnull';
      statements.push({
        type: type,
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.notNull?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_notnull',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.generated?.type === 'added') {
      if (columnGenerated?.type === 'virtual') {
        statements.push({
          type: 'alter_table_alter_column_set_generated',
          tableName,
          columnName,
          schema,
          newDataType: columnType,
          columnDefault,
          columnOnUpdate,
          columnNotNull,
          columnAutoIncrement,
          columnPk,
          columnGenerated,
        });
      } else {
        warning(
          `As SQLite docs mention: "It is not possible to ALTER TABLE ADD COLUMN a STORED column. One can add a VIRTUAL column, however", source: "https://www.sqlite.org/gencol.html"`,
        );
      }
    }

    if (column.generated?.type === 'changed') {
      if (columnGenerated?.type === 'virtual') {
        statements.push({
          type: 'alter_table_alter_column_alter_generated',
          tableName,
          columnName,
          schema,
          newDataType: columnType,
          columnDefault,
          columnOnUpdate,
          columnNotNull,
          columnAutoIncrement,
          columnPk,
          columnGenerated,
        });
      } else {
        warning(
          `As SQLite docs mention: "It is not possible to ALTER TABLE ADD COLUMN a STORED column. One can add a VIRTUAL column, however", source: "https://www.sqlite.org/gencol.html"`,
        );
      }
    }

    if (column.generated?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_generated',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
        columnGenerated,
      });
    }

    if (
      column.primaryKey?.type === 'added'
      || (column.primaryKey?.type === 'changed' && column.primaryKey.new)
    ) {
      const wasAutoincrement = statements.filter(
        (it) => it.type === 'alter_table_alter_column_set_autoincrement',
      );
      if (wasAutoincrement.length === 0) {
        setPkStatements.push({
          type: 'alter_table_alter_column_set_pk',
          tableName,
          schema,
          columnName,
        });
      }
    }

    if (column.onUpdate?.type === 'added') {
      statements.push({
        type: 'alter_table_alter_column_set_on_update',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }

    if (column.onUpdate?.type === 'deleted') {
      statements.push({
        type: 'alter_table_alter_column_drop_on_update',
        tableName,
        columnName,
        schema,
        newDataType: columnType,
        columnDefault,
        columnOnUpdate,
        columnNotNull,
        columnAutoIncrement,
        columnPk,
      });
    }
  }

  return [...dropPkStatements, ...setPkStatements, ...statements];
};

/**
 * Postgres CREATE INDEX statements. For `push`, index data is re-read from the
 * full (unsquashed) schema because push-squashed indexes lose detail.
 */
export const preparePgCreateIndexesJson = (
  tableName: string,
  schema: string,
  indexes: Record<string, string>, // NOTE(review): generic args restored — confirm
  fullSchema: PgSchema,
  action?: 'push' | undefined,
): JsonPgCreateIndexStatement[] => {
  if (action === 'push') {
    return Object.values(indexes).map((indexData) => {
      const unsquashedIndex = PgSquasher.unsquashIdxPush(indexData);
      const data = fullSchema.tables[`${schema === '' ? 'public' : schema}.${tableName}`]
        .indexes[unsquashedIndex.name];
      return {
        type: 'create_index_pg',
        tableName,
        data,
        schema,
      };
    });
  }
  return Object.values(indexes).map((indexData) => {
    return {
      type: 'create_index_pg',
      tableName,
      data: PgSquasher.unsquashIdx(indexData),
      schema,
    };
  });
};

export const prepareCreateIndexesJson = (
  tableName: string,
  schema: string,
  indexes: Record<string, string>, // NOTE(review): generic args restored — confirm
  internal?: MySqlKitInternals | SQLiteKitInternals,
): JsonCreateIndexStatement[] => {
  return Object.values(indexes).map((indexData) => {
    return {
      type: 'create_index',
      tableName,
      data: indexData,
      schema,
      internal,
    };
  });
};

export const prepareCreateReferencesJson = (
  tableName: string,
  schema: string,
  foreignKeys: Record<string, string>, // NOTE(review): generic args restored — confirm
): JsonCreateReferenceStatement[] => {
  return Object.values(foreignKeys).map((fkData) => {
    return {
      type: 'create_reference',
      tableName,
      data: fkData,
      schema,
    };
  });
};

export const prepareDropReferencesJson = (
  tableName: string,
  schema: string,
  foreignKeys: Record<string, string>, // NOTE(review): generic args restored — confirm
): JsonDeleteReferenceStatement[] => {
  return Object.values(foreignKeys).map((fkData) => {
    return {
      type: 'delete_reference',
      tableName,
      data: fkData,
      schema,
    };
  });
};

// alter should create 2 statements. It's important to make only 1 sql per statement(for breakpoints)
export const prepareAlterReferencesJson = (
  tableName: string,
  schema: string,
  foreignKeys: Record<string, { __old: string; __new: string }>, // NOTE(review): generic args restored — confirm
): JsonReferenceStatement[] => {
  const stmts: JsonReferenceStatement[] = [];
  Object.values(foreignKeys).map((val) => {
    stmts.push({
      type: 'delete_reference',
      tableName,
      schema,
      data: val.__old,
    });

    stmts.push({
      type: 'create_reference',
      tableName,
      schema,
      data: val.__new,
    });
  });
  return stmts;
};

export const prepareDropIndexesJson = (
  tableName: string,
  schema: string,
  indexes: Record<string, string>, // NOTE(review): generic args restored — confirm
): JsonDropIndexStatement[] => {
  return Object.values(indexes).map((indexData) => {
    return {
      type: 'drop_index',
      tableName,
      data: indexData,
      schema,
    };
  });
};

export const prepareAddCompositePrimaryKeySqlite = (
  tableName: string,
  pks: Record<string, string>, // NOTE(review): generic args restored — confirm
): JsonCreateCompositePK[] => {
  return Object.values(pks).map((it) => {
    return {
      type: 'create_composite_pk',
      tableName,
      data: it,
    } as JsonCreateCompositePK;
  });
};

export const prepareDeleteCompositePrimaryKeySqlite = (
  tableName: string,
  pks: Record<string, string>, // NOTE(review): generic args restored — confirm
): JsonDeleteCompositePK[] => {
  return Object.values(pks).map((it) => {
    return {
      type: 'delete_composite_pk',
      tableName,
      data: it,
    } as JsonDeleteCompositePK;
  });
};

export const prepareAlterCompositePrimaryKeySqlite = (
  tableName: string,
  pks: Record<string, { __old: string; __new: string }>, // NOTE(review): generic args restored — confirm
): JsonAlterCompositePK[] => {
  return Object.values(pks).map((it) => {
    return {
      type: 'alter_composite_pk',
      tableName,
      old: it.__old,
      new: it.__new,
    } as JsonAlterCompositePK;
  });
};

export const prepareAddCompositePrimaryKeyPg = (
  tableName: string,
  schema: string,
  pks: Record<string, string>, // NOTE(review): generic args restored — confirm
  // TODO: remove?
  json2: PgSchema,
): JsonCreateCompositePK[] => {
  return Object.values(pks).map((it) => {
    const unsquashed = PgSquasher.unsquashPK(it);
    return {
      type: 'create_composite_pk',
      tableName,
      data: it,
      schema,
      constraintName: json2.tables[`${schema || 'public'}.${tableName}`].compositePrimaryKeys[
        unsquashed.name
      ].name,
    } as JsonCreateCompositePK;
  });
};

export const prepareDeleteCompositePrimaryKeyPg = (
  tableName: string,
  schema: string,
  pks: Record<string, string>, // NOTE(review): generic args restored — confirm
  // TODO: remove?
  json1: PgSchema,
): JsonDeleteCompositePK[] => {
  return Object.values(pks).map((it) => {
    return {
      type: 'delete_composite_pk',
      tableName,
      data: it,
      schema,
      constraintName: json1.tables[`${schema || 'public'}.${tableName}`].compositePrimaryKeys[
        PgSquasher.unsquashPK(it).name
      ].name,
    } as JsonDeleteCompositePK;
  });
};

export const prepareAlterCompositePrimaryKeyPg = (
  tableName: string,
  schema: string,
  pks: Record<string, { __old: string; __new: string }>, // NOTE(review): generic args restored — confirm
  // TODO: remove?
  json1: PgSchema,
  json2: PgSchema,
): JsonAlterCompositePK[] => {
  return Object.values(pks).map((it) => {
    return {
      type: 'alter_composite_pk',
      tableName,
      old: it.__old,
      new: it.__new,
      schema,
      oldConstraintName: json1.tables[`${schema || 'public'}.${tableName}`].compositePrimaryKeys[
        PgSquasher.unsquashPK(it.__old).name
      ].name,
      newConstraintName: json2.tables[`${schema || 'public'}.${tableName}`].compositePrimaryKeys[
        PgSquasher.unsquashPK(it.__new).name
      ].name,
    } as JsonAlterCompositePK;
  });
};

export const prepareAddUniqueConstraintPg = (
  tableName: string,
  schema: string,
  unqs: Record<string, string>, // NOTE(review): generic args restored — confirm
): JsonCreateUniqueConstraint[] => {
  return Object.values(unqs).map((it) => {
    return {
      type: 'create_unique_constraint',
      tableName,
      data: it,
      schema,
    } as JsonCreateUniqueConstraint;
  });
};

// NOTE(review): the source chunk is truncated mid-body here; the body below is
// reconstructed by symmetry with prepareAddUniqueConstraintPg — confirm
// against upstream before merging.
export const prepareDeleteUniqueConstraintPg = (
  tableName: string,
  schema: string,
  unqs: Record<string, string>,
): JsonDeleteUniqueConstraint[] => {
  return Object.values(unqs).map((it) => {
    return {
      type: 'delete_unique_constraint',
      tableName,
      data: it,
      schema,
    } as JsonDeleteUniqueConstraint;
  });
};
Object.values(unqs).map((it) => { + return { + type: 'delete_unique_constraint', + tableName, + data: it, + schema, + } as JsonDeleteUniqueConstraint; + }); +}; + +// add create table changes +// add handler to make drop and add and not alter(looking at __old and __new) +// add serializer for mysql and sqlite + types +// add introspect serializer for pg+sqlite+mysql +// add introspect actual code +// add push sqlite handler +// add push mysql warning if data exists and may have unique conflict +// add release notes +// add docs changes + +export const prepareAlterUniqueConstraintPg = ( + tableName: string, + schema: string, + unqs: Record, +): JsonAlterUniqueConstraint[] => { + return Object.values(unqs).map((it) => { + return { + type: 'alter_unique_constraint', + tableName, + old: it.__old, + new: it.__new, + schema, + } as JsonAlterUniqueConstraint; + }); +}; + +export const prepareAddCompositePrimaryKeyMySql = ( + tableName: string, + pks: Record, + // TODO: remove? + json1: MySqlSchema, + json2: MySqlSchema, +): JsonCreateCompositePK[] => { + const res: JsonCreateCompositePK[] = []; + for (const it of Object.values(pks)) { + const unsquashed = MySqlSquasher.unsquashPK(it); + + if ( + unsquashed.columns.length === 1 + && json1.tables[tableName]?.columns[unsquashed.columns[0]]?.primaryKey + ) { + continue; + } + + res.push({ + type: 'create_composite_pk', + tableName, + data: it, + constraintName: json2.tables[tableName].compositePrimaryKeys[unsquashed.name].name, + } as JsonCreateCompositePK); + } + return res; +}; + +export const prepareDeleteCompositePrimaryKeyMySql = ( + tableName: string, + pks: Record, + // TODO: remove? 
+ json1: MySqlSchema, +): JsonDeleteCompositePK[] => { + return Object.values(pks).map((it) => { + return { + type: 'delete_composite_pk', + tableName, + data: it, + constraintName: json1.tables[tableName].compositePrimaryKeys[ + MySqlSquasher.unsquashPK(it).name + ].name, + } as JsonDeleteCompositePK; + }); +}; + +export const prepareAlterCompositePrimaryKeyMySql = ( + tableName: string, + pks: Record, + // TODO: remove? + json1: MySqlSchema, + json2: MySqlSchema, +): JsonAlterCompositePK[] => { + return Object.values(pks).map((it) => { + return { + type: 'alter_composite_pk', + tableName, + old: it.__old, + new: it.__new, + oldConstraintName: json1.tables[tableName].compositePrimaryKeys[ + MySqlSquasher.unsquashPK(it.__old).name + ].name, + newConstraintName: json2.tables[tableName].compositePrimaryKeys[ + MySqlSquasher.unsquashPK(it.__new).name + ].name, + } as JsonAlterCompositePK; + }); +}; + +export const prepareAddCompositePrimaryKeySingleStore = ( + tableName: string, + pks: Record, + // TODO: remove? + json1: SingleStoreSchema, + json2: SingleStoreSchema, +): JsonCreateCompositePK[] => { + const res: JsonCreateCompositePK[] = []; + for (const it of Object.values(pks)) { + const unsquashed = SingleStoreSquasher.unsquashPK(it); + + if ( + unsquashed.columns.length === 1 + && json1.tables[tableName]?.columns[unsquashed.columns[0]]?.primaryKey + ) { + continue; + } + + res.push({ + type: 'create_composite_pk', + tableName, + data: it, + constraintName: json2.tables[tableName].compositePrimaryKeys[unsquashed.name].name, + } as JsonCreateCompositePK); + } + return res; +}; + +export const prepareDeleteCompositePrimaryKeySingleStore = ( + tableName: string, + pks: Record, + // TODO: remove? 
+ json1: SingleStoreSchema, +): JsonDeleteCompositePK[] => { + return Object.values(pks).map((it) => { + return { + type: 'delete_composite_pk', + tableName, + data: it, + constraintName: json1.tables[tableName].compositePrimaryKeys[ + SingleStoreSquasher.unsquashPK(it).name + ].name, + } as JsonDeleteCompositePK; + }); +}; + +export const prepareAlterCompositePrimaryKeySingleStore = ( + tableName: string, + pks: Record, + // TODO: remove? + json1: SingleStoreSchema, + json2: SingleStoreSchema, +): JsonAlterCompositePK[] => { + return Object.values(pks).map((it) => { + return { + type: 'alter_composite_pk', + tableName, + old: it.__old, + new: it.__new, + oldConstraintName: json1.tables[tableName].compositePrimaryKeys[ + SingleStoreSquasher.unsquashPK(it.__old).name + ].name, + newConstraintName: json2.tables[tableName].compositePrimaryKeys[ + SingleStoreSquasher.unsquashPK(it.__new).name + ].name, + } as JsonAlterCompositePK; + }); +}; diff --git a/drizzle-kit/src/loader.mjs b/drizzle-kit/src/loader.mjs new file mode 100644 index 000000000..488f5712c --- /dev/null +++ b/drizzle-kit/src/loader.mjs @@ -0,0 +1,57 @@ +import esbuild from 'esbuild'; +import { readFileSync } from 'fs'; +import * as path from 'path'; + +const parse = (it) => { + if (!it) return { drizzle: false }; + + if (it.endsWith('__drizzle__')) { + const offset = it.startsWith('file://') ? 'file://'.length : 0; + const clean = it.slice(offset, -'__drizzle__'.length); + return { drizzle: true, clean, original: it }; + } + return { drizzle: false, clean: it }; +}; + +export function resolve(specifier, context, nextResolve) { + const { drizzle, clean } = parse(specifier); + if (drizzle && !clean.endsWith('.ts') && !clean.endsWith('.mts')) { + return nextResolve(clean); + } + + if (drizzle) { + return { + shortCircuit: true, + url: `file://${specifier}`, + }; + } + + const parsedParent = parse(context.parentURL); + const parentURL = parsedParent.drizzle + ? 
new URL(`file://${path.resolve(parsedParent.clean)}`) + : context.parentURL; + + // Let Node.js handle all other specifiers. + return nextResolve(specifier, { ...context, parentURL }); +} + +export async function load(url, context, defaultLoad) { + const { drizzle, clean } = parse(url); + if (drizzle) { + const file = readFileSync(clean, 'utf-8'); + if (clean.endsWith('.ts') || clean.endsWith('.mts')) { + const source = esbuild.transformSync(file, { + loader: 'ts', + format: 'esm', + }); + return { + format: 'module', + shortCircuit: true, + source: source.code, + }; + } + } + + // let Node.js handle all other URLs + return defaultLoad(url, context, defaultLoad); +} diff --git a/drizzle-kit/src/migrationPreparator.ts b/drizzle-kit/src/migrationPreparator.ts new file mode 100644 index 000000000..4e5664290 --- /dev/null +++ b/drizzle-kit/src/migrationPreparator.ts @@ -0,0 +1,208 @@ +import { randomUUID } from 'crypto'; +import fs from 'fs'; +import { serializeMySql, serializePg, serializeSingleStore, serializeSQLite } from './serializer'; +import { dryMySql, MySqlSchema, mysqlSchema } from './serializer/mysqlSchema'; +import { dryPg, PgSchema, pgSchema, PgSchemaInternal } from './serializer/pgSchema'; +import { drySingleStore, SingleStoreSchema, singlestoreSchema } from './serializer/singlestoreSchema'; +import { drySQLite, SQLiteSchema, sqliteSchema } from './serializer/sqliteSchema'; + +export const prepareMySqlDbPushSnapshot = async ( + prev: MySqlSchema, + schemaPath: string | string[], +): Promise<{ prev: MySqlSchema; cur: MySqlSchema }> => { + const serialized = await serializeMySql(schemaPath); + + const id = randomUUID(); + const idPrev = prev.id; + + const { version, dialect, ...rest } = serialized; + const result: MySqlSchema = { version, dialect, id, prevId: idPrev, ...rest }; + + return { prev, cur: result }; +}; + +export const prepareSingleStoreDbPushSnapshot = async ( + prev: SingleStoreSchema, + schemaPath: string | string[], +): Promise<{ prev: 
SingleStoreSchema; cur: SingleStoreSchema }> => { + const serialized = await serializeSingleStore(schemaPath); + + const id = randomUUID(); + const idPrev = prev.id; + + const { version, dialect, ...rest } = serialized; + const result: SingleStoreSchema = { version, dialect, id, prevId: idPrev, ...rest }; + + return { prev, cur: result }; +}; + +export const prepareSQLiteDbPushSnapshot = async ( + prev: SQLiteSchema, + schemaPath: string | string[], +): Promise<{ prev: SQLiteSchema; cur: SQLiteSchema }> => { + const serialized = await serializeSQLite(schemaPath); + + const id = randomUUID(); + const idPrev = prev.id; + + const { version, dialect, ...rest } = serialized; + const result: SQLiteSchema = { + version, + dialect, + id, + prevId: idPrev, + ...rest, + }; + + return { prev, cur: result }; +}; + +export const preparePgDbPushSnapshot = async ( + prev: PgSchema, + schemaPath: string | string[], + schemaFilter: string[] = ['public'], +): Promise<{ prev: PgSchema; cur: PgSchema }> => { + const serialized = await serializePg(schemaPath, schemaFilter); + + const id = randomUUID(); + const idPrev = prev.id; + + const { version, dialect, ...rest } = serialized; + const result: PgSchema = { version, dialect, id, prevId: idPrev, ...rest }; + + return { prev, cur: result }; +}; + +export const prepareMySqlMigrationSnapshot = async ( + migrationFolders: string[], + schemaPath: string | string[], +): Promise<{ prev: MySqlSchema; cur: MySqlSchema; custom: MySqlSchema }> => { + const prevSnapshot = mysqlSchema.parse( + preparePrevSnapshot(migrationFolders, dryMySql), + ); + const serialized = await serializeMySql(schemaPath); + + const id = randomUUID(); + const idPrev = prevSnapshot.id; + + const { version, dialect, ...rest } = serialized; + const result: MySqlSchema = { version, dialect, id, prevId: idPrev, ...rest }; + + const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot; + + // that's for custom migrations, when we need new IDs, but old 
snapshot + const custom: MySqlSchema = { + id, + prevId: idPrev, + ...prevRest, + }; + + return { prev: prevSnapshot, cur: result, custom }; +}; + +export const prepareSingleStoreMigrationSnapshot = async ( + migrationFolders: string[], + schemaPath: string | string[], +): Promise<{ prev: SingleStoreSchema; cur: SingleStoreSchema; custom: SingleStoreSchema }> => { + const prevSnapshot = singlestoreSchema.parse( + preparePrevSnapshot(migrationFolders, drySingleStore), + ); + const serialized = await serializeSingleStore(schemaPath); + + const id = randomUUID(); + const idPrev = prevSnapshot.id; + + const { version, dialect, ...rest } = serialized; + const result: SingleStoreSchema = { version, dialect, id, prevId: idPrev, ...rest }; + + const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot; + + // that's for custom migrations, when we need new IDs, but old snapshot + const custom: SingleStoreSchema = { + id, + prevId: idPrev, + ...prevRest, + }; + + return { prev: prevSnapshot, cur: result, custom }; +}; + +export const prepareSqliteMigrationSnapshot = async ( + snapshots: string[], + schemaPath: string | string[], +): Promise<{ prev: SQLiteSchema; cur: SQLiteSchema; custom: SQLiteSchema }> => { + const prevSnapshot = sqliteSchema.parse( + preparePrevSnapshot(snapshots, drySQLite), + ); + const serialized = await serializeSQLite(schemaPath); + + const id = randomUUID(); + const idPrev = prevSnapshot.id; + + const { version, dialect, ...rest } = serialized; + const result: SQLiteSchema = { + version, + dialect, + id, + prevId: idPrev, + ...rest, + }; + + const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot; + + // that's for custom migrations, when we need new IDs, but old snapshot + const custom: SQLiteSchema = { + id, + prevId: idPrev, + ...prevRest, + }; + + return { prev: prevSnapshot, cur: result, custom }; +}; + +export const fillPgSnapshot = ({ + serialized, + id, + idPrev, +}: { + serialized: PgSchemaInternal; + 
id: string; + idPrev: string; +}): PgSchema => { + // const id = randomUUID(); + return { id, prevId: idPrev, ...serialized }; +}; + +export const preparePgMigrationSnapshot = async ( + snapshots: string[], + schemaPath: string | string[], +): Promise<{ prev: PgSchema; cur: PgSchema; custom: PgSchema }> => { + const prevSnapshot = pgSchema.parse(preparePrevSnapshot(snapshots, dryPg)); + const serialized = await serializePg(schemaPath); + + const id = randomUUID(); + const idPrev = prevSnapshot.id; + + // const { version, dialect, ...rest } = serialized; + + const result: PgSchema = { id, prevId: idPrev, ...serialized }; + + const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot; + + // that's for custom migrations, when we need new IDs, but old snapshot + const custom: PgSchema = fillPgSnapshot({ serialized: prevRest, id, idPrev }); + + return { prev: prevSnapshot, cur: result, custom }; +}; + +const preparePrevSnapshot = (snapshots: string[], defaultPrev: any) => { + let prevSnapshot: any; + + if (snapshots.length === 0) { + prevSnapshot = defaultPrev; + } else { + const lastSnapshot = snapshots[snapshots.length - 1]; + prevSnapshot = JSON.parse(fs.readFileSync(lastSnapshot).toString()); + } + return prevSnapshot; +}; diff --git a/drizzle-kit/src/schemaValidator.ts b/drizzle-kit/src/schemaValidator.ts new file mode 100644 index 000000000..712252f37 --- /dev/null +++ b/drizzle-kit/src/schemaValidator.ts @@ -0,0 +1,23 @@ +import { enum as enumType, TypeOf, union } from 'zod'; +import { mysqlSchema, mysqlSchemaSquashed } from './serializer/mysqlSchema'; +import { pgSchema, pgSchemaSquashed } from './serializer/pgSchema'; +import { singlestoreSchema, singlestoreSchemaSquashed } from './serializer/singlestoreSchema'; +import { sqliteSchema, SQLiteSchemaSquashed } from './serializer/sqliteSchema'; + +export const dialects = ['postgresql', 'mysql', 'sqlite', 'singlestore'] as const; +export const dialect = enumType(dialects); + +export type Dialect 
= (typeof dialects)[number]; +const _: Dialect = '' as TypeOf; + +const commonSquashedSchema = union([ + pgSchemaSquashed, + mysqlSchemaSquashed, + SQLiteSchemaSquashed, + singlestoreSchemaSquashed, +]); + +const commonSchema = union([pgSchema, mysqlSchema, sqliteSchema, singlestoreSchema]); + +export type CommonSquashedSchema = TypeOf; +export type CommonSchema = TypeOf; diff --git a/drizzle-kit/src/serializer/index.ts b/drizzle-kit/src/serializer/index.ts new file mode 100644 index 000000000..6eb55c6f6 --- /dev/null +++ b/drizzle-kit/src/serializer/index.ts @@ -0,0 +1,154 @@ +import chalk from 'chalk'; +import type { SQL } from 'drizzle-orm'; +import fs from 'fs'; +import * as glob from 'glob'; +import Path from 'path'; +import { error } from '../cli/views'; +import type { MySqlSchemaInternal } from './mysqlSchema'; +import type { PgSchemaInternal } from './pgSchema'; +import { SingleStoreSchemaInternal } from './singlestoreSchema'; +import type { SQLiteSchemaInternal } from './sqliteSchema'; + +export const sqlToStr = (sql: SQL) => { + return sql.toQuery({ + escapeName: () => { + throw new Error("we don't support params for `sql` default values"); + }, + escapeParam: () => { + throw new Error("we don't support params for `sql` default values"); + }, + escapeString: () => { + throw new Error("we don't support params for `sql` default values"); + }, + }).sql; +}; + +export const sqlToStrGenerated = (sql: SQL) => { + return sql.toQuery({ + escapeName: () => { + throw new Error("we don't support params for `sql` default values"); + }, + escapeParam: () => { + throw new Error("we don't support params for `sql` default values"); + }, + escapeString: () => { + throw new Error("we don't support params for `sql` default values"); + }, + }).sql; +}; + +export const serializeMySql = async ( + path: string | string[], +): Promise => { + const filenames = prepareFilenames(path); + + console.log(chalk.gray(`Reading schema files:\n${filenames.join('\n')}\n`)); + + const { 
prepareFromMySqlImports } = await import('./mysqlImports'); + const { generateMySqlSnapshot } = await import('./mysqlSerializer'); + + const { tables } = await prepareFromMySqlImports(filenames); + + return generateMySqlSnapshot(tables); +}; + +export const serializePg = async ( + path: string | string[], + schemaFilter?: string[], +): Promise => { + const filenames = prepareFilenames(path); + + const { prepareFromPgImports } = await import('./pgImports'); + const { generatePgSnapshot } = await import('./pgSerializer'); + + const { tables, enums, schemas, sequences } = await prepareFromPgImports( + filenames, + ); + + return generatePgSnapshot(tables, enums, schemas, sequences, schemaFilter); +}; + +export const serializeSQLite = async ( + path: string | string[], +): Promise => { + const filenames = prepareFilenames(path); + + const { prepareFromSqliteImports } = await import('./sqliteImports'); + const { generateSqliteSnapshot } = await import('./sqliteSerializer'); + const { tables } = await prepareFromSqliteImports(filenames); + return generateSqliteSnapshot(tables); +}; + +export const serializeSingleStore = async ( + path: string | string[], +): Promise => { + const filenames = prepareFilenames(path); + + console.log(chalk.gray(`Reading schema files:\n${filenames.join('\n')}\n`)); + + const { prepareFromSingleStoreImports } = await import('./singlestoreImports'); + const { generateSingleStoreSnapshot } = await import('./singlestoreSerializer'); + + const { tables } = await prepareFromSingleStoreImports(filenames); + + return generateSingleStoreSnapshot(tables); +}; + +export const prepareFilenames = (path: string | string[]) => { + if (typeof path === 'string') { + path = [path]; + } + const prefix = process.env.TEST_CONFIG_PATH_PREFIX || ''; + + const result = path.reduce((result, cur) => { + const globbed = glob.sync(`${prefix}${cur}`); + + globbed.forEach((it) => { + const fileName = fs.lstatSync(it).isDirectory() ? 
null : Path.resolve(it); + + const filenames = fileName + ? [fileName!] + : fs.readdirSync(it).map((file) => Path.join(Path.resolve(it), file)); + + filenames + .filter((file) => !fs.lstatSync(file).isDirectory()) + .forEach((file) => result.add(file)); + }); + + return result; + }, new Set()); + const res = [...result]; + + // TODO: properly handle and test + const errors = res.filter((it) => { + return !( + it.endsWith('.ts') + || it.endsWith('.js') + || it.endsWith('.cjs') + || it.endsWith('.mjs') + || it.endsWith('.mts') + || it.endsWith('.cts') + ); + }); + + // when schema: "./schema" and not "./schema.ts" + if (res.length === 0) { + console.log( + error( + `No schema files found for path config [${ + path + .map((it) => `'${it}'`) + .join(', ') + }]`, + ), + ); + console.log( + error( + `If path represents a file - please make sure to use .ts or other extension in the path`, + ), + ); + process.exit(1); + } + + return res; +}; diff --git a/drizzle-kit/src/serializer/mysqlImports.ts b/drizzle-kit/src/serializer/mysqlImports.ts new file mode 100644 index 000000000..d9899026b --- /dev/null +++ b/drizzle-kit/src/serializer/mysqlImports.ts @@ -0,0 +1,31 @@ +import { is } from 'drizzle-orm'; +import { AnyMySqlTable, MySqlTable } from 'drizzle-orm/mysql-core'; +import { safeRegister } from '../cli/commands/utils'; + +export const prepareFromExports = (exports: Record) => { + const tables: AnyMySqlTable[] = []; + + const i0values = Object.values(exports); + i0values.forEach((t) => { + if (is(t, MySqlTable)) { + tables.push(t); + } + }); + + return { tables }; +}; + +export const prepareFromMySqlImports = async (imports: string[]) => { + const tables: AnyMySqlTable[] = []; + + const { unregister } = await safeRegister(); + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + const i0: Record = require(`${it}`); + const prepared = prepareFromExports(i0); + + tables.push(...prepared.tables); + } + unregister(); + return { tables: Array.from(new 
Set(tables)) }; +}; diff --git a/drizzle-kit/src/serializer/mysqlSchema.ts b/drizzle-kit/src/serializer/mysqlSchema.ts new file mode 100644 index 000000000..5bc62ab2f --- /dev/null +++ b/drizzle-kit/src/serializer/mysqlSchema.ts @@ -0,0 +1,348 @@ +import { any, boolean, enum as enumType, literal, object, record, string, TypeOf, union } from 'zod'; +import { mapValues, originUUID, snapshotVersion } from '../global'; + +// ------- V3 -------- +const index = object({ + name: string(), + columns: string().array(), + isUnique: boolean(), + using: enumType(['btree', 'hash']).optional(), + algorithm: enumType(['default', 'inplace', 'copy']).optional(), + lock: enumType(['default', 'none', 'shared', 'exclusive']).optional(), +}).strict(); + +const fk = object({ + name: string(), + tableFrom: string(), + columnsFrom: string().array(), + tableTo: string(), + columnsTo: string().array(), + onUpdate: string().optional(), + onDelete: string().optional(), +}).strict(); + +const column = object({ + name: string(), + type: string(), + primaryKey: boolean(), + notNull: boolean(), + autoincrement: boolean().optional(), + default: any().optional(), + onUpdate: any().optional(), + generated: object({ + type: enumType(['stored', 'virtual']), + as: string(), + }).optional(), +}).strict(); + +const tableV3 = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), +}).strict(); + +const compositePK = object({ + name: string(), + columns: string().array(), +}).strict(); + +const uniqueConstraint = object({ + name: string(), + columns: string().array(), +}).strict(); + +const tableV4 = object({ + name: string(), + schema: string().optional(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), +}).strict(); + +const table = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), 
fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), +}).strict(); + +export const kitInternals = object({ + tables: record( + string(), + object({ + columns: record( + string(), + object({ isDefaultAnExpression: boolean().optional() }).optional(), + ), + }).optional(), + ).optional(), + indexes: record( + string(), + object({ + columns: record( + string(), + object({ isExpression: boolean().optional() }).optional(), + ), + }).optional(), + ).optional(), +}).optional(); + +// use main dialect +const dialect = literal('mysql'); + +const schemaHash = object({ + id: string(), + prevId: string(), +}); + +export const schemaInternalV3 = object({ + version: literal('3'), + dialect: dialect, + tables: record(string(), tableV3), +}).strict(); + +export const schemaInternalV4 = object({ + version: literal('4'), + dialect: dialect, + tables: record(string(), tableV4), + schemas: record(string(), string()), +}).strict(); + +export const schemaInternalV5 = object({ + version: literal('5'), + dialect: dialect, + tables: record(string(), table), + schemas: record(string(), string()), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const schemaInternal = object({ + version: literal('5'), + dialect: dialect, + tables: record(string(), table), + _meta: object({ + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const schemaV3 = schemaInternalV3.merge(schemaHash); +export const schemaV4 = schemaInternalV4.merge(schemaHash); +export const schemaV5 = schemaInternalV5.merge(schemaHash); +export const schema = schemaInternal.merge(schemaHash); + +const tableSquashedV4 = object({ + name: string(), + schema: string().optional(), + columns: record(string(), column), + indexes: record(string(), 
string()), + foreignKeys: record(string(), string()), +}).strict(); + +const tableSquashed = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), string()), + foreignKeys: record(string(), string()), + compositePrimaryKeys: record(string(), string()), + uniqueConstraints: record(string(), string()).default({}), +}).strict(); + +export const schemaSquashed = object({ + version: literal('5'), + dialect: dialect, + tables: record(string(), tableSquashed), +}).strict(); + +export const schemaSquashedV4 = object({ + version: literal('4'), + dialect: dialect, + tables: record(string(), tableSquashedV4), + schemas: record(string(), string()), +}).strict(); + +export type Dialect = TypeOf; +export type Column = TypeOf; +export type Table = TypeOf; +export type TableV4 = TypeOf; +export type MySqlSchema = TypeOf; +export type MySqlSchemaV3 = TypeOf; +export type MySqlSchemaV4 = TypeOf; +export type MySqlSchemaV5 = TypeOf; +export type MySqlSchemaInternal = TypeOf; +export type MySqlKitInternals = TypeOf; +export type MySqlSchemaSquashed = TypeOf; +export type MySqlSchemaSquashedV4 = TypeOf; +export type Index = TypeOf; +export type ForeignKey = TypeOf; +export type PrimaryKey = TypeOf; +export type UniqueConstraint = TypeOf; + +export const MySqlSquasher = { + squashIdx: (idx: Index) => { + index.parse(idx); + return `${idx.name};${idx.columns.join(',')};${idx.isUnique};${idx.using ?? ''};${idx.algorithm ?? ''};${ + idx.lock ?? '' + }`; + }, + unsquashIdx: (input: string): Index => { + const [name, columnsString, isUnique, using, algorithm, lock] = input.split(';'); + const destructed = { + name, + columns: columnsString.split(','), + isUnique: isUnique === 'true', + using: using ? using : undefined, + algorithm: algorithm ? algorithm : undefined, + lock: lock ? 
lock : undefined, + }; + return index.parse(destructed); + }, + squashPK: (pk: PrimaryKey) => { + return `${pk.name};${pk.columns.join(',')}`; + }, + unsquashPK: (pk: string): PrimaryKey => { + const splitted = pk.split(';'); + return { name: splitted[0], columns: splitted[1].split(',') }; + }, + squashUnique: (unq: UniqueConstraint) => { + return `${unq.name};${unq.columns.join(',')}`; + }, + unsquashUnique: (unq: string): UniqueConstraint => { + const [name, columns] = unq.split(';'); + return { name, columns: columns.split(',') }; + }, + squashFK: (fk: ForeignKey) => { + return `${fk.name};${fk.tableFrom};${fk.columnsFrom.join(',')};${fk.tableTo};${fk.columnsTo.join(',')};${ + fk.onUpdate ?? '' + };${fk.onDelete ?? ''}`; + }, + unsquashFK: (input: string): ForeignKey => { + const [ + name, + tableFrom, + columnsFromStr, + tableTo, + columnsToStr, + onUpdate, + onDelete, + ] = input.split(';'); + + const result: ForeignKey = fk.parse({ + name, + tableFrom, + columnsFrom: columnsFromStr.split(','), + tableTo, + columnsTo: columnsToStr.split(','), + onUpdate, + onDelete, + }); + return result; + }, +}; + +export const squashMysqlSchemeV4 = ( + json: MySqlSchemaV4, +): MySqlSchemaSquashedV4 => { + const mappedTables = Object.fromEntries( + Object.entries(json.tables).map((it) => { + const squashedIndexes = mapValues(it[1].indexes, (index) => { + return MySqlSquasher.squashIdx(index); + }); + + const squashedFKs = mapValues(it[1].foreignKeys, (fk) => { + return MySqlSquasher.squashFK(fk); + }); + + return [ + it[0], + { + name: it[1].name, + schema: it[1].schema, + columns: it[1].columns, + indexes: squashedIndexes, + foreignKeys: squashedFKs, + }, + ]; + }), + ); + return { + version: '4', + dialect: json.dialect, + tables: mappedTables, + schemas: json.schemas, + }; +}; + +export const squashMysqlScheme = (json: MySqlSchema): MySqlSchemaSquashed => { + const mappedTables = Object.fromEntries( + Object.entries(json.tables).map((it) => { + const squashedIndexes = 
mapValues(it[1].indexes, (index) => { + return MySqlSquasher.squashIdx(index); + }); + + const squashedFKs = mapValues(it[1].foreignKeys, (fk) => { + return MySqlSquasher.squashFK(fk); + }); + + const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => { + return MySqlSquasher.squashPK(pk); + }); + + const squashedUniqueConstraints = mapValues( + it[1].uniqueConstraints, + (unq) => { + return MySqlSquasher.squashUnique(unq); + }, + ); + + return [ + it[0], + { + name: it[1].name, + columns: it[1].columns, + indexes: squashedIndexes, + foreignKeys: squashedFKs, + compositePrimaryKeys: squashedPKs, + uniqueConstraints: squashedUniqueConstraints, + }, + ]; + }), + ); + return { + version: '5', + dialect: json.dialect, + tables: mappedTables, + }; +}; + +export const mysqlSchema = schema; +export const mysqlSchemaV3 = schemaV3; +export const mysqlSchemaV4 = schemaV4; +export const mysqlSchemaV5 = schemaV5; +export const mysqlSchemaSquashed = schemaSquashed; + +// no prev version +export const backwardCompatibleMysqlSchema = union([mysqlSchemaV5, schema]); + +export const dryMySql = mysqlSchema.parse({ + version: '5', + dialect: 'mysql', + id: originUUID, + prevId: '', + tables: {}, + schemas: {}, + _meta: { + schemas: {}, + tables: {}, + columns: {}, + }, +}); diff --git a/drizzle-kit/src/serializer/mysqlSerializer.ts b/drizzle-kit/src/serializer/mysqlSerializer.ts new file mode 100644 index 000000000..14e867128 --- /dev/null +++ b/drizzle-kit/src/serializer/mysqlSerializer.ts @@ -0,0 +1,728 @@ +import chalk from 'chalk'; +import { getTableName, is } from 'drizzle-orm'; +import { SQL } from 'drizzle-orm'; +import { AnyMySqlTable, MySqlDialect, type PrimaryKey as PrimaryKeyORM, uniqueKeyName } from 'drizzle-orm/mysql-core'; +import { getTableConfig } from 'drizzle-orm/mysql-core'; +import { RowDataPacket } from 'mysql2/promise'; +import { withStyle } from '../cli/validations/outputs'; +import { IntrospectStage, IntrospectStatus } from '../cli/views'; +import { + 
Column, + ForeignKey, + Index, + MySqlKitInternals, + MySqlSchemaInternal, + PrimaryKey, + Table, + UniqueConstraint, +} from '../serializer/mysqlSchema'; +import type { DB } from '../utils'; +import { sqlToStr } from '.'; +// import { MySqlColumnWithAutoIncrement } from "drizzle-orm/mysql-core"; +// import { MySqlDateBaseColumn } from "drizzle-orm/mysql-core"; + +const dialect = new MySqlDialect(); + +export const indexName = (tableName: string, columns: string[]) => { + return `${tableName}_${columns.join('_')}_index`; +}; + +export const generateMySqlSnapshot = ( + tables: AnyMySqlTable[], +): MySqlSchemaInternal => { + const result: Record = {}; + const internal: MySqlKitInternals = { tables: {}, indexes: {} }; + for (const table of tables) { + const { + name: tableName, + columns, + indexes, + foreignKeys, + schema, + primaryKeys, + uniqueConstraints, + } = getTableConfig(table); + const columnsObject: Record = {}; + const indexesObject: Record = {}; + const foreignKeysObject: Record = {}; + const primaryKeysObject: Record = {}; + const uniqueConstraintObject: Record = {}; + + columns.forEach((column) => { + const notNull: boolean = column.notNull; + const sqlTypeLowered = column.getSQLType().toLowerCase(); + const autoIncrement = typeof (column as any).autoIncrement === 'undefined' + ? false + : (column as any).autoIncrement; + + const generated = column.generated; + + const columnToSet: Column = { + name: column.name, + type: column.getSQLType(), + primaryKey: false, + // If field is autoincrement it's notNull by default + // notNull: autoIncrement ? true : notNull, + notNull, + autoincrement: autoIncrement, + onUpdate: (column as any).hasOnUpdateNow, + generated: generated + ? { + as: is(generated.as, SQL) + ? dialect.sqlToQuery(generated.as as SQL).sql + : typeof generated.as === 'function' + ? dialect.sqlToQuery(generated.as() as SQL).sql + : (generated.as as any), + type: generated.mode ?? 
'stored', + } + : undefined, + }; + + if (column.primary) { + primaryKeysObject[`${tableName}_${column.name}`] = { + name: `${tableName}_${column.name}`, + columns: [column.name], + }; + } + + if (column.isUnique) { + const existingUnique = uniqueConstraintObject[column.uniqueName!]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. + The unique constraint ${ + chalk.underline.blue( + column.uniqueName, + ) + } on the ${ + chalk.underline.blue( + column.name, + ) + } column is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + existingUnique.columns.join(','), + ) + } columns\n`) + }`, + ); + process.exit(1); + } + uniqueConstraintObject[column.uniqueName!] = { + name: column.uniqueName!, + columns: [columnToSet.name], + }; + } + + if (column.default !== undefined) { + if (is(column.default, SQL)) { + columnToSet.default = sqlToStr(column.default); + } else { + if (typeof column.default === 'string') { + columnToSet.default = `'${column.default}'`; + } else { + if (sqlTypeLowered === 'json') { + columnToSet.default = `'${JSON.stringify(column.default)}'`; + } else if (column.default instanceof Date) { + if (sqlTypeLowered === 'date') { + columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; + } else if ( + sqlTypeLowered.startsWith('datetime') + || sqlTypeLowered.startsWith('timestamp') + ) { + columnToSet.default = `'${ + column.default + .toISOString() + .replace('T', ' ') + .slice(0, 23) + }'`; + } + } else { + columnToSet.default = column.default; + } + } + if (['blob', 'text', 'json'].includes(column.getSQLType())) { + columnToSet.default = `(${columnToSet.default})`; + } + } + } + columnsObject[column.name] = columnToSet; + }); + + primaryKeys.map((pk: PrimaryKeyORM) => { + const columnNames = pk.columns.map((c: any) => c.name); + 
primaryKeysObject[pk.getName()] = { + name: pk.getName(), + columns: columnNames, + }; + + // all composite pk's should be treated as notNull + for (const column of pk.columns) { + columnsObject[column.name].notNull = true; + } + }); + + uniqueConstraints?.map((unq) => { + const columnNames = unq.columns.map((c) => c.name); + + const name = unq.name ?? uniqueKeyName(table, columnNames); + + const existingUnique = uniqueConstraintObject[name]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. \nThe unique constraint ${ + chalk.underline.blue( + name, + ) + } on the ${ + chalk.underline.blue( + columnNames.join(','), + ) + } columns is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + existingUnique.columns.join(','), + ) + } columns\n`, + ) + }`, + ); + process.exit(1); + } + + uniqueConstraintObject[name] = { + name: unq.name!, + columns: columnNames, + }; + }); + + const fks: ForeignKey[] = foreignKeys.map((fk) => { + const name = fk.getName(); + const tableFrom = tableName; + const onDelete = fk.onDelete ?? 'no action'; + const onUpdate = fk.onUpdate ?? 
'no action'; + const reference = fk.reference(); + + const referenceFT = reference.foreignTable; + + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + const tableTo = getTableName(referenceFT); + const columnsFrom = reference.columns.map((it) => it.name); + const columnsTo = reference.foreignColumns.map((it) => it.name); + return { + name, + tableFrom, + tableTo, + columnsFrom, + columnsTo, + onDelete, + onUpdate, + } as ForeignKey; + }); + + fks.forEach((it) => { + foreignKeysObject[it.name] = it; + }); + + indexes.forEach((value) => { + const columns = value.config.columns; + const name = value.config.name; + + let indexColumns = columns.map((it) => { + if (is(it, SQL)) { + const sql = dialect.sqlToQuery(it, 'indexes').sql; + if (typeof internal!.indexes![name] === 'undefined') { + internal!.indexes![name] = { + columns: { + [sql]: { + isExpression: true, + }, + }, + }; + } else { + if (typeof internal!.indexes![name]?.columns[sql] === 'undefined') { + internal!.indexes![name]!.columns[sql] = { + isExpression: true, + }; + } else { + internal!.indexes![name]!.columns[sql]!.isExpression = true; + } + } + return sql; + } else { + return `${it.name}`; + } + }); + + if (value.config.unique) { + if (typeof uniqueConstraintObject[name] !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. 
\nThe unique index ${ + chalk.underline.blue( + name, + ) + } on the ${ + chalk.underline.blue( + indexColumns.join(','), + ) + } columns is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + uniqueConstraintObject[name].columns.join(','), + ) + } columns\n`, + ) + }`, + ); + process.exit(1); + } + } else { + if (typeof foreignKeysObject[name] !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `In MySQL, when creating a foreign key, an index is automatically generated with the same name as the foreign key constraint.\n\nWe have encountered a collision between the index name on columns ${ + chalk.underline.blue( + indexColumns.join(','), + ) + } and the foreign key on columns ${ + chalk.underline.blue( + foreignKeysObject[name].columnsFrom.join(','), + ) + }. Please change either the index name or the foreign key name. For more information, please refer to https://dev.mysql.com/doc/refman/8.0/en/constraint-foreign-key.html\n + `, + ) + }`, + ); + process.exit(1); + } + } + + indexesObject[name] = { + name, + columns: indexColumns, + isUnique: value.config.unique ?? 
false, + using: value.config.using, + algorithm: value.config.algorythm, + lock: value.config.lock, + }; + }); + + // only handle tables without schemas + if (!schema) { + result[tableName] = { + name: tableName, + columns: columnsObject, + indexes: indexesObject, + foreignKeys: foreignKeysObject, + compositePrimaryKeys: primaryKeysObject, + uniqueConstraints: uniqueConstraintObject, + }; + } + } + + return { + version: '5', + dialect: 'mysql', + tables: result, + _meta: { + tables: {}, + columns: {}, + }, + internal, + }; +}; + +function clearDefaults(defaultValue: any, collate: string) { + if (typeof collate === 'undefined' || collate === null) { + collate = `utf8mb4`; + } + + let resultDefault = defaultValue; + collate = `_${collate}`; + if (defaultValue.startsWith(collate)) { + resultDefault = resultDefault + .substring(collate.length, defaultValue.length) + .replace(/\\/g, ''); + if (resultDefault.startsWith("'") && resultDefault.endsWith("'")) { + return `('${resultDefault.substring(1, resultDefault.length - 1)}')`; + } else { + return `'${resultDefault}'`; + } + } else { + return `(${resultDefault})`; + } +} + +export const fromDatabase = async ( + db: DB, + inputSchema: string, + tablesFilter: (table: string) => boolean = (table) => true, + progressCallback?: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void, +): Promise => { + const result: Record = {}; + const internals: MySqlKitInternals = { tables: {}, indexes: {} }; + + const columns = await db.query(`select * from information_schema.columns + where table_schema = '${inputSchema}' and table_name != '__drizzle_migrations' + order by table_name, ordinal_position;`); + + const response = columns as RowDataPacket[]; + + const schemas: string[] = []; + + let columnsCount = 0; + let tablesCount = new Set(); + let indexesCount = 0; + let foreignKeysCount = 0; + + const idxs = await db.query( + `select * from INFORMATION_SCHEMA.STATISTICS + WHERE 
INFORMATION_SCHEMA.STATISTICS.TABLE_SCHEMA = '${inputSchema}' and INFORMATION_SCHEMA.STATISTICS.INDEX_NAME != 'PRIMARY';`, + ); + + const idxRows = idxs as RowDataPacket[]; + + for (const column of response) { + if (!tablesFilter(column['TABLE_NAME'] as string)) continue; + + columnsCount += 1; + if (progressCallback) { + progressCallback('columns', columnsCount, 'fetching'); + } + const schema: string = column['TABLE_SCHEMA']; + const tableName = column['TABLE_NAME']; + + tablesCount.add(`${schema}.${tableName}`); + if (progressCallback) { + progressCallback('columns', tablesCount.size, 'fetching'); + } + const columnName: string = column['COLUMN_NAME']; + const isNullable = column['IS_NULLABLE'] === 'YES'; // 'YES', 'NO' + const dataType = column['DATA_TYPE']; // varchar + const columnType = column['COLUMN_TYPE']; // varchar(256) + const isPrimary = column['COLUMN_KEY'] === 'PRI'; // 'PRI', '' + const columnDefault: string = column['COLUMN_DEFAULT']; + const collation: string = column['CHARACTER_SET_NAME']; + const geenratedExpression: string = column['GENERATION_EXPRESSION']; + + let columnExtra = column['EXTRA']; + let isAutoincrement = false; // 'auto_increment', '' + let isDefaultAnExpression = false; // 'auto_increment', '' + + if (typeof column['EXTRA'] !== 'undefined') { + columnExtra = column['EXTRA']; + isAutoincrement = column['EXTRA'] === 'auto_increment'; // 'auto_increment', '' + isDefaultAnExpression = column['EXTRA'].includes('DEFAULT_GENERATED'); // 'auto_increment', '' + } + + // if (isPrimary) { + // if (typeof tableToPk[tableName] === "undefined") { + // tableToPk[tableName] = [columnName]; + // } else { + // tableToPk[tableName].push(columnName); + // } + // } + + if (schema !== inputSchema) { + schemas.push(schema); + } + + const table = result[tableName]; + + // let changedType = columnType.replace("bigint unsigned", "serial") + let changedType = columnType; + + if (columnType === 'bigint unsigned' && !isNullable && isAutoincrement) { + // 
check unique here + const uniqueIdx = idxRows.filter( + (it) => + it['COLUMN_NAME'] === columnName + && it['TABLE_NAME'] === tableName + && it['NON_UNIQUE'] === 0, + ); + if (uniqueIdx && uniqueIdx.length === 1) { + changedType = columnType.replace('bigint unsigned', 'serial'); + } + } + + if (columnType.startsWith('tinyint')) { + changedType = 'tinyint'; + } + + let onUpdate: boolean | undefined = undefined; + if ( + columnType.startsWith('timestamp') + && typeof columnExtra !== 'undefined' + && columnExtra.includes('on update CURRENT_TIMESTAMP') + ) { + onUpdate = true; + } + + const newColumn: Column = { + default: columnDefault === null + ? undefined + : /^-?[\d.]+(?:e-?\d+)?$/.test(columnDefault) + && !['decimal', 'char', 'varchar'].some((type) => columnType.startsWith(type)) + ? Number(columnDefault) + : isDefaultAnExpression + ? clearDefaults(columnDefault, collation) + : `'${columnDefault}'`, + autoincrement: isAutoincrement, + name: columnName, + type: changedType, + primaryKey: false, + notNull: !isNullable, + onUpdate, + generated: geenratedExpression + ? { + as: geenratedExpression, + type: columnExtra === 'VIRTUAL GENERATED' ? 
'virtual' : 'stored', + } + : undefined, + }; + + // Set default to internal object + if (isDefaultAnExpression) { + if (typeof internals!.tables![tableName] === 'undefined') { + internals!.tables![tableName] = { + columns: { + [columnName]: { + isDefaultAnExpression: true, + }, + }, + }; + } else { + if ( + typeof internals!.tables![tableName]!.columns[columnName] + === 'undefined' + ) { + internals!.tables![tableName]!.columns[columnName] = { + isDefaultAnExpression: true, + }; + } else { + internals!.tables![tableName]!.columns[ + columnName + ]!.isDefaultAnExpression = true; + } + } + } + + if (!table) { + result[tableName] = { + name: tableName, + columns: { + [columnName]: newColumn, + }, + compositePrimaryKeys: {}, + indexes: {}, + foreignKeys: {}, + uniqueConstraints: {}, + }; + } else { + result[tableName]!.columns[columnName] = newColumn; + } + } + + const tablePks = await db.query( + `SELECT table_name, column_name, ordinal_position + FROM information_schema.table_constraints t + LEFT JOIN information_schema.key_column_usage k + USING(constraint_name,table_schema,table_name) + WHERE t.constraint_type='PRIMARY KEY' + and table_name != '__drizzle_migrations' + AND t.table_schema = '${inputSchema}' + ORDER BY ordinal_position`, + ); + + const tableToPk: { [tname: string]: string[] } = {}; + + const tableToPkRows = tablePks as RowDataPacket[]; + for (const tableToPkRow of tableToPkRows) { + const tableName: string = tableToPkRow['TABLE_NAME']; + const columnName: string = tableToPkRow['COLUMN_NAME']; + const position: string = tableToPkRow['ordinal_position']; + + if (typeof result[tableName] === 'undefined') { + continue; + } + + if (typeof tableToPk[tableName] === 'undefined') { + tableToPk[tableName] = [columnName]; + } else { + tableToPk[tableName].push(columnName); + } + } + + for (const [key, value] of Object.entries(tableToPk)) { + // if (value.length > 1) { + result[key].compositePrimaryKeys = { + [`${key}_${value.join('_')}`]: { + name: 
`${key}_${value.join('_')}`, + columns: value, + }, + }; + // } else if (value.length === 1) { + // result[key].columns[value[0]].primaryKey = true; + // } else { + // } + } + if (progressCallback) { + progressCallback('columns', columnsCount, 'done'); + progressCallback('tables', tablesCount.size, 'done'); + } + try { + const fks = await db.query( + `SELECT + kcu.TABLE_SCHEMA, + kcu.TABLE_NAME, + kcu.CONSTRAINT_NAME, + kcu.COLUMN_NAME, + kcu.REFERENCED_TABLE_SCHEMA, + kcu.REFERENCED_TABLE_NAME, + kcu.REFERENCED_COLUMN_NAME, + rc.UPDATE_RULE, + rc.DELETE_RULE + FROM + INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu + LEFT JOIN + information_schema.referential_constraints rc + ON kcu.CONSTRAINT_NAME = rc.CONSTRAINT_NAME + WHERE kcu.TABLE_SCHEMA = '${inputSchema}' AND kcu.CONSTRAINT_NAME != 'PRIMARY' + AND kcu.REFERENCED_TABLE_NAME IS NOT NULL;`, + ); + + const fkRows = fks as RowDataPacket[]; + + for (const fkRow of fkRows) { + foreignKeysCount += 1; + if (progressCallback) { + progressCallback('fks', foreignKeysCount, 'fetching'); + } + const tableSchema = fkRow['TABLE_SCHEMA']; + const tableName: string = fkRow['TABLE_NAME']; + const constraintName = fkRow['CONSTRAINT_NAME']; + const columnName: string = fkRow['COLUMN_NAME']; + const refTableSchema = fkRow['REFERENCED_TABLE_SCHEMA']; + const refTableName = fkRow['REFERENCED_TABLE_NAME']; + const refColumnName: string = fkRow['REFERENCED_COLUMN_NAME']; + const updateRule: string = fkRow['UPDATE_RULE']; + const deleteRule = fkRow['DELETE_RULE']; + + const tableInResult = result[tableName]; + if (typeof tableInResult === 'undefined') continue; + + if (typeof tableInResult.foreignKeys[constraintName] !== 'undefined') { + tableInResult.foreignKeys[constraintName]!.columnsFrom.push(columnName); + tableInResult.foreignKeys[constraintName]!.columnsTo.push( + refColumnName, + ); + } else { + tableInResult.foreignKeys[constraintName] = { + name: constraintName, + tableFrom: tableName, + tableTo: refTableName, + columnsFrom: 
[columnName], + columnsTo: [refColumnName], + onDelete: deleteRule?.toLowerCase(), + onUpdate: updateRule?.toLowerCase(), + }; + } + + tableInResult.foreignKeys[constraintName]!.columnsFrom = [ + ...new Set(tableInResult.foreignKeys[constraintName]!.columnsFrom), + ]; + + tableInResult.foreignKeys[constraintName]!.columnsTo = [ + ...new Set(tableInResult.foreignKeys[constraintName]!.columnsTo), + ]; + } + } catch (e) { + // console.log(`Can't proccess foreign keys`); + } + if (progressCallback) { + progressCallback('fks', foreignKeysCount, 'done'); + } + + for (const idxRow of idxRows) { + const tableSchema = idxRow['TABLE_SCHEMA']; + const tableName = idxRow['TABLE_NAME']; + const constraintName = idxRow['INDEX_NAME']; + const columnName: string = idxRow['COLUMN_NAME']; + const isUnique = idxRow['NON_UNIQUE'] === 0; + + const tableInResult = result[tableName]; + if (typeof tableInResult === 'undefined') continue; + + // if (tableInResult.columns[columnName].type === "serial") continue; + + indexesCount += 1; + if (progressCallback) { + progressCallback('indexes', indexesCount, 'fetching'); + } + + if (isUnique) { + if ( + typeof tableInResult.uniqueConstraints[constraintName] !== 'undefined' + ) { + tableInResult.uniqueConstraints[constraintName]!.columns.push( + columnName, + ); + } else { + tableInResult.uniqueConstraints[constraintName] = { + name: constraintName, + columns: [columnName], + }; + } + } else { + // in MySQL FK creates index by default. 
Name of index is the same as fk constraint name + // so for introspect we will just skip it + if (typeof tableInResult.foreignKeys[constraintName] === 'undefined') { + if (typeof tableInResult.indexes[constraintName] !== 'undefined') { + tableInResult.indexes[constraintName]!.columns.push(columnName); + } else { + tableInResult.indexes[constraintName] = { + name: constraintName, + columns: [columnName], + isUnique: isUnique, + }; + } + } + } + } + + if (progressCallback) { + progressCallback('indexes', indexesCount, 'done'); + // progressCallback("enums", 0, "fetching"); + progressCallback('enums', 0, 'done'); + } + + return { + version: '5', + dialect: 'mysql', + tables: result, + _meta: { + tables: {}, + columns: {}, + }, + internal: internals, + }; +}; diff --git a/drizzle-kit/src/serializer/pgImports.ts b/drizzle-kit/src/serializer/pgImports.ts new file mode 100644 index 000000000..ffedd084c --- /dev/null +++ b/drizzle-kit/src/serializer/pgImports.ts @@ -0,0 +1,54 @@ +import { is } from 'drizzle-orm'; +import { AnyPgTable, isPgEnum, isPgSequence, PgEnum, PgSchema, PgSequence, PgTable } from 'drizzle-orm/pg-core'; +import { safeRegister } from '../cli/commands/utils'; + +export const prepareFromExports = (exports: Record) => { + const tables: AnyPgTable[] = []; + const enums: PgEnum[] = []; + const schemas: PgSchema[] = []; + const sequences: PgSequence[] = []; + + const i0values = Object.values(exports); + i0values.forEach((t) => { + if (isPgEnum(t)) { + enums.push(t); + return; + } + if (is(t, PgTable)) { + tables.push(t); + } + + if (is(t, PgSchema)) { + schemas.push(t); + } + + if (isPgSequence(t)) { + sequences.push(t); + } + }); + + return { tables, enums, schemas, sequences }; +}; + +export const prepareFromPgImports = async (imports: string[]) => { + let tables: AnyPgTable[] = []; + let enums: PgEnum[] = []; + let schemas: PgSchema[] = []; + let sequences: PgSequence[] = []; + + const { unregister } = await safeRegister(); + for (let i = 0; i < 
imports.length; i++) { + const it = imports[i]; + + const i0: Record = require(`${it}`); + const prepared = prepareFromExports(i0); + + tables.push(...prepared.tables); + enums.push(...prepared.enums); + schemas.push(...prepared.schemas); + sequences.push(...prepared.sequences); + } + unregister(); + + return { tables: Array.from(new Set(tables)), enums, schemas, sequences }; +}; diff --git a/drizzle-kit/src/serializer/pgSchema.ts b/drizzle-kit/src/serializer/pgSchema.ts new file mode 100644 index 000000000..5860a6fef --- /dev/null +++ b/drizzle-kit/src/serializer/pgSchema.ts @@ -0,0 +1,726 @@ +import { vectorOps } from 'src/extensions/vector'; +import { mapValues, originUUID, snapshotVersion } from '../global'; + +import { any, array, boolean, enum as enumType, literal, number, object, record, string, TypeOf, union } from 'zod'; + +const indexV2 = object({ + name: string(), + columns: record( + string(), + object({ + name: string(), + }), + ), + isUnique: boolean(), +}).strict(); + +const columnV2 = object({ + name: string(), + type: string(), + primaryKey: boolean(), + notNull: boolean(), + default: any().optional(), + references: string().optional(), +}).strict(); + +const tableV2 = object({ + name: string(), + columns: record(string(), columnV2), + indexes: record(string(), indexV2), +}).strict(); + +const enumSchemaV1 = object({ + name: string(), + values: record(string(), string()), +}).strict(); + +const enumSchema = object({ + name: string(), + schema: string(), + values: string().array(), +}).strict(); + +export const pgSchemaV2 = object({ + version: literal('2'), + tables: record(string(), tableV2), + enums: record(string(), enumSchemaV1), +}).strict(); + +// ------- V1 -------- +const references = object({ + foreignKeyName: string(), + table: string(), + column: string(), + onDelete: string().optional(), + onUpdate: string().optional(), +}).strict(); + +const columnV1 = object({ + name: string(), + type: string(), + primaryKey: boolean(), + notNull: 
boolean(), + default: any().optional(), + references: references.optional(), +}).strict(); + +const tableV1 = object({ + name: string(), + columns: record(string(), columnV1), + indexes: record(string(), indexV2), +}).strict(); + +export const pgSchemaV1 = object({ + version: literal('1'), + tables: record(string(), tableV1), + enums: record(string(), enumSchemaV1), +}).strict(); + +const indexColumn = object({ + expression: string(), + isExpression: boolean(), + asc: boolean(), + nulls: string().optional(), + opclass: string().optional(), +}); + +export type IndexColumnType = TypeOf; + +const index = object({ + name: string(), + columns: indexColumn.array(), + isUnique: boolean(), + with: record(string(), any()).optional(), + method: string().default('btree'), + where: string().optional(), + concurrently: boolean().default(false), +}).strict(); + +const indexV4 = object({ + name: string(), + columns: string().array(), + isUnique: boolean(), + with: record(string(), string()).optional(), + method: string().default('btree'), + where: string().optional(), + concurrently: boolean().default(false), +}).strict(); + +const indexV5 = object({ + name: string(), + columns: string().array(), + isUnique: boolean(), + with: record(string(), string()).optional(), + method: string().default('btree'), + where: string().optional(), + concurrently: boolean().default(false), +}).strict(); + +const indexV6 = object({ + name: string(), + columns: string().array(), + isUnique: boolean(), + with: record(string(), string()).optional(), + method: string().default('btree'), + where: string().optional(), + concurrently: boolean().default(false), +}).strict(); + +const fk = object({ + name: string(), + tableFrom: string(), + columnsFrom: string().array(), + tableTo: string(), + schemaTo: string().optional(), + columnsTo: string().array(), + onUpdate: string().optional(), + onDelete: string().optional(), +}).strict(); + +export const sequenceSchema = object({ + name: string(), + increment: 
string().optional(), + minValue: string().optional(), + maxValue: string().optional(), + startWith: string().optional(), + cache: string().optional(), + cycle: boolean().optional(), + schema: string(), +}).strict(); + +export const sequenceSquashed = object({ + name: string(), + schema: string(), + values: string(), +}).strict(); + +const columnV7 = object({ + name: string(), + type: string(), + typeSchema: string().optional(), + primaryKey: boolean(), + notNull: boolean(), + default: any().optional(), + isUnique: any().optional(), + uniqueName: string().optional(), + nullsNotDistinct: boolean().optional(), +}).strict(); + +const column = object({ + name: string(), + type: string(), + typeSchema: string().optional(), + primaryKey: boolean(), + notNull: boolean(), + default: any().optional(), + isUnique: any().optional(), + uniqueName: string().optional(), + nullsNotDistinct: boolean().optional(), + generated: object({ + type: literal('stored'), + as: string(), + }).optional(), + identity: sequenceSchema + .merge(object({ type: enumType(['always', 'byDefault']) })) + .optional(), +}).strict(); + +const columnSquashed = object({ + name: string(), + type: string(), + typeSchema: string().optional(), + primaryKey: boolean(), + notNull: boolean(), + default: any().optional(), + isUnique: any().optional(), + uniqueName: string().optional(), + nullsNotDistinct: boolean().optional(), + generated: object({ + type: literal('stored'), + as: string(), + }).optional(), + identity: string().optional(), +}).strict(); + +const tableV3 = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), +}).strict(); + +const compositePK = object({ + name: string(), + columns: string().array(), +}).strict(); + +const uniqueConstraint = object({ + name: string(), + columns: string().array(), + nullsNotDistinct: boolean(), +}).strict(); + +const tableV4 = object({ + name: string(), + schema: string(), + columns: 
record(string(), column), + indexes: record(string(), indexV4), + foreignKeys: record(string(), fk), +}).strict(); + +const tableV5 = object({ + name: string(), + schema: string(), + columns: record(string(), column), + indexes: record(string(), indexV5), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), +}).strict(); + +const tableV6 = object({ + name: string(), + schema: string(), + columns: record(string(), column), + indexes: record(string(), indexV6), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), +}).strict(); + +const tableV7 = object({ + name: string(), + schema: string(), + columns: record(string(), columnV7), + indexes: record(string(), index), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), +}).strict(); + +const table = object({ + name: string(), + schema: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), +}).strict(); + +const schemaHash = object({ + id: string(), + prevId: string(), +}); + +export const kitInternals = object({ + tables: record( + string(), + object({ + columns: record( + string(), + object({ + isArray: boolean().optional(), + dimensions: number().optional(), + rawType: string().optional(), + isDefaultAnExpression: boolean().optional(), + }).optional(), + ), + }).optional(), + ), +}).optional(); + +export const pgSchemaInternalV3 = object({ + version: literal('3'), + dialect: literal('pg'), + tables: record(string(), tableV3), + enums: record(string(), enumSchemaV1), +}).strict(); + +export const 
pgSchemaInternalV4 = object({ + version: literal('4'), + dialect: literal('pg'), + tables: record(string(), tableV4), + enums: record(string(), enumSchemaV1), + schemas: record(string(), string()), +}).strict(); + +// "table" -> "schema.table" for schema proper support +export const pgSchemaInternalV5 = object({ + version: literal('5'), + dialect: literal('pg'), + tables: record(string(), tableV5), + enums: record(string(), enumSchemaV1), + schemas: record(string(), string()), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const pgSchemaInternalV6 = object({ + version: literal('6'), + dialect: literal('postgresql'), + tables: record(string(), tableV6), + enums: record(string(), enumSchema), + schemas: record(string(), string()), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const pgSchemaExternal = object({ + version: literal('5'), + dialect: literal('pg'), + tables: array(table), + enums: array(enumSchemaV1), + schemas: array(object({ name: string() })), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), +}).strict(); + +export const pgSchemaInternalV7 = object({ + version: literal('7'), + dialect: literal('postgresql'), + tables: record(string(), tableV7), + enums: record(string(), enumSchema), + schemas: record(string(), string()), + sequences: record(string(), sequenceSchema), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const pgSchemaInternal = object({ + version: literal('7'), + dialect: literal('postgresql'), + tables: record(string(), table), + 
enums: record(string(), enumSchema), + schemas: record(string(), string()), + sequences: record(string(), sequenceSchema).default({}), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +const tableSquashed = object({ + name: string(), + schema: string(), + columns: record(string(), columnSquashed), + indexes: record(string(), string()), + foreignKeys: record(string(), string()), + compositePrimaryKeys: record(string(), string()), + uniqueConstraints: record(string(), string()), +}).strict(); + +const tableSquashedV4 = object({ + name: string(), + schema: string(), + columns: record(string(), column), + indexes: record(string(), string()), + foreignKeys: record(string(), string()), +}).strict(); + +export const pgSchemaSquashedV4 = object({ + version: literal('4'), + dialect: literal('pg'), + tables: record(string(), tableSquashedV4), + enums: record(string(), enumSchemaV1), + schemas: record(string(), string()), +}).strict(); + +export const pgSchemaSquashedV6 = object({ + version: literal('6'), + dialect: literal('postgresql'), + tables: record(string(), tableSquashed), + enums: record(string(), enumSchema), + schemas: record(string(), string()), +}).strict(); + +export const pgSchemaSquashed = object({ + version: literal('7'), + dialect: literal('postgresql'), + tables: record(string(), tableSquashed), + enums: record(string(), enumSchema), + schemas: record(string(), string()), + sequences: record(string(), sequenceSquashed), +}).strict(); + +export const pgSchemaV3 = pgSchemaInternalV3.merge(schemaHash); +export const pgSchemaV4 = pgSchemaInternalV4.merge(schemaHash); +export const pgSchemaV5 = pgSchemaInternalV5.merge(schemaHash); +export const pgSchemaV6 = pgSchemaInternalV6.merge(schemaHash); +export const pgSchemaV7 = pgSchemaInternalV7.merge(schemaHash); +export const pgSchema = pgSchemaInternal.merge(schemaHash); + +export 
type Enum = TypeOf; +export type Sequence = TypeOf; +export type Column = TypeOf; +export type TableV3 = TypeOf; +export type TableV4 = TypeOf; +export type TableV5 = TypeOf; +export type Table = TypeOf; +export type PgSchema = TypeOf; +export type PgSchemaInternal = TypeOf; +export type PgSchemaV6Internal = TypeOf; +export type PgSchemaExternal = TypeOf; +export type PgSchemaSquashed = TypeOf; +export type PgSchemaSquashedV4 = TypeOf; +export type PgSchemaSquashedV6 = TypeOf; +export type Index = TypeOf; +export type ForeignKey = TypeOf; +export type PrimaryKey = TypeOf; +export type UniqueConstraint = TypeOf; +export type PgKitInternals = TypeOf; + +export type PgSchemaV1 = TypeOf; +export type PgSchemaV2 = TypeOf; +export type PgSchemaV3 = TypeOf; +export type PgSchemaV4 = TypeOf; +export type PgSchemaV5 = TypeOf; +export type PgSchemaV6 = TypeOf; + +export const backwardCompatiblePgSchema = union([ + pgSchemaV5, + pgSchemaV6, + pgSchema, +]); + +export const PgSquasher = { + squashIdx: (idx: Index) => { + index.parse(idx); + return `${idx.name};${ + idx.columns + .map( + (c) => + `${c.expression}--${c.isExpression}--${c.asc}--${c.nulls}--${ + c.opclass && vectorOps.includes(c.opclass) ? c.opclass : '' + }`, + ) + .join(',,') + };${idx.isUnique};${idx.concurrently};${idx.method};${idx.where};${JSON.stringify(idx.with)}`; + }, + unsquashIdx: (input: string): Index => { + const [ + name, + columnsString, + isUnique, + concurrently, + method, + where, + idxWith, + ] = input.split(';'); + + const columnString = columnsString.split(',,'); + const columns: IndexColumnType[] = []; + + for (const column of columnString) { + const [expression, isExpression, asc, nulls, opclass] = column.split('--'); + columns.push({ + nulls: nulls as IndexColumnType['nulls'], + isExpression: isExpression === 'true', + asc: asc === 'true', + expression: expression, + opclass: opclass === 'undefined' ? 
undefined : opclass, + }); + } + + const result: Index = index.parse({ + name, + columns: columns, + isUnique: isUnique === 'true', + concurrently: concurrently === 'true', + method, + where: where === 'undefined' ? undefined : where, + with: !idxWith || idxWith === 'undefined' ? undefined : JSON.parse(idxWith), + }); + return result; + }, + squashIdxPush: (idx: Index) => { + index.parse(idx); + return `${idx.name};${ + idx.columns + .map((c) => `${c.isExpression ? '' : c.expression}--${c.asc}--${c.nulls}`) + .join(',,') + };${idx.isUnique};${idx.method};${JSON.stringify(idx.with)}`; + }, + unsquashIdxPush: (input: string): Index => { + const [name, columnsString, isUnique, method, idxWith] = input.split(';'); + + const columnString = columnsString.split('--'); + const columns: IndexColumnType[] = []; + + for (const column of columnString) { + const [expression, asc, nulls, opclass] = column.split(','); + columns.push({ + nulls: nulls as IndexColumnType['nulls'], + isExpression: expression === '', + asc: asc === 'true', + expression: expression, + }); + } + + const result: Index = index.parse({ + name, + columns: columns, + isUnique: isUnique === 'true', + concurrently: false, + method, + with: idxWith === 'undefined' ? undefined : JSON.parse(idxWith), + }); + return result; + }, + squashFK: (fk: ForeignKey) => { + return `${fk.name};${fk.tableFrom};${fk.columnsFrom.join(',')};${fk.tableTo};${fk.columnsTo.join(',')};${ + fk.onUpdate ?? '' + };${fk.onDelete ?? 
''};${fk.schemaTo || 'public'}`; + }, + squashPK: (pk: PrimaryKey) => { + return `${pk.columns.join(',')};${pk.name}`; + }, + unsquashPK: (pk: string): PrimaryKey => { + const splitted = pk.split(';'); + return { name: splitted[1], columns: splitted[0].split(',') }; + }, + squashUnique: (unq: UniqueConstraint) => { + return `${unq.name};${unq.columns.join(',')};${unq.nullsNotDistinct}`; + }, + unsquashUnique: (unq: string): UniqueConstraint => { + const [name, columns, nullsNotDistinct] = unq.split(';'); + return { + name, + columns: columns.split(','), + nullsNotDistinct: nullsNotDistinct === 'true', + }; + }, + unsquashFK: (input: string): ForeignKey => { + const [ + name, + tableFrom, + columnsFromStr, + tableTo, + columnsToStr, + onUpdate, + onDelete, + schemaTo, + ] = input.split(';'); + + const result: ForeignKey = fk.parse({ + name, + tableFrom, + columnsFrom: columnsFromStr.split(','), + schemaTo: schemaTo, + tableTo, + columnsTo: columnsToStr.split(','), + onUpdate, + onDelete, + }); + return result; + }, + squashSequence: (seq: Omit) => { + return `${seq.minValue};${seq.maxValue};${seq.increment};${seq.startWith};${seq.cache};${seq.cycle ?? ''}`; + }, + unsquashSequence: (seq: string): Omit => { + const splitted = seq.split(';'); + return { + minValue: splitted[0] !== 'undefined' ? splitted[0] : undefined, + maxValue: splitted[1] !== 'undefined' ? splitted[1] : undefined, + increment: splitted[2] !== 'undefined' ? splitted[2] : undefined, + startWith: splitted[3] !== 'undefined' ? splitted[3] : undefined, + cache: splitted[4] !== 'undefined' ? splitted[4] : undefined, + cycle: splitted[5] === 'true', + }; + }, + squashIdentity: ( + seq: Omit & { type: 'always' | 'byDefault' }, + ) => { + return `${seq.name};${seq.type};${seq.minValue};${seq.maxValue};${seq.increment};${seq.startWith};${seq.cache};${ + seq.cycle ?? 
'' + }`; + }, + unsquashIdentity: ( + seq: string, + ): Omit & { type: 'always' | 'byDefault' } => { + const splitted = seq.split(';'); + return { + name: splitted[0], + type: splitted[1] as 'always' | 'byDefault', + minValue: splitted[2] !== 'undefined' ? splitted[2] : undefined, + maxValue: splitted[3] !== 'undefined' ? splitted[3] : undefined, + increment: splitted[4] !== 'undefined' ? splitted[4] : undefined, + startWith: splitted[5] !== 'undefined' ? splitted[5] : undefined, + cache: splitted[6] !== 'undefined' ? splitted[6] : undefined, + cycle: splitted[7] === 'true', + }; + }, +}; + +export const squashPgScheme = ( + json: PgSchema, + action?: 'push' | undefined, +): PgSchemaSquashed => { + const mappedTables = Object.fromEntries( + Object.entries(json.tables).map((it) => { + const squashedIndexes = mapValues(it[1].indexes, (index) => { + return action === 'push' + ? PgSquasher.squashIdxPush(index) + : PgSquasher.squashIdx(index); + }); + + const squashedFKs = mapValues(it[1].foreignKeys, (fk) => { + return PgSquasher.squashFK(fk); + }); + + const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => { + return PgSquasher.squashPK(pk); + }); + + const mappedColumns = Object.fromEntries( + Object.entries(it[1].columns).map((it) => { + const mappedIdentity = it[1].identity + ? 
PgSquasher.squashIdentity(it[1].identity) + : undefined; + return [ + it[0], + { + ...it[1], + identity: mappedIdentity, + }, + ]; + }), + ); + + const squashedUniqueConstraints = mapValues( + it[1].uniqueConstraints, + (unq) => { + return PgSquasher.squashUnique(unq); + }, + ); + + return [ + it[0], + { + name: it[1].name, + schema: it[1].schema, + columns: mappedColumns, + indexes: squashedIndexes, + foreignKeys: squashedFKs, + compositePrimaryKeys: squashedPKs, + uniqueConstraints: squashedUniqueConstraints, + }, + ]; + }), + ); + + const mappedSequences = Object.fromEntries( + Object.entries(json.sequences).map((it) => { + return [ + it[0], + { + name: it[1].name, + schema: it[1].schema, + values: PgSquasher.squashSequence(it[1]), + }, + ]; + }), + ); + + return { + version: '7', + dialect: json.dialect, + tables: mappedTables, + enums: json.enums, + schemas: json.schemas, + sequences: mappedSequences, + }; +}; + +export const dryPg = pgSchema.parse({ + version: snapshotVersion, + dialect: 'postgresql', + id: originUUID, + prevId: '', + tables: {}, + enums: {}, + schemas: {}, + sequences: {}, + _meta: { + schemas: {}, + tables: {}, + columns: {}, + }, +}); diff --git a/drizzle-kit/src/serializer/pgSerializer.ts b/drizzle-kit/src/serializer/pgSerializer.ts new file mode 100644 index 000000000..b479e59e2 --- /dev/null +++ b/drizzle-kit/src/serializer/pgSerializer.ts @@ -0,0 +1,1317 @@ +import chalk from 'chalk'; +import { getTableName, is, SQL } from 'drizzle-orm'; +import { + AnyPgTable, + ExtraConfigColumn, + IndexedColumn, + PgColumn, + PgDialect, + PgEnum, + PgEnumColumn, + PgInteger, + PgSchema, + PgSequence, + uniqueKeyName, +} from 'drizzle-orm/pg-core'; +import { getTableConfig } from 'drizzle-orm/pg-core'; +import { vectorOps } from 'src/extensions/vector'; +import { withStyle } from '../cli/validations/outputs'; +import type { IntrospectStage, IntrospectStatus } from '../cli/views'; +import type { + Column as Column, + Enum, + ForeignKey, + Index, + 
IndexColumnType, + PgKitInternals, + PgSchemaInternal, + PrimaryKey, + Sequence, + Table, + UniqueConstraint, +} from '../serializer/pgSchema'; +import { type DB, isPgArrayType } from '../utils'; +import { sqlToStr } from '.'; + +const dialect = new PgDialect(); + +export const indexName = (tableName: string, columns: string[]) => { + return `${tableName}_${columns.join('_')}_index`; +}; + +function stringFromIdentityProperty( + field: string | number | undefined, +): string | undefined { + return typeof field === 'string' + ? (field as string) + : typeof field === 'undefined' + ? undefined + : String(field); +} + +function maxRangeForIdentityBasedOn(columnType: string) { + return columnType === 'integer' + ? '2147483647' + : columnType === 'bigint' + ? '9223372036854775807' + : '32767'; +} + +function minRangeForIdentityBasedOn(columnType: string) { + return columnType === 'integer' + ? '-2147483648' + : columnType === 'bitint' + ? '-9223372036854775808' + : '-32768'; +} + +function stringFromDatabaseIdentityProperty(field: any): string | undefined { + return typeof field === 'string' + ? (field as string) + : typeof field === 'undefined' + ? undefined + : typeof field === 'bigint' + ? field.toString() + : String(field); +} + +function buildArrayString(array: any[], sqlType: string): string { + sqlType = sqlType.split('[')[0]; + const values = array + .map((value) => { + if (typeof value === 'number' || typeof value === 'bigint') { + return value.toString(); + } else if (typeof value === 'boolean') { + return value ? 
'true' : 'false'; + } else if (Array.isArray(value)) { + return buildArrayString(value, sqlType); + } else if (value instanceof Date) { + if (sqlType === 'date') { + return `"${value.toISOString().split('T')[0]}"`; + } else if (sqlType === 'timestamp') { + return `"${ + value.toISOString() + .replace('T', ' ') + .slice(0, 23) + }"`; + } else { + return `"${value.toISOString()}"`; + } + } else if (typeof value === 'object') { + return `"${ + JSON + .stringify(value) + .replaceAll('"', '\\"') + }"`; + } + + return `"${value}"`; + }) + .join(','); + + return `{${values}}`; +} + +export const generatePgSnapshot = ( + tables: AnyPgTable[], + enums: PgEnum[], + schemas: PgSchema[], + sequences: PgSequence[], + schemaFilter?: string[], +): PgSchemaInternal => { + const result: Record = {}; + const sequencesToReturn: Record = {}; + + // This object stores unique names for indexes and will be used to detect if you have the same names for indexes + // within the same PostgreSQL schema + const indexesInSchema: Record = {}; + + for (const table of tables) { + const { + name: tableName, + columns, + indexes, + foreignKeys, + checks, + schema, + primaryKeys, + uniqueConstraints, + } = getTableConfig(table); + + if (schemaFilter && !schemaFilter.includes(schema ?? 'public')) { + continue; + } + + const columnsObject: Record = {}; + const indexesObject: Record = {}; + const foreignKeysObject: Record = {}; + const primaryKeysObject: Record = {}; + const uniqueConstraintObject: Record = {}; + + columns.forEach((column) => { + const notNull: boolean = column.notNull; + const primaryKey: boolean = column.primary; + const sqlTypeLowered = column.getSQLType().toLowerCase(); + + const typeSchema = is(column, PgEnumColumn) + ? column.enum.schema || 'public' + : undefined; + const generated = column.generated; + const identity = column.generatedIdentity; + + const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? 
'1'; + const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) + ?? (parseFloat(increment) < 0 + ? minRangeForIdentityBasedOn(column.columnType) + : '1'); + const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) + ?? (parseFloat(increment) < 0 + ? '-1' + : maxRangeForIdentityBasedOn(column.getSQLType())); + const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) + ?? (parseFloat(increment) < 0 ? maxValue : minValue); + const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1'; + + const columnToSet: Column = { + name: column.name, + type: column.getSQLType(), + typeSchema: typeSchema, + primaryKey, + notNull, + generated: generated + ? { + as: is(generated.as, SQL) + ? dialect.sqlToQuery(generated.as as SQL).sql + : typeof generated.as === 'function' + ? dialect.sqlToQuery(generated.as() as SQL).sql + : (generated.as as any), + type: 'stored', + } + : undefined, + identity: identity + ? { + type: identity.type, + name: identity.sequenceName ?? `${tableName}_${column.name}_seq`, + schema: schema ?? 'public', + increment, + startWith, + minValue, + maxValue, + cache, + cycle: identity?.sequenceOptions?.cycle ?? false, + } + : undefined, + }; + + if (column.isUnique) { + const existingUnique = uniqueConstraintObject[column.uniqueName!]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. + The unique constraint ${ + chalk.underline.blue( + column.uniqueName, + ) + } on the ${ + chalk.underline.blue( + column.name, + ) + } column is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + existingUnique.columns.join(','), + ) + } columns\n`) + }`, + ); + process.exit(1); + } + uniqueConstraintObject[column.uniqueName!] 
= { + name: column.uniqueName!, + nullsNotDistinct: column.uniqueType === 'not distinct', + columns: [columnToSet.name], + }; + } + + if (column.default !== undefined) { + if (is(column.default, SQL)) { + columnToSet.default = sqlToStr(column.default); + } else { + if (typeof column.default === 'string') { + columnToSet.default = `'${column.default}'`; + } else { + if (sqlTypeLowered === 'jsonb' || sqlTypeLowered === 'json') { + columnToSet.default = `'${ + JSON.stringify( + column.default, + ) + }'::${sqlTypeLowered}`; + } else if (column.default instanceof Date) { + if (sqlTypeLowered === 'date') { + columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; + } else if (sqlTypeLowered === 'timestamp') { + columnToSet.default = `'${ + column.default + .toISOString() + .replace('T', ' ') + .slice(0, 23) + }'`; + } else { + columnToSet.default = `'${column.default.toISOString()}'`; + } + } else if (isPgArrayType(sqlTypeLowered) && Array.isArray(column.default)) { + columnToSet.default = `'${ + buildArrayString( + column.default, + sqlTypeLowered, + ) + }'`; + } else { + // Should do for all types + // columnToSet.default = `'${column.default}'::${sqlTypeLowered}`; + columnToSet.default = column.default; + } + } + } + } + columnsObject[column.name] = columnToSet; + }); + + primaryKeys.map((pk) => { + const columnNames = pk.columns.map((c) => c.name); + primaryKeysObject[pk.getName()] = { + name: pk.getName(), + columns: columnNames, + }; + }); + + uniqueConstraints?.map((unq) => { + const columnNames = unq.columns.map((c) => c.name); + + const name = unq.name ?? uniqueKeyName(table, columnNames); + + const existingUnique = uniqueConstraintObject[name]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. 
+ The unique constraint ${ + chalk.underline.blue( + name, + ) + } on the ${ + chalk.underline.blue( + columnNames.join(','), + ) + } columns is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + existingUnique.columns.join(','), + ) + } columns\n`) + }`, + ); + process.exit(1); + } + + uniqueConstraintObject[name] = { + name: unq.name!, + nullsNotDistinct: unq.nullsNotDistinct, + columns: columnNames, + }; + }); + + const fks: ForeignKey[] = foreignKeys.map((fk) => { + const name = fk.getName(); + const tableFrom = tableName; + const onDelete = fk.onDelete; + const onUpdate = fk.onUpdate; + const reference = fk.reference(); + + const tableTo = getTableName(reference.foreignTable); + // TODO: resolve issue with schema undefined/public for db push(or squasher) + // getTableConfig(reference.foreignTable).schema || "public"; + const schemaTo = getTableConfig(reference.foreignTable).schema; + + const columnsFrom = reference.columns.map((it) => it.name); + const columnsTo = reference.foreignColumns.map((it) => it.name); + + return { + name, + tableFrom, + tableTo, + schemaTo, + columnsFrom, + columnsTo, + onDelete, + onUpdate, + } as ForeignKey; + }); + + fks.forEach((it) => { + foreignKeysObject[it.name] = it; + }); + + indexes.forEach((value) => { + const columns = value.config.columns; + + let indexColumnNames: string[] = []; + columns.forEach((it) => { + if (is(it, SQL)) { + if (typeof value.config.name === 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `Please specify an index name in ${ + getTableName( + value.config.table, + ) + } table that has "${ + dialect.sqlToQuery(it).sql + }" expression. We can generate index names for indexes on columns only; for expressions in indexes, you need to specify the name yourself.`, + ) + }`, + ); + process.exit(1); + } + } + it = it as IndexedColumn; + if ( + !is(it, SQL) + && it.type! 
=== 'PgVector' + && typeof it.indexConfig!.opClass === 'undefined' + ) { + console.log( + `\n${ + withStyle.errorWarning( + `You are specifying an index on the ${ + chalk.blueBright( + it.name, + ) + } column inside the ${ + chalk.blueBright( + tableName, + ) + } table with the ${ + chalk.blueBright( + 'vector', + ) + } type without specifying an operator class. Vector extension doesn't have a default operator class, so you need to specify one of the available options. Here is a list of available op classes for the vector extension: [${ + vectorOps + .map((it) => `${chalk.underline(`${it}`)}`) + .join( + ', ', + ) + }].\n\nYou can specify it using current syntax: ${ + chalk.underline( + `index("${value.config.name}").using("${value.config.method}", table.${it.name}.op("${ + vectorOps[0] + }"))`, + ) + }\n\nYou can check the "pg_vector" docs for more info: https://github.com/pgvector/pgvector?tab=readme-ov-file#indexing\n`, + ) + }`, + ); + process.exit(1); + } + indexColumnNames.push((it as ExtraConfigColumn).name); + }); + + const name = value.config.name + ? value.config.name + : indexName(tableName, indexColumnNames); + + let indexColumns: IndexColumnType[] = columns.map( + (it): IndexColumnType => { + if (is(it, SQL)) { + return { + expression: dialect.sqlToQuery(it, 'indexes').sql, + asc: true, + isExpression: true, + nulls: 'last', + }; + } else { + it = it as IndexedColumn; + return { + expression: it.name!, + isExpression: false, + asc: it.indexConfig?.order === 'asc', + nulls: it.indexConfig?.nulls + ? it.indexConfig?.nulls + : it.indexConfig?.order === 'desc' + ? 'first' + : 'last', + opclass: it.indexConfig?.opClass, + }; + } + }, + ); + + // check for index names duplicates + if (typeof indexesInSchema[schema ?? 'public'] !== 'undefined') { + if (indexesInSchema[schema ?? 'public'].includes(name)) { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated index name across ${ + chalk.underline.blue( + schema ?? 
'public', + ) + } schema. Please rename your index in either the ${ + chalk.underline.blue( + tableName, + ) + } table or the table with the duplicated index name`, + ) + }`, + ); + process.exit(1); + } + indexesInSchema[schema ?? 'public'].push(name); + } else { + indexesInSchema[schema ?? 'public'] = [name]; + } + + indexesObject[name] = { + name, + columns: indexColumns, + isUnique: value.config.unique ?? false, + where: value.config.where + ? dialect.sqlToQuery(value.config.where).sql + : undefined, + concurrently: value.config.concurrently ?? false, + method: value.config.method ?? 'btree', + with: value.config.with ?? {}, + }; + }); + + const tableKey = `${schema ?? 'public'}.${tableName}`; + + result[tableKey] = { + name: tableName, + schema: schema ?? '', + columns: columnsObject, + indexes: indexesObject, + foreignKeys: foreignKeysObject, + compositePrimaryKeys: primaryKeysObject, + uniqueConstraints: uniqueConstraintObject, + }; + } + + for (const sequence of sequences) { + const name = sequence.seqName!; + if ( + typeof sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] + === 'undefined' + ) { + const increment = stringFromIdentityProperty(sequence?.seqOptions?.increment) ?? '1'; + const minValue = stringFromIdentityProperty(sequence?.seqOptions?.minValue) + ?? (parseFloat(increment) < 0 ? '-9223372036854775808' : '1'); + const maxValue = stringFromIdentityProperty(sequence?.seqOptions?.maxValue) + ?? (parseFloat(increment) < 0 ? '-1' : '9223372036854775807'); + const startWith = stringFromIdentityProperty(sequence?.seqOptions?.startWith) + ?? (parseFloat(increment) < 0 ? maxValue : minValue); + const cache = stringFromIdentityProperty(sequence?.seqOptions?.cache) ?? '1'; + + sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] = { + name, + schema: sequence.schema ?? 'public', + increment, + startWith, + minValue, + maxValue, + cache, + cycle: sequence.seqOptions?.cycle ?? 
false, + }; + } else { + // duplicate seq error + } + } + + const enumsToReturn: Record = enums.reduce<{ + [key: string]: Enum; + }>((map, obj) => { + const enumSchema = obj.schema || 'public'; + const key = `${enumSchema}.${obj.enumName}`; + map[key] = { + name: obj.enumName, + schema: enumSchema, + values: obj.enumValues, + }; + return map; + }, {}); + + const schemasObject = Object.fromEntries( + schemas + .filter((it) => { + if (schemaFilter) { + return ( + schemaFilter.includes(it.schemaName) && it.schemaName !== 'public' + ); + } else { + return it.schemaName !== 'public'; + } + }) + .map((it) => [it.schemaName, it.schemaName]), + ); + + return { + version: '7', + dialect: 'postgresql', + tables: result, + enums: enumsToReturn, + schemas: schemasObject, + sequences: sequencesToReturn, + _meta: { + schemas: {}, + tables: {}, + columns: {}, + }, + }; +}; + +const trimChar = (str: string, char: string) => { + let start = 0; + let end = str.length; + + while (start < end && str[start] === char) ++start; + while (end > start && str[end - 1] === char) --end; + + // this.toString() due to ava deep equal issue with String { "value" } + return start > 0 || end < str.length + ? str.substring(start, end) + : str.toString(); +}; + +export const fromDatabase = async ( + db: DB, + tablesFilter: (table: string) => boolean = () => true, + schemaFilters: string[], + progressCallback?: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void, +): Promise => { + const result: Record = {}; + const internals: PgKitInternals = { tables: {} }; + + const where = schemaFilters.map((t) => `table_schema = '${t}'`).join(' or '); + + const allTables = await db.query( + `SELECT table_schema, table_name FROM information_schema.tables${where === '' ? 
'' : ` WHERE ${where}`};`, + ); + + const schemas = new Set(allTables.map((it) => it.table_schema)); + schemas.delete('public'); + + const allSchemas = await db.query<{ + table_schema: string; + }>(`select s.nspname as table_schema + from pg_catalog.pg_namespace s + join pg_catalog.pg_user u on u.usesysid = s.nspowner + where nspname not in ('information_schema', 'pg_catalog', 'public') + and nspname not like 'pg_toast%' + and nspname not like 'pg_temp_%' + order by table_schema;`); + + allSchemas.forEach((item) => { + if (schemaFilters.includes(item.table_schema)) { + schemas.add(item.table_schema); + } + }); + + let columnsCount = 0; + let indexesCount = 0; + let foreignKeysCount = 0; + let tableCount = 0; + + const sequencesToReturn: Record = {}; + + const seqWhere = schemaFilters.map((t) => `schemaname = '${t}'`).join(' or '); + + const allSequences = await db.query( + `select schemaname, sequencename, start_value, min_value, max_value, increment_by, cycle, cache_size from pg_sequences as seq${ + seqWhere === '' ? 
'' : ` WHERE ${seqWhere}` + };`, + ); + + for (const dbSeq of allSequences) { + const schemaName = dbSeq.schemaname; + const sequenceName = dbSeq.sequencename; + const startValue = stringFromDatabaseIdentityProperty(dbSeq.start_value); + const minValue = stringFromDatabaseIdentityProperty(dbSeq.min_value); + const maxValue = stringFromDatabaseIdentityProperty(dbSeq.max_value); + const incrementBy = stringFromDatabaseIdentityProperty(dbSeq.increment_by); + const cycle = dbSeq.cycle; + const cacheSize = stringFromDatabaseIdentityProperty(dbSeq.cache_size); + const key = `${schemaName}.${sequenceName}`; + + sequencesToReturn[key] = { + name: sequenceName, + schema: schemaName, + startWith: startValue, + minValue, + maxValue, + increment: incrementBy, + cycle, + cache: cacheSize, + }; + } + + const whereEnums = schemaFilters + .map((t) => `n.nspname = '${t}'`) + .join(' or '); + + const allEnums = await db.query( + `select n.nspname as enum_schema, + t.typname as enum_name, + e.enumlabel as enum_value, + e.enumsortorder as sort_order + from pg_type t + join pg_enum e on t.oid = e.enumtypid + join pg_catalog.pg_namespace n ON n.oid = t.typnamespace + ${whereEnums === '' ? 
'' : ` WHERE ${whereEnums}`} + order by enum_schema, enum_name, sort_order;`, + ); + + const enumsToReturn: Record = {}; + + for (const dbEnum of allEnums) { + const enumName = dbEnum.enum_name; + const enumValue = dbEnum.enum_value as string; + const enumSchema: string = dbEnum.enum_schema || 'public'; + const key = `${enumSchema}.${enumName}`; + + if (enumsToReturn[key] !== undefined && enumsToReturn[key] !== null) { + enumsToReturn[key].values.push(enumValue); + } else { + enumsToReturn[key] = { + name: enumName, + values: [enumValue], + schema: enumSchema, + }; + } + } + if (progressCallback) { + progressCallback('enums', Object.keys(enumsToReturn).length, 'done'); + } + + const sequencesInColumns: string[] = []; + + const all = allTables.map((row) => { + return new Promise(async (res, rej) => { + const tableName = row.table_name as string; + if (!tablesFilter(tableName)) return res(''); + tableCount += 1; + const tableSchema = row.table_schema; + + try { + const columnToReturn: Record = {}; + const indexToReturn: Record = {}; + const foreignKeysToReturn: Record = {}; + const primaryKeys: Record = {}; + const uniqueConstrains: Record = {}; + + const tableResponse = await db.query( + `SELECT a.attrelid::regclass::text, a.attname, is_nullable, a.attndims as array_dimensions + , CASE WHEN a.atttypid = ANY ('{int,int8,int2}'::regtype[]) + AND EXISTS ( + SELECT FROM pg_attrdef ad + WHERE ad.adrelid = a.attrelid + AND ad.adnum = a.attnum + AND pg_get_expr(ad.adbin, ad.adrelid) + = 'nextval(''' + || (pg_get_serial_sequence (a.attrelid::regclass::text + , a.attname))::regclass + || '''::regclass)' + ) + THEN CASE a.atttypid + WHEN 'int'::regtype THEN 'serial' + WHEN 'int8'::regtype THEN 'bigserial' + WHEN 'int2'::regtype THEN 'smallserial' + END + ELSE format_type(a.atttypid, a.atttypmod) + END AS data_type, INFORMATION_SCHEMA.COLUMNS.table_name, ns.nspname as type_schema, + pg_get_serial_sequence('"${tableSchema}"."${tableName}"', a.attname)::regclass as seq_name, 
INFORMATION_SCHEMA.COLUMNS.column_name, + INFORMATION_SCHEMA.COLUMNS.column_default, INFORMATION_SCHEMA.COLUMNS.data_type as additional_dt, + INFORMATION_SCHEMA.COLUMNS.udt_name as enum_name, + INFORMATION_SCHEMA.COLUMNS.is_generated, generation_expression, + INFORMATION_SCHEMA.COLUMNS.is_identity,INFORMATION_SCHEMA.COLUMNS.identity_generation, + INFORMATION_SCHEMA.COLUMNS.identity_start, INFORMATION_SCHEMA.COLUMNS.identity_increment, + INFORMATION_SCHEMA.COLUMNS.identity_maximum, INFORMATION_SCHEMA.COLUMNS.identity_minimum, + INFORMATION_SCHEMA.COLUMNS.identity_cycle + FROM pg_attribute a + JOIN INFORMATION_SCHEMA.COLUMNS ON INFORMATION_SCHEMA.COLUMNS.column_name = a.attname + JOIN pg_type t ON t.oid = a.atttypid LEFT JOIN pg_namespace ns ON ns.oid = t.typnamespace + WHERE a.attrelid = '"${tableSchema}"."${tableName}"'::regclass and INFORMATION_SCHEMA.COLUMNS.table_name = '${tableName}' and INFORMATION_SCHEMA.COLUMNS.table_schema = '${tableSchema}' + AND a.attnum > 0 + AND NOT a.attisdropped + ORDER BY a.attnum;`, + ); + + const tableConstraints = await db.query( + `SELECT c.column_name, c.data_type, constraint_type, constraint_name, constraint_schema + FROM information_schema.table_constraints tc + JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_name) + JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema + AND tc.table_name = c.table_name AND ccu.column_name = c.column_name + WHERE tc.table_name = '${tableName}' and constraint_schema = '${tableSchema}';`, + ); + + columnsCount += tableResponse.length; + if (progressCallback) { + progressCallback('columns', columnsCount, 'fetching'); + } + + const tableForeignKeys = await db.query( + `SELECT + con.contype AS constraint_type, + nsp.nspname AS constraint_schema, + con.conname AS constraint_name, + rel.relname AS table_name, + att.attname AS column_name, + fnsp.nspname AS foreign_table_schema, + frel.relname AS foreign_table_name, + fatt.attname AS 
foreign_column_name, + CASE con.confupdtype + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'n' THEN 'SET NULL' + WHEN 'c' THEN 'CASCADE' + WHEN 'd' THEN 'SET DEFAULT' + END AS update_rule, + CASE con.confdeltype + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'n' THEN 'SET NULL' + WHEN 'c' THEN 'CASCADE' + WHEN 'd' THEN 'SET DEFAULT' + END AS delete_rule + FROM + pg_catalog.pg_constraint con + JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid + JOIN pg_catalog.pg_namespace nsp ON nsp.oid = con.connamespace + LEFT JOIN pg_catalog.pg_attribute att ON att.attnum = ANY (con.conkey) + AND att.attrelid = con.conrelid + LEFT JOIN pg_catalog.pg_class frel ON frel.oid = con.confrelid + LEFT JOIN pg_catalog.pg_namespace fnsp ON fnsp.oid = frel.relnamespace + LEFT JOIN pg_catalog.pg_attribute fatt ON fatt.attnum = ANY (con.confkey) + AND fatt.attrelid = con.confrelid + WHERE + nsp.nspname = '${tableSchema}' + AND rel.relname = '${tableName}' + AND con.contype IN ('f');`, + ); + + foreignKeysCount += tableForeignKeys.length; + if (progressCallback) { + progressCallback('fks', foreignKeysCount, 'fetching'); + } + for (const fk of tableForeignKeys) { + // const tableFrom = fk.table_name; + const columnFrom: string = fk.column_name; + const tableTo = fk.foreign_table_name; + const columnTo: string = fk.foreign_column_name; + const schemaTo: string = fk.foreign_table_schema; + const foreignKeyName = fk.constraint_name; + const onUpdate = fk.update_rule?.toLowerCase(); + const onDelete = fk.delete_rule?.toLowerCase(); + + if (typeof foreignKeysToReturn[foreignKeyName] !== 'undefined') { + foreignKeysToReturn[foreignKeyName].columnsFrom.push(columnFrom); + foreignKeysToReturn[foreignKeyName].columnsTo.push(columnTo); + } else { + foreignKeysToReturn[foreignKeyName] = { + name: foreignKeyName, + tableFrom: tableName, + tableTo, + schemaTo, + columnsFrom: [columnFrom], + columnsTo: [columnTo], + onDelete, + onUpdate, + }; + } + + 
foreignKeysToReturn[foreignKeyName].columnsFrom = [ + ...new Set(foreignKeysToReturn[foreignKeyName].columnsFrom), + ]; + + foreignKeysToReturn[foreignKeyName].columnsTo = [ + ...new Set(foreignKeysToReturn[foreignKeyName].columnsTo), + ]; + } + + const uniqueConstrainsRows = tableConstraints.filter( + (mapRow) => mapRow.constraint_type === 'UNIQUE', + ); + + for (const unqs of uniqueConstrainsRows) { + // const tableFrom = fk.table_name; + const columnName: string = unqs.column_name; + const constraintName: string = unqs.constraint_name; + + if (typeof uniqueConstrains[constraintName] !== 'undefined') { + uniqueConstrains[constraintName].columns.push(columnName); + } else { + uniqueConstrains[constraintName] = { + columns: [columnName], + nullsNotDistinct: false, + name: constraintName, + }; + } + } + + for (const columnResponse of tableResponse) { + const columnName = columnResponse.attname; + const columnAdditionalDT = columnResponse.additional_dt; + const columnDimensions = columnResponse.array_dimensions; + const enumType: string = columnResponse.enum_name; + let columnType: string = columnResponse.data_type; + const typeSchema = columnResponse.type_schema; + const defaultValueRes: string = columnResponse.column_default; + + const isGenerated = columnResponse.is_generated === 'ALWAYS'; + const generationExpression = columnResponse.generation_expression; + const isIdentity = columnResponse.is_identity === 'YES'; + const identityGeneration = columnResponse.identity_generation === 'ALWAYS' + ? 
'always' + : 'byDefault'; + const identityStart = columnResponse.identity_start; + const identityIncrement = columnResponse.identity_increment; + const identityMaximum = columnResponse.identity_maximum; + const identityMinimum = columnResponse.identity_minimum; + const identityCycle = columnResponse.identity_cycle === 'YES'; + const identityName = columnResponse.seq_name; + + const primaryKey = tableConstraints.filter( + (mapRow) => + columnName === mapRow.column_name + && mapRow.constraint_type === 'PRIMARY KEY', + ); + + const cprimaryKey = tableConstraints.filter( + (mapRow) => mapRow.constraint_type === 'PRIMARY KEY', + ); + + if (cprimaryKey.length > 1) { + const tableCompositePkName = await db.query( + `SELECT conname AS primary_key + FROM pg_constraint join pg_class on (pg_class.oid = conrelid) + WHERE contype = 'p' + AND connamespace = $1::regnamespace + AND pg_class.relname = $2;`, + [tableSchema, tableName], + ); + primaryKeys[tableCompositePkName[0].primary_key] = { + name: tableCompositePkName[0].primary_key, + columns: cprimaryKey.map((c: any) => c.column_name), + }; + } + + let columnTypeMapped = columnType; + + // Set default to internal object + if (columnAdditionalDT === 'ARRAY') { + if (typeof internals.tables[tableName] === 'undefined') { + internals.tables[tableName] = { + columns: { + [columnName]: { + isArray: true, + dimensions: columnDimensions, + rawType: columnTypeMapped.substring( + 0, + columnTypeMapped.length - 2, + ), + }, + }, + }; + } else { + if ( + typeof internals.tables[tableName]!.columns[columnName] + === 'undefined' + ) { + internals.tables[tableName]!.columns[columnName] = { + isArray: true, + dimensions: columnDimensions, + rawType: columnTypeMapped.substring( + 0, + columnTypeMapped.length - 2, + ), + }; + } + } + } + + const defaultValue = defaultForColumn( + columnResponse, + internals, + tableName, + ); + if ( + defaultValue === 'NULL' + || (defaultValueRes && defaultValueRes.startsWith('(') && 
defaultValueRes.endsWith(')')) + ) { + if (typeof internals!.tables![tableName] === 'undefined') { + internals!.tables![tableName] = { + columns: { + [columnName]: { + isDefaultAnExpression: true, + }, + }, + }; + } else { + if ( + typeof internals!.tables![tableName]!.columns[columnName] + === 'undefined' + ) { + internals!.tables![tableName]!.columns[columnName] = { + isDefaultAnExpression: true, + }; + } else { + internals!.tables![tableName]!.columns[ + columnName + ]!.isDefaultAnExpression = true; + } + } + } + + const isSerial = columnType === 'serial'; + + if (columnTypeMapped.startsWith('numeric(')) { + columnTypeMapped = columnTypeMapped.replace(',', ', '); + } + + if (columnAdditionalDT === 'ARRAY') { + for (let i = 1; i < Number(columnDimensions); i++) { + columnTypeMapped += '[]'; + } + } + + columnTypeMapped = columnTypeMapped + .replace('character varying', 'varchar') + .replace(' without time zone', '') + // .replace("timestamp without time zone", "timestamp") + .replace('character', 'char'); + + columnTypeMapped = trimChar(columnTypeMapped, '"'); + + columnToReturn[columnName] = { + name: columnName, + type: + // filter vectors, but in future we should filter any extension that was installed by user + columnAdditionalDT === 'USER-DEFINED' + && !['vector', 'geometry'].includes(enumType) + ? enumType + : columnTypeMapped, + typeSchema: enumsToReturn[`${typeSchema}.${enumType}`] !== undefined + ? enumsToReturn[`${typeSchema}.${enumType}`].schema + : undefined, + primaryKey: primaryKey.length === 1 && cprimaryKey.length < 2, + // default: isSerial ? undefined : defaultValue, + notNull: columnResponse.is_nullable === 'NO', + generated: isGenerated + ? { as: generationExpression, type: 'stored' } + : undefined, + identity: isIdentity + ? 
{ + type: identityGeneration, + name: identityName, + increment: stringFromDatabaseIdentityProperty(identityIncrement), + minValue: stringFromDatabaseIdentityProperty(identityMinimum), + maxValue: stringFromDatabaseIdentityProperty(identityMaximum), + startWith: stringFromDatabaseIdentityProperty(identityStart), + cache: sequencesToReturn[identityName]?.cache + ? sequencesToReturn[identityName]?.cache + : sequencesToReturn[`${tableSchema}.${identityName}`]?.cache + ? sequencesToReturn[`${tableSchema}.${identityName}`]?.cache + : undefined, + cycle: identityCycle, + schema: tableSchema, + } + : undefined, + }; + + if (identityName) { + // remove "" from sequence name + delete sequencesToReturn[ + `${tableSchema}.${ + identityName.startsWith('"') && identityName.endsWith('"') ? identityName.slice(1, -1) : identityName + }` + ]; + delete sequencesToReturn[identityName]; + } + + if (!isSerial && typeof defaultValue !== 'undefined') { + columnToReturn[columnName].default = defaultValue; + } + } + + const dbIndexes = await db.query( + `SELECT DISTINCT ON (t.relname, ic.relname, k.i) t.relname as table_name, ic.relname AS indexname, + k.i AS index_order, + i.indisunique as is_unique, + am.amname as method, + ic.reloptions as with, + coalesce(a.attname, + (('{' || pg_get_expr( + i.indexprs, + i.indrelid + ) + || '}')::text[] + )[k.i] + ) AS column_name, + CASE + WHEN pg_get_expr(i.indexprs, i.indrelid) IS NOT NULL THEN 1 + ELSE 0 + END AS is_expression, + i.indoption[k.i-1] & 1 = 1 AS descending, + i.indoption[k.i-1] & 2 = 2 AS nulls_first, + pg_get_expr( + i.indpred, + i.indrelid + ) as where, + opc.opcname + FROM pg_class t + LEFT JOIN pg_index i ON t.oid = i.indrelid + LEFT JOIN pg_class ic ON ic.oid = i.indexrelid + CROSS JOIN LATERAL (SELECT unnest(i.indkey), generate_subscripts(i.indkey, 1) + 1) AS k(attnum, i) + LEFT JOIN pg_attribute AS a + ON i.indrelid = a.attrelid AND k.attnum = a.attnum + JOIN pg_namespace c on c.oid = t.relnamespace + LEFT JOIN pg_am AS am ON 
ic.relam = am.oid + JOIN pg_opclass opc ON opc.oid = ANY(i.indclass) + WHERE + c.nspname = '${tableSchema}' AND + t.relname = '${tableName}';`, + ); + + const dbIndexFromConstraint = await db.query( + `SELECT + idx.indexrelname AS index_name, + idx.relname AS table_name, + schemaname, + CASE WHEN con.conname IS NOT NULL THEN 1 ELSE 0 END AS generated_by_constraint + FROM + pg_stat_user_indexes idx + LEFT JOIN + pg_constraint con ON con.conindid = idx.indexrelid + WHERE idx.relname = '${tableName}' and schemaname = '${tableSchema}' + group by index_name, table_name,schemaname, generated_by_constraint;`, + ); + + const idxsInConsteraint = dbIndexFromConstraint + .filter((it) => it.generated_by_constraint === 1) + .map((it) => it.index_name); + + for (const dbIndex of dbIndexes) { + const indexName: string = dbIndex.indexname; + const indexColumnName: string = dbIndex.column_name; + const indexIsUnique = dbIndex.is_unique; + const indexMethod = dbIndex.method; + const indexWith: string[] = dbIndex.with; + const indexWhere: string = dbIndex.where; + const opclass: string = dbIndex.opcname; + const isExpression = dbIndex.is_expression === 1; + + const desc: boolean = dbIndex.descending; + const nullsFirst: boolean = dbIndex.nulls_first; + + const mappedWith: Record = {}; + + if (indexWith !== null) { + indexWith + // .slice(1, indexWith.length - 1) + // .split(",") + .forEach((it) => { + const splitted = it.split('='); + mappedWith[splitted[0]] = splitted[1]; + }); + } + + if (idxsInConsteraint.includes(indexName)) continue; + + if (typeof indexToReturn[indexName] !== 'undefined') { + indexToReturn[indexName].columns.push({ + expression: indexColumnName, + asc: !desc, + nulls: nullsFirst ? 'first' : 'last', + opclass, + isExpression, + }); + } else { + indexToReturn[indexName] = { + name: indexName, + columns: [ + { + expression: indexColumnName, + asc: !desc, + nulls: nullsFirst ? 
'first' : 'last', + opclass, + isExpression, + }, + ], + isUnique: indexIsUnique, + // should not be a part of diff detecs + concurrently: false, + method: indexMethod, + where: indexWhere === null ? undefined : indexWhere, + with: mappedWith, + }; + } + } + + indexesCount += Object.keys(indexToReturn).length; + if (progressCallback) { + progressCallback('indexes', indexesCount, 'fetching'); + } + result[`${tableSchema}.${tableName}`] = { + name: tableName, + schema: tableSchema !== 'public' ? tableSchema : '', + columns: columnToReturn, + indexes: indexToReturn, + foreignKeys: foreignKeysToReturn, + compositePrimaryKeys: primaryKeys, + uniqueConstraints: uniqueConstrains, + }; + } catch (e) { + rej(e); + return; + } + res(''); + }); + }); + + if (progressCallback) { + progressCallback('tables', tableCount, 'done'); + } + + for await (const _ of all) { + } + + if (progressCallback) { + progressCallback('columns', columnsCount, 'done'); + progressCallback('indexes', indexesCount, 'done'); + progressCallback('fks', foreignKeysCount, 'done'); + } + + const schemasObject = Object.fromEntries([...schemas].map((it) => [it, it])); + + return { + version: '7', + dialect: 'postgresql', + tables: result, + enums: enumsToReturn, + schemas: schemasObject, + sequences: sequencesToReturn, + _meta: { + schemas: {}, + tables: {}, + columns: {}, + }, + internal: internals, + }; +}; + +const defaultForColumn = (column: any, internals: PgKitInternals, tableName: string) => { + const columnName = column.attname; + const isArray = internals?.tables[tableName]?.columns[columnName]?.isArray ?? 
false; + + if (column.column_default === null) { + return undefined; + } + + if ( + column.data_type === 'serial' + || column.data_type === 'smallserial' + || column.data_type === 'bigserial' + ) { + return undefined; + } + + if (column.column_default.endsWith('[]')) { + column.column_default = column.column_default.slice(0, -2); + } + + // if ( + // !['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(column.data_type) + // ) { + column.column_default = column.column_default.replace(/::(.*?)(? { + if (['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(column.data_type.slice(0, -2))) { + return value; + } else if (column.data_type.startsWith('timestamp')) { + return `${value}`; + } else if (column.data_type.slice(0, -2) === 'interval') { + return value.replaceAll('"', `\"`); + } else if (column.data_type.slice(0, -2) === 'boolean') { + return value === 't' ? 'true' : 'false'; + } else if (['json', 'jsonb'].includes(column.data_type.slice(0, -2))) { + return JSON.stringify(JSON.stringify(JSON.parse(JSON.parse(value)), null, 0)); + } else { + return `\"${value}\"`; + } + }) + .join(',') + }}'`; + } + + if ( + ['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(column.data_type) + ) { + if (/^-?[\d.]+(?:e-?\d+)?$/.test(columnDefaultAsString)) { + return Number(columnDefaultAsString); + } else { + if (typeof internals!.tables![tableName] === 'undefined') { + internals!.tables![tableName] = { + columns: { + [columnName]: { + isDefaultAnExpression: true, + }, + }, + }; + } else { + if ( + typeof internals!.tables![tableName]!.columns[columnName] + === 'undefined' + ) { + internals!.tables![tableName]!.columns[columnName] = { + isDefaultAnExpression: true, + }; + } else { + internals!.tables![tableName]!.columns[ + columnName + ]!.isDefaultAnExpression = true; + } + } + return columnDefaultAsString; + } + } else if (column.data_type === 'json' || column.data_type === 'jsonb') { + const jsonWithoutSpaces = 
JSON.stringify(JSON.parse(columnDefaultAsString.slice(1, -1))); + return `'${jsonWithoutSpaces}'::${column.data_type}`; + } else if (column.data_type === 'boolean') { + return column.column_default === 'true'; + } else if (columnDefaultAsString === 'NULL') { + return `NULL`; + } else if (columnDefaultAsString.startsWith("'") && columnDefaultAsString.endsWith("'")) { + return columnDefaultAsString; + } else { + return `${columnDefaultAsString.replace(/\\/g, '\`\\')}`; + } +}; diff --git a/drizzle-kit/src/serializer/singlestoreImports.ts b/drizzle-kit/src/serializer/singlestoreImports.ts new file mode 100644 index 000000000..3fcb1e65f --- /dev/null +++ b/drizzle-kit/src/serializer/singlestoreImports.ts @@ -0,0 +1,31 @@ +import { is } from 'drizzle-orm'; +import { AnySingleStoreTable, SingleStoreTable } from 'drizzle-orm/singlestore-core'; +import { safeRegister } from '../cli/commands/utils'; + +export const prepareFromExports = (exports: Record) => { + const tables: AnySingleStoreTable[] = []; + + const i0values = Object.values(exports); + i0values.forEach((t) => { + if (is(t, SingleStoreTable)) { + tables.push(t); + } + }); + + return { tables }; +}; + +export const prepareFromSingleStoreImports = async (imports: string[]) => { + const tables: AnySingleStoreTable[] = []; + + const { unregister } = await safeRegister(); + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + const i0: Record = require(`${it}`); + const prepared = prepareFromExports(i0); + + tables.push(...prepared.tables); + } + unregister(); + return { tables: Array.from(new Set(tables)) }; +}; diff --git a/drizzle-kit/src/serializer/singlestoreSchema.ts b/drizzle-kit/src/serializer/singlestoreSchema.ts new file mode 100644 index 000000000..a0bbae1bf --- /dev/null +++ b/drizzle-kit/src/serializer/singlestoreSchema.ts @@ -0,0 +1,203 @@ +import { any, boolean, enum as enumType, literal, object, record, string, TypeOf, union } from 'zod'; +import { mapValues, originUUID, 
snapshotVersion } from '../global'; + +// ------- V3 -------- +const index = object({ + name: string(), + columns: string().array(), + isUnique: boolean(), + using: enumType(['btree', 'hash']).optional(), + algorithm: enumType(['default', 'inplace', 'copy']).optional(), + lock: enumType(['default', 'none', 'shared', 'exclusive']).optional(), +}).strict(); + +const column = object({ + name: string(), + type: string(), + primaryKey: boolean(), + notNull: boolean(), + autoincrement: boolean().optional(), + default: any().optional(), + onUpdate: any().optional(), + generated: object({ + type: enumType(['stored', 'virtual']), + as: string(), + }).optional(), +}).strict(); + +const compositePK = object({ + name: string(), + columns: string().array(), +}).strict(); + +const uniqueConstraint = object({ + name: string(), + columns: string().array(), +}).strict(); + +const table = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), index), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), +}).strict(); + +export const kitInternals = object({ + tables: record( + string(), + object({ + columns: record( + string(), + object({ isDefaultAnExpression: boolean().optional() }).optional(), + ), + }).optional(), + ).optional(), + indexes: record( + string(), + object({ + columns: record( + string(), + object({ isExpression: boolean().optional() }).optional(), + ), + }).optional(), + ).optional(), +}).optional(); + +// use main dialect +const dialect = literal('singlestore'); + +const schemaHash = object({ + id: string(), + prevId: string(), +}); + +export const schemaInternal = object({ + version: literal('1'), + dialect: dialect, + tables: record(string(), table), + _meta: object({ + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const schema = schemaInternal.merge(schemaHash); + +const 
tableSquashed = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), string()), + compositePrimaryKeys: record(string(), string()), + uniqueConstraints: record(string(), string()).default({}), +}).strict(); + +export const schemaSquashed = object({ + version: literal('1'), + dialect: dialect, + tables: record(string(), tableSquashed), +}).strict(); + +export type Dialect = TypeOf; +export type Column = TypeOf; +export type Table = TypeOf; +export type SingleStoreSchema = TypeOf; +export type SingleStoreSchemaInternal = TypeOf; +export type SingleStoreKitInternals = TypeOf; +export type SingleStoreSchemaSquashed = TypeOf; +export type Index = TypeOf; +export type PrimaryKey = TypeOf; +export type UniqueConstraint = TypeOf; + +export const SingleStoreSquasher = { + squashIdx: (idx: Index) => { + index.parse(idx); + return `${idx.name};${idx.columns.join(',')};${idx.isUnique};${idx.using ?? ''};${idx.algorithm ?? ''};${ + idx.lock ?? '' + }`; + }, + unsquashIdx: (input: string): Index => { + const [name, columnsString, isUnique, using, algorithm, lock] = input.split(';'); + const destructed = { + name, + columns: columnsString.split(','), + isUnique: isUnique === 'true', + using: using ? using : undefined, + algorithm: algorithm ? algorithm : undefined, + lock: lock ? 
lock : undefined, + }; + return index.parse(destructed); + }, + squashPK: (pk: PrimaryKey) => { + return `${pk.name};${pk.columns.join(',')}`; + }, + unsquashPK: (pk: string): PrimaryKey => { + const splitted = pk.split(';'); + return { name: splitted[0], columns: splitted[1].split(',') }; + }, + squashUnique: (unq: UniqueConstraint) => { + return `${unq.name};${unq.columns.join(',')}`; + }, + unsquashUnique: (unq: string): UniqueConstraint => { + const [name, columns] = unq.split(';'); + return { name, columns: columns.split(',') }; + }, +}; + +export const squashSingleStoreScheme = (json: SingleStoreSchema): SingleStoreSchemaSquashed => { + const mappedTables = Object.fromEntries( + Object.entries(json.tables).map((it) => { + const squashedIndexes = mapValues(it[1].indexes, (index) => { + return SingleStoreSquasher.squashIdx(index); + }); + + const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => { + return SingleStoreSquasher.squashPK(pk); + }); + + const squashedUniqueConstraints = mapValues( + it[1].uniqueConstraints, + (unq) => { + return SingleStoreSquasher.squashUnique(unq); + }, + ); + + return [ + it[0], + { + name: it[1].name, + columns: it[1].columns, + indexes: squashedIndexes, + compositePrimaryKeys: squashedPKs, + uniqueConstraints: squashedUniqueConstraints, + }, + ]; + }), + ); + return { + version: '1', + dialect: json.dialect, + tables: mappedTables, + }; +}; + +export const singlestoreSchema = schema; +export const singlestoreSchemaSquashed = schemaSquashed; + +// no prev version +export const backwardCompatibleSingleStoreSchema = union([singlestoreSchema, schema]); + +export const drySingleStore = singlestoreSchema.parse({ + version: '1', + dialect: 'singlestore', + id: originUUID, + prevId: '', + tables: {}, + schemas: {}, + _meta: { + schemas: {}, + tables: {}, + columns: {}, + }, +}); diff --git a/drizzle-kit/src/serializer/singlestoreSerializer.ts b/drizzle-kit/src/serializer/singlestoreSerializer.ts new file mode 100644 index 
000000000..fc91becf2 --- /dev/null +++ b/drizzle-kit/src/serializer/singlestoreSerializer.ts @@ -0,0 +1,604 @@ +import chalk from 'chalk'; +import { is, SQL } from 'drizzle-orm'; +import { + AnySingleStoreTable, + getTableConfig, + type PrimaryKey as PrimaryKeyORM, + SingleStoreDialect, + uniqueKeyName, +} from 'drizzle-orm/singlestore-core'; +import { RowDataPacket } from 'mysql2/promise'; +import { withStyle } from '../cli/validations/outputs'; +import { IntrospectStage, IntrospectStatus } from '../cli/views'; + +import type { DB } from '../utils'; +import { sqlToStr } from '.'; +import { + Column, + Index, + PrimaryKey, + SingleStoreKitInternals, + SingleStoreSchemaInternal, + Table, + UniqueConstraint, +} from './singlestoreSchema'; +// import { SingleStoreColumnWithAutoIncrement } from "drizzle-orm/mysql-core"; +// import { SingleStoreDateBaseColumn } from "drizzle-orm/mysql-core"; + +const dialect = new SingleStoreDialect(); + +export const indexName = (tableName: string, columns: string[]) => { + return `${tableName}_${columns.join('_')}_index`; +}; + +export const generateSingleStoreSnapshot = ( + tables: AnySingleStoreTable[], +): SingleStoreSchemaInternal => { + const result: Record = {}; + const internal: SingleStoreKitInternals = { tables: {}, indexes: {} }; + for (const table of tables) { + const { + name: tableName, + columns, + indexes, + schema, + primaryKeys, + uniqueConstraints, + } = getTableConfig(table); + const columnsObject: Record = {}; + const indexesObject: Record = {}; + const primaryKeysObject: Record = {}; + const uniqueConstraintObject: Record = {}; + + columns.forEach((column) => { + const notNull: boolean = column.notNull; + const sqlTypeLowered = column.getSQLType().toLowerCase(); + const autoIncrement = typeof (column as any).autoIncrement === 'undefined' + ? 
false + : (column as any).autoIncrement; + + const generated = column.generated; + + const columnToSet: Column = { + name: column.name, + type: column.getSQLType(), + primaryKey: false, + // If field is autoincrement it's notNull by default + // notNull: autoIncrement ? true : notNull, + notNull, + autoincrement: autoIncrement, + onUpdate: (column as any).hasOnUpdateNow, + generated: generated + ? { + as: is(generated.as, SQL) + ? dialect.sqlToQuery(generated.as as SQL).sql + : typeof generated.as === 'function' + ? dialect.sqlToQuery(generated.as() as SQL).sql + : (generated.as as any), + type: generated.mode ?? 'stored', + } + : undefined, + }; + + if (column.primary) { + primaryKeysObject[`${tableName}_${column.name}`] = { + name: `${tableName}_${column.name}`, + columns: [column.name], + }; + } + + if (column.isUnique) { + const existingUnique = uniqueConstraintObject[column.uniqueName!]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. + The unique constraint ${ + chalk.underline.blue( + column.uniqueName, + ) + } on the ${ + chalk.underline.blue( + column.name, + ) + } column is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + existingUnique.columns.join(','), + ) + } columns\n`) + }`, + ); + process.exit(1); + } + uniqueConstraintObject[column.uniqueName!] 
= { + name: column.uniqueName!, + columns: [columnToSet.name], + }; + } + + if (column.default !== undefined) { + if (is(column.default, SQL)) { + columnToSet.default = sqlToStr(column.default); + } else { + if (typeof column.default === 'string') { + columnToSet.default = `'${column.default}'`; + } else { + if (sqlTypeLowered === 'json') { + columnToSet.default = `'${JSON.stringify(column.default)}'`; + } else if (column.default instanceof Date) { + if (sqlTypeLowered === 'date') { + columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; + } else if ( + sqlTypeLowered.startsWith('datetime') + || sqlTypeLowered.startsWith('timestamp') + ) { + columnToSet.default = `'${ + column.default + .toISOString() + .replace('T', ' ') + .slice(0, 23) + }'`; + } + } else { + columnToSet.default = column.default; + } + } + if (['blob', 'text', 'json'].includes(column.getSQLType())) { + columnToSet.default = `(${columnToSet.default})`; + } + } + } + columnsObject[column.name] = columnToSet; + }); + + primaryKeys.map((pk: PrimaryKeyORM) => { + const columnNames = pk.columns.map((c: any) => c.name); + primaryKeysObject[pk.getName()] = { + name: pk.getName(), + columns: columnNames, + }; + + // all composite pk's should be treated as notNull + for (const column of pk.columns) { + columnsObject[column.name].notNull = true; + } + }); + + uniqueConstraints?.map((unq) => { + const columnNames = unq.columns.map((c) => c.name); + + const name = unq.name ?? uniqueKeyName(table, columnNames); + + const existingUnique = uniqueConstraintObject[name]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. 
\nThe unique constraint ${ + chalk.underline.blue( + name, + ) + } on the ${ + chalk.underline.blue( + columnNames.join(','), + ) + } columns is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + existingUnique.columns.join(','), + ) + } columns\n`, + ) + }`, + ); + process.exit(1); + } + + uniqueConstraintObject[name] = { + name: unq.name!, + columns: columnNames, + }; + }); + + indexes.forEach((value) => { + const columns = value.config.columns; + const name = value.config.name; + + let indexColumns = columns.map((it) => { + if (is(it, SQL)) { + const sql = dialect.sqlToQuery(it, 'indexes').sql; + if (typeof internal!.indexes![name] === 'undefined') { + internal!.indexes![name] = { + columns: { + [sql]: { + isExpression: true, + }, + }, + }; + } else { + if (typeof internal!.indexes![name]?.columns[sql] === 'undefined') { + internal!.indexes![name]!.columns[sql] = { + isExpression: true, + }; + } else { + internal!.indexes![name]!.columns[sql]!.isExpression = true; + } + } + return sql; + } else { + return `${it.name}`; + } + }); + + if (value.config.unique) { + if (typeof uniqueConstraintObject[name] !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. \nThe unique index ${ + chalk.underline.blue( + name, + ) + } on the ${ + chalk.underline.blue( + indexColumns.join(','), + ) + } columns is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + uniqueConstraintObject[name].columns.join(','), + ) + } columns\n`, + ) + }`, + ); + process.exit(1); + } + } + + indexesObject[name] = { + name, + columns: indexColumns, + isUnique: value.config.unique ?? 
false, + using: value.config.using, + algorithm: value.config.algorythm, + lock: value.config.lock, + }; + }); + + // only handle tables without schemas + if (!schema) { + result[tableName] = { + name: tableName, + columns: columnsObject, + indexes: indexesObject, + compositePrimaryKeys: primaryKeysObject, + uniqueConstraints: uniqueConstraintObject, + }; + } + } + + return { + version: '1', + dialect: 'singlestore', + tables: result, + _meta: { + tables: {}, + columns: {}, + }, + internal, + }; +}; + +function clearDefaults(defaultValue: any, collate: string) { + if (typeof collate === 'undefined' || collate === null) { + collate = `utf8mb4`; + } + + let resultDefault = defaultValue; + collate = `_${collate}`; + if (defaultValue.startsWith(collate)) { + resultDefault = resultDefault + .substring(collate.length, defaultValue.length) + .replace(/\\/g, ''); + if (resultDefault.startsWith("'") && resultDefault.endsWith("'")) { + return `('${resultDefault.substring(1, resultDefault.length - 1)}')`; + } else { + return `'${resultDefault}'`; + } + } else { + return `(${resultDefault})`; + } +} + +export const fromDatabase = async ( + db: DB, + inputSchema: string, + tablesFilter: (table: string) => boolean = (table) => true, + progressCallback?: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void, +): Promise => { + const result: Record = {}; + const internals: SingleStoreKitInternals = { tables: {}, indexes: {} }; + + const columns = await db.query(`select * from information_schema.columns + where table_schema = '${inputSchema}' and table_name != '__drizzle_migrations' + order by table_name, ordinal_position;`); + + const response = columns as RowDataPacket[]; + + const schemas: string[] = []; + + let columnsCount = 0; + let tablesCount = new Set(); + let indexesCount = 0; + let foreignKeysCount = 0; + + const idxs = await db.query( + `select * from INFORMATION_SCHEMA.STATISTICS + WHERE INFORMATION_SCHEMA.STATISTICS.TABLE_SCHEMA = 
'${inputSchema}' and INFORMATION_SCHEMA.STATISTICS.INDEX_NAME != 'PRIMARY';`, + ); + + const idxRows = idxs as RowDataPacket[]; + + for (const column of response) { + if (!tablesFilter(column['TABLE_NAME'] as string)) continue; + + columnsCount += 1; + if (progressCallback) { + progressCallback('columns', columnsCount, 'fetching'); + } + const schema: string = column['TABLE_SCHEMA']; + const tableName = column['TABLE_NAME']; + + tablesCount.add(`${schema}.${tableName}`); + if (progressCallback) { + progressCallback('columns', tablesCount.size, 'fetching'); + } + const columnName: string = column['COLUMN_NAME']; + const isNullable = column['IS_NULLABLE'] === 'YES'; // 'YES', 'NO' + const dataType = column['DATA_TYPE']; // varchar + const columnType = column['COLUMN_TYPE']; // varchar(256) + const isPrimary = column['COLUMN_KEY'] === 'PRI'; // 'PRI', '' + const columnDefault: string = column['COLUMN_DEFAULT']; + const collation: string = column['CHARACTER_SET_NAME']; + const geenratedExpression: string = column['GENERATION_EXPRESSION']; + + let columnExtra = column['EXTRA']; + let isAutoincrement = false; // 'auto_increment', '' + let isDefaultAnExpression = false; // 'auto_increment', '' + + if (typeof column['EXTRA'] !== 'undefined') { + columnExtra = column['EXTRA']; + isAutoincrement = column['EXTRA'] === 'auto_increment'; // 'auto_increment', '' + isDefaultAnExpression = column['EXTRA'].includes('DEFAULT_GENERATED'); // 'auto_increment', '' + } + + // if (isPrimary) { + // if (typeof tableToPk[tableName] === "undefined") { + // tableToPk[tableName] = [columnName]; + // } else { + // tableToPk[tableName].push(columnName); + // } + // } + + if (schema !== inputSchema) { + schemas.push(schema); + } + + const table = result[tableName]; + + // let changedType = columnType.replace("bigint unsigned", "serial") + let changedType = columnType; + + if (columnType === 'bigint unsigned' && !isNullable && isAutoincrement) { + // check unique here + const uniqueIdx = 
idxRows.filter( + (it) => + it['COLUMN_NAME'] === columnName + && it['TABLE_NAME'] === tableName + && it['NON_UNIQUE'] === 0, + ); + if (uniqueIdx && uniqueIdx.length === 1) { + changedType = columnType.replace('bigint unsigned', 'serial'); + } + } + + if (columnType.startsWith('tinyint')) { + changedType = 'tinyint'; + } + + let onUpdate: boolean | undefined = undefined; + if ( + columnType.startsWith('timestamp') + && typeof columnExtra !== 'undefined' + && columnExtra.includes('on update CURRENT_TIMESTAMP') + ) { + onUpdate = true; + } + + const newColumn: Column = { + default: columnDefault === null + ? undefined + : /^-?[\d.]+(?:e-?\d+)?$/.test(columnDefault) + && !columnType.startsWith('decimal') + ? Number(columnDefault) + : isDefaultAnExpression + ? clearDefaults(columnDefault, collation) + : `'${columnDefault}'`, + autoincrement: isAutoincrement, + name: columnName, + type: changedType, + primaryKey: false, + notNull: !isNullable, + onUpdate, + generated: geenratedExpression + ? { + as: geenratedExpression, + type: columnExtra === 'VIRTUAL GENERATED' ? 
'virtual' : 'stored', + } + : undefined, + }; + + // Set default to internal object + if (isDefaultAnExpression) { + if (typeof internals!.tables![tableName] === 'undefined') { + internals!.tables![tableName] = { + columns: { + [columnName]: { + isDefaultAnExpression: true, + }, + }, + }; + } else { + if ( + typeof internals!.tables![tableName]!.columns[columnName] + === 'undefined' + ) { + internals!.tables![tableName]!.columns[columnName] = { + isDefaultAnExpression: true, + }; + } else { + internals!.tables![tableName]!.columns[ + columnName + ]!.isDefaultAnExpression = true; + } + } + } + + if (!table) { + result[tableName] = { + name: tableName, + columns: { + [columnName]: newColumn, + }, + compositePrimaryKeys: {}, + indexes: {}, + uniqueConstraints: {}, + }; + } else { + result[tableName]!.columns[columnName] = newColumn; + } + } + + const tablePks = await db.query( + `SELECT table_name, column_name, ordinal_position + FROM information_schema.table_constraints t + LEFT JOIN information_schema.key_column_usage k + USING(constraint_name,table_schema,table_name) + WHERE t.constraint_type='PRIMARY KEY' + and table_name != '__drizzle_migrations' + AND t.table_schema = '${inputSchema}' + ORDER BY ordinal_position`, + ); + + const tableToPk: { [tname: string]: string[] } = {}; + + const tableToPkRows = tablePks as RowDataPacket[]; + for (const tableToPkRow of tableToPkRows) { + const tableName: string = tableToPkRow['TABLE_NAME']; + const columnName: string = tableToPkRow['COLUMN_NAME']; + const position: string = tableToPkRow['ordinal_position']; + + if (typeof result[tableName] === 'undefined') { + continue; + } + + if (typeof tableToPk[tableName] === 'undefined') { + tableToPk[tableName] = [columnName]; + } else { + tableToPk[tableName].push(columnName); + } + } + + for (const [key, value] of Object.entries(tableToPk)) { + // if (value.length > 1) { + result[key].compositePrimaryKeys = { + [`${key}_${value.join('_')}`]: { + name: `${key}_${value.join('_')}`, + 
columns: value, + }, + }; + // } else if (value.length === 1) { + // result[key].columns[value[0]].primaryKey = true; + // } else { + // } + } + if (progressCallback) { + progressCallback('columns', columnsCount, 'done'); + progressCallback('tables', tablesCount.size, 'done'); + } + + for (const idxRow of idxRows) { + const tableSchema = idxRow['TABLE_SCHEMA']; + const tableName = idxRow['TABLE_NAME']; + const constraintName = idxRow['INDEX_NAME']; + const columnName: string = idxRow['COLUMN_NAME']; + const isUnique = idxRow['NON_UNIQUE'] === 0; + + const tableInResult = result[tableName]; + if (typeof tableInResult === 'undefined') continue; + + // if (tableInResult.columns[columnName].type === "serial") continue; + + indexesCount += 1; + if (progressCallback) { + progressCallback('indexes', indexesCount, 'fetching'); + } + + if (isUnique) { + if ( + typeof tableInResult.uniqueConstraints[constraintName] !== 'undefined' + ) { + tableInResult.uniqueConstraints[constraintName]!.columns.push( + columnName, + ); + } else { + tableInResult.uniqueConstraints[constraintName] = { + name: constraintName, + columns: [columnName], + }; + } + } else { + if (typeof tableInResult.indexes[constraintName] !== 'undefined') { + tableInResult.indexes[constraintName]!.columns.push(columnName); + } else { + tableInResult.indexes[constraintName] = { + name: constraintName, + columns: [columnName], + isUnique: isUnique, + }; + } + } + } + + if (progressCallback) { + progressCallback('indexes', indexesCount, 'done'); + // progressCallback("enums", 0, "fetching"); + progressCallback('enums', 0, 'done'); + } + + return { + version: '1', + dialect: 'singlestore', + tables: result, + _meta: { + tables: {}, + columns: {}, + }, + internal: internals, + }; +}; diff --git a/drizzle-kit/src/serializer/sqliteImports.ts b/drizzle-kit/src/serializer/sqliteImports.ts new file mode 100644 index 000000000..534427e47 --- /dev/null +++ b/drizzle-kit/src/serializer/sqliteImports.ts @@ -0,0 +1,33 @@ 
+import { is } from 'drizzle-orm'; +import { AnySQLiteTable, SQLiteTable } from 'drizzle-orm/sqlite-core'; +import { safeRegister } from '../cli/commands/utils'; + +export const prepareFromExports = (exports: Record) => { + const tables: AnySQLiteTable[] = []; + const i0values = Object.values(exports); + i0values.forEach((t) => { + if (is(t, SQLiteTable)) { + tables.push(t); + } + }); + + return { tables }; +}; + +export const prepareFromSqliteImports = async (imports: string[]) => { + const tables: AnySQLiteTable[] = []; + + const { unregister } = await safeRegister(); + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + + const i0: Record = require(`${it}`); + const prepared = prepareFromExports(i0); + + tables.push(...prepared.tables); + } + + unregister(); + + return { tables: Array.from(new Set(tables)) }; +}; diff --git a/drizzle-kit/src/serializer/sqliteSchema.ts b/drizzle-kit/src/serializer/sqliteSchema.ts new file mode 100644 index 000000000..a8114e3a8 --- /dev/null +++ b/drizzle-kit/src/serializer/sqliteSchema.ts @@ -0,0 +1,312 @@ +import { any, boolean, enum as enumType, literal, object, record, string, TypeOf, union } from 'zod'; +import { customMapEntries, mapEntries, mapValues, originUUID } from '../global'; + +// ------- V3 -------- +const index = object({ + name: string(), + columns: string().array(), + where: string().optional(), + isUnique: boolean(), +}).strict(); + +const fk = object({ + name: string(), + tableFrom: string(), + columnsFrom: string().array(), + tableTo: string(), + columnsTo: string().array(), + onUpdate: string().optional(), + onDelete: string().optional(), +}).strict(); + +const compositePK = object({ + columns: string().array(), + name: string().optional(), +}).strict(); + +const column = object({ + name: string(), + type: string(), + primaryKey: boolean(), + notNull: boolean(), + autoincrement: boolean().optional(), + default: any().optional(), + generated: object({ + type: enumType(['stored', 'virtual']), 
+ as: string(), + }).optional(), +}).strict(); + +const tableV3 = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), +}).strict(); + +const uniqueConstraint = object({ + name: string(), + columns: string().array(), +}).strict(); + +const table = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), +}).strict(); + +// use main dialect +const dialect = enumType(['sqlite']); + +const schemaHash = object({ + id: string(), + prevId: string(), +}).strict(); + +export const schemaInternalV3 = object({ + version: literal('3'), + dialect: dialect, + tables: record(string(), tableV3), + enums: object({}), +}).strict(); + +export const schemaInternalV4 = object({ + version: literal('4'), + dialect: dialect, + tables: record(string(), table), + enums: object({}), +}).strict(); + +export const schemaInternalV5 = object({ + version: literal('5'), + dialect: dialect, + tables: record(string(), table), + enums: object({}), + _meta: object({ + tables: record(string(), string()), + columns: record(string(), string()), + }), +}).strict(); + +export const kitInternals = object({ + indexes: record( + string(), + object({ + columns: record( + string(), + object({ isExpression: boolean().optional() }).optional(), + ), + }).optional(), + ).optional(), +}).optional(); + +const latestVersion = literal('6'); +export const schemaInternal = object({ + version: latestVersion, + dialect: dialect, + tables: record(string(), table), + enums: object({}), + _meta: object({ + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const schemaV3 = schemaInternalV3.merge(schemaHash).strict(); +export const schemaV4 = 
schemaInternalV4.merge(schemaHash).strict(); +export const schemaV5 = schemaInternalV5.merge(schemaHash).strict(); +export const schema = schemaInternal.merge(schemaHash).strict(); + +const tableSquashed = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), string()), + foreignKeys: record(string(), string()), + compositePrimaryKeys: record(string(), string()), + uniqueConstraints: record(string(), string()).default({}), +}).strict(); + +export const schemaSquashed = object({ + version: latestVersion, + dialect: dialect, + tables: record(string(), tableSquashed), + enums: any(), +}).strict(); + +export type Dialect = TypeOf; +export type Column = TypeOf; +export type Table = TypeOf; +export type SQLiteSchema = TypeOf; +export type SQLiteSchemaV3 = TypeOf; +export type SQLiteSchemaV4 = TypeOf; +export type SQLiteSchemaInternal = TypeOf; +export type SQLiteSchemaSquashed = TypeOf; +export type SQLiteKitInternals = TypeOf; +export type Index = TypeOf; +export type ForeignKey = TypeOf; +export type PrimaryKey = TypeOf; +export type UniqueConstraint = TypeOf; + +export const SQLiteSquasher = { + squashIdx: (idx: Index) => { + index.parse(idx); + return `${idx.name};${idx.columns.join(',')};${idx.isUnique};${idx.where ?? ''}`; + }, + unsquashIdx: (input: string): Index => { + const [name, columnsString, isUnique, where] = input.split(';'); + + const result: Index = index.parse({ + name, + columns: columnsString.split(','), + isUnique: isUnique === 'true', + where: where ?? undefined, + }); + return result; + }, + squashUnique: (unq: UniqueConstraint) => { + return `${unq.name};${unq.columns.join(',')}`; + }, + unsquashUnique: (unq: string): UniqueConstraint => { + const [name, columns] = unq.split(';'); + return { name, columns: columns.split(',') }; + }, + squashFK: (fk: ForeignKey) => { + return `${fk.name};${fk.tableFrom};${fk.columnsFrom.join(',')};${fk.tableTo};${fk.columnsTo.join(',')};${ + fk.onUpdate ?? 
'' + };${fk.onDelete ?? ''}`; + }, + unsquashFK: (input: string): ForeignKey => { + const [ + name, + tableFrom, + columnsFromStr, + tableTo, + columnsToStr, + onUpdate, + onDelete, + ] = input.split(';'); + + const result: ForeignKey = fk.parse({ + name, + tableFrom, + columnsFrom: columnsFromStr.split(','), + tableTo, + columnsTo: columnsToStr.split(','), + onUpdate, + onDelete, + }); + return result; + }, + squashPushFK: (fk: ForeignKey) => { + return `${fk.tableFrom};${fk.columnsFrom.join(',')};${fk.tableTo};${fk.columnsTo.join(',')};${fk.onUpdate ?? ''};${ + fk.onDelete ?? '' + }`; + }, + unsquashPushFK: (input: string): ForeignKey => { + const [ + tableFrom, + columnsFromStr, + tableTo, + columnsToStr, + onUpdate, + onDelete, + ] = input.split(';'); + + const result: ForeignKey = fk.parse({ + name: '', + tableFrom, + columnsFrom: columnsFromStr.split(','), + tableTo, + columnsTo: columnsToStr.split(','), + onUpdate, + onDelete, + }); + return result; + }, + squashPK: (pk: PrimaryKey) => { + return pk.columns.join(','); + }, + unsquashPK: (pk: string) => { + return pk.split(','); + }, +}; + +export const squashSqliteScheme = ( + json: SQLiteSchema | SQLiteSchemaV4, + action?: 'push' | undefined, +): SQLiteSchemaSquashed => { + const mappedTables = Object.fromEntries( + Object.entries(json.tables).map((it) => { + const squashedIndexes = mapValues(it[1].indexes, (index: Index) => { + return SQLiteSquasher.squashIdx(index); + }); + + const squashedFKs = customMapEntries( + it[1].foreignKeys, + (key, value) => { + return action === 'push' + ? 
[ + SQLiteSquasher.squashPushFK(value), + SQLiteSquasher.squashPushFK(value), + ] + : [key, SQLiteSquasher.squashFK(value)]; + }, + ); + + const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => { + return SQLiteSquasher.squashPK(pk); + }); + + const squashedUniqueConstraints = mapValues( + it[1].uniqueConstraints, + (unq) => { + return SQLiteSquasher.squashUnique(unq); + }, + ); + + return [ + it[0], + { + name: it[1].name, + columns: it[1].columns, + indexes: squashedIndexes, + foreignKeys: squashedFKs, + compositePrimaryKeys: squashedPKs, + uniqueConstraints: squashedUniqueConstraints, + }, + ]; + }), + ); + + return { + version: '6', + dialect: json.dialect, + tables: mappedTables, + enums: json.enums, + }; +}; + +export const drySQLite = schema.parse({ + version: '6', + dialect: 'sqlite', + id: originUUID, + prevId: '', + tables: {}, + enums: {}, + _meta: { + tables: {}, + columns: {}, + }, +}); + +export const sqliteSchemaV3 = schemaV3; +export const sqliteSchemaV4 = schemaV4; +export const sqliteSchemaV5 = schemaV5; +export const sqliteSchema = schema; +export const SQLiteSchemaSquashed = schemaSquashed; + +export const backwardCompatibleSqliteSchema = union([sqliteSchemaV5, schema]); diff --git a/drizzle-kit/src/serializer/sqliteSerializer.ts b/drizzle-kit/src/serializer/sqliteSerializer.ts new file mode 100644 index 000000000..ce544235b --- /dev/null +++ b/drizzle-kit/src/serializer/sqliteSerializer.ts @@ -0,0 +1,684 @@ +import chalk from 'chalk'; +import { getTableName, is, SQL } from 'drizzle-orm'; +import { + // AnySQLiteColumnBuilder, + AnySQLiteTable, + getTableConfig, + SQLiteBaseInteger, + SQLiteSyncDialect, + uniqueKeyName, +} from 'drizzle-orm/sqlite-core'; +import { withStyle } from '../cli/validations/outputs'; +import type { IntrospectStage, IntrospectStatus } from '../cli/views'; +import type { + Column, + ForeignKey, + Index, + PrimaryKey, + SQLiteKitInternals, + SQLiteSchemaInternal, + Table, + UniqueConstraint, +} from 
'../serializer/sqliteSchema'; +import type { SQLiteDB } from '../utils'; +import { sqlToStr } from '.'; + +const dialect = new SQLiteSyncDialect(); + +export const generateSqliteSnapshot = ( + tables: AnySQLiteTable[], +): SQLiteSchemaInternal => { + const result: Record = {}; + const internal: SQLiteKitInternals = { indexes: {} }; + for (const table of tables) { + // const tableName = getTableName(table); + const columnsObject: Record = {}; + const indexesObject: Record = {}; + const foreignKeysObject: Record = {}; + const primaryKeysObject: Record = {}; + const uniqueConstraintObject: Record = {}; + + const { + name: tableName, + columns, + indexes, + foreignKeys: tableForeignKeys, + primaryKeys, + uniqueConstraints, + } = getTableConfig(table); + + columns.forEach((column) => { + const notNull: boolean = column.notNull; + const primaryKey: boolean = column.primary; + const generated = column.generated; + + const columnToSet: Column = { + name: column.name, + type: column.getSQLType(), + primaryKey, + notNull, + autoincrement: is(column, SQLiteBaseInteger) + ? column.autoIncrement + : false, + generated: generated + ? { + as: is(generated.as, SQL) + ? `(${dialect.sqlToQuery(generated.as as SQL, 'indexes').sql})` + : typeof generated.as === 'function' + ? `(${dialect.sqlToQuery(generated.as() as SQL, 'indexes').sql})` + : `(${generated.as as any})`, + type: generated.mode ?? 'virtual', + } + : undefined, + }; + + if (column.default !== undefined) { + if (is(column.default, SQL)) { + columnToSet.default = sqlToStr(column.default); + } else { + columnToSet.default = typeof column.default === 'string' + ? `'${column.default}'` + : typeof column.default === 'object' + || Array.isArray(column.default) + ? 
`'${JSON.stringify(column.default)}'` + : column.default; + } + } + columnsObject[column.name] = columnToSet; + + if (column.isUnique) { + const existingUnique = indexesObject[column.uniqueName!]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. + The unique constraint ${ + chalk.underline.blue( + column.uniqueName, + ) + } on the ${ + chalk.underline.blue( + column.name, + ) + } column is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + existingUnique.columns.join(','), + ) + } columns\n`) + }`, + ); + process.exit(1); + } + indexesObject[column.uniqueName!] = { + name: column.uniqueName!, + columns: [columnToSet.name], + isUnique: true, + }; + } + }); + + const foreignKeys: ForeignKey[] = tableForeignKeys.map((fk) => { + const name = fk.getName(); + const tableFrom = tableName; + const onDelete = fk.onDelete ?? 'no action'; + const onUpdate = fk.onUpdate ?? 
'no action'; + const reference = fk.reference(); + + const referenceFT = reference.foreignTable; + + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + const tableTo = getTableName(referenceFT); + const columnsFrom = reference.columns.map((it) => it.name); + const columnsTo = reference.foreignColumns.map((it) => it.name); + return { + name, + tableFrom, + tableTo, + columnsFrom, + columnsTo, + onDelete, + onUpdate, + } as ForeignKey; + }); + + foreignKeys.forEach((it) => { + foreignKeysObject[it.name] = it; + }); + + indexes.forEach((value) => { + const columns = value.config.columns; + const name = value.config.name; + + let indexColumns = columns.map((it) => { + if (is(it, SQL)) { + const sql = dialect.sqlToQuery(it, 'indexes').sql; + if (typeof internal!.indexes![name] === 'undefined') { + internal!.indexes![name] = { + columns: { + [sql]: { + isExpression: true, + }, + }, + }; + } else { + if (typeof internal!.indexes![name]?.columns[sql] === 'undefined') { + internal!.indexes![name]!.columns[sql] = { + isExpression: true, + }; + } else { + internal!.indexes![name]!.columns[sql]!.isExpression = true; + } + } + return sql; + } else { + return it.name; + } + }); + + let where: string | undefined = undefined; + if (value.config.where !== undefined) { + if (is(value.config.where, SQL)) { + where = dialect.sqlToQuery(value.config.where).sql; + } + } + + indexesObject[name] = { + name, + columns: indexColumns, + isUnique: value.config.unique ?? false, + where, + }; + }); + + uniqueConstraints?.map((unq) => { + const columnNames = unq.columns.map((c) => c.name); + + const name = unq.name ?? uniqueKeyName(table, columnNames); + + const existingUnique = indexesObject[name]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. 
\nThe unique constraint ${ + chalk.underline.blue( + name, + ) + } on the ${ + chalk.underline.blue( + columnNames.join(','), + ) + } columns is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + existingUnique.columns.join(','), + ) + } columns\n`, + ) + }`, + ); + process.exit(1); + } + + indexesObject[name] = { + name: unq.name!, + columns: columnNames, + isUnique: true, + }; + }); + + primaryKeys.forEach((it) => { + if (it.columns.length > 1) { + primaryKeysObject[it.getName()] = { + columns: it.columns.map((it) => it.name), + name: it.getName(), + }; + } else { + columnsObject[it.columns[0].name].primaryKey = true; + } + }); + + result[tableName] = { + name: tableName, + columns: columnsObject, + indexes: indexesObject, + foreignKeys: foreignKeysObject, + compositePrimaryKeys: primaryKeysObject, + uniqueConstraints: uniqueConstraintObject, + }; + } + + return { + version: '6', + dialect: 'sqlite', + tables: result, + enums: {}, + _meta: { + tables: {}, + columns: {}, + }, + internal, + }; +}; + +function mapSqlToSqliteType(sqlType: string): string { + const lowered = sqlType.toLowerCase(); + if ( + [ + 'int', + 'integer', + 'integer auto_increment', + 'tinyint', + 'smallint', + 'mediumint', + 'bigint', + 'unsigned big int', + 'int2', + 'int8', + ].some((it) => lowered.startsWith(it)) + ) { + return 'integer'; + } else if ( + [ + 'character', + 'varchar', + 'varying character', + 'national varying character', + 'nchar', + 'native character', + 'nvarchar', + 'text', + 'clob', + ].some((it) => lowered.startsWith(it)) + ) { + const match = lowered.match(/\d+/); + + if (match) { + return `text(${match[0]})`; + } + + return 'text'; + } else if (lowered.startsWith('blob')) { + return 'blob'; + } else if ( + ['real', 'double', 'double precision', 'float'].some((it) => lowered.startsWith(it)) + ) { + return 'real'; + } else { + return 'numeric'; + } +} + +interface ColumnInfo { + columnName: string; + expression: string; + type: 
'stored' | 'virtual'; +} + +function extractGeneratedColumns(input: string): Record { + const columns: Record = {}; + const lines = input.split(/,\s*(?![^()]*\))/); // Split by commas outside parentheses + + for (const line of lines) { + if (line.includes('GENERATED ALWAYS AS')) { + const parts = line.trim().split(/\s+/); + const columnName = parts[0].replace(/[`'"]/g, ''); // Remove quotes around the column name + const expression = line + .substring(line.indexOf('('), line.indexOf(')') + 1) + .trim(); + + // Extract type ensuring to remove any trailing characters like ')' + const typeIndex = parts.findIndex((part) => part.match(/(stored|virtual)/i)); + let type: ColumnInfo['type'] = 'virtual'; + if (typeIndex !== -1) { + type = parts[typeIndex] + .replace(/[^a-z]/gi, '') + .toLowerCase() as ColumnInfo['type']; + } + + columns[columnName] = { + columnName: columnName, + expression: expression, + type, + }; + } + } + return columns; +} + +export const fromDatabase = async ( + db: SQLiteDB, + tablesFilter: (table: string) => boolean = (table) => true, + progressCallback?: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void, +): Promise => { + const result: Record = {}; + + const columns = await db.query<{ + tableName: string; + columnName: string; + columnType: string; + notNull: number; + defaultValue: string; + pk: number; + seq: number; + hidden: number; + sql: string; + }>( + `SELECT + m.name as "tableName", p.name as "columnName", p.type as "columnType", p."notnull" as "notNull", p.dflt_value as "defaultValue", p.pk as pk, p.hidden as hidden, m.sql + FROM sqlite_master AS m JOIN pragma_table_xinfo(m.name) AS p + WHERE m.type = 'table' + and m.tbl_name != 'sqlite_sequence' + and m.tbl_name != 'sqlite_stat1' + and m.tbl_name != '_litestream_seq' + and m.tbl_name != '_litestream_lock' + and m.tbl_name != 'libsql_wasm_func_table' + and m.tbl_name != '__drizzle_migrations' + and m.tbl_name != '_cf_KV'; + `, + ); + + const 
tablesWithSeq: string[] = []; + + const seq = await db.query<{ + name: string; + }>( + `SELECT * FROM sqlite_master WHERE name != 'sqlite_sequence' + and name != 'sqlite_stat1' + and name != '_litestream_seq' + and name != '_litestream_lock' + and tbl_name != '_cf_KV' + and sql GLOB '*[ *' || CHAR(9) || CHAR(10) || CHAR(13) || ']AUTOINCREMENT[^'']*';`, + ); + + for (const s of seq) { + tablesWithSeq.push(s.name); + } + + let columnsCount = 0; + let tablesCount = new Set(); + let indexesCount = 0; + let foreignKeysCount = 0; + + // append primaryKeys by table + const tableToPk: { [tname: string]: string[] } = {}; + + let tableToGeneratedColumnsInfo: Record< + string, + Record + > = {}; + + for (const column of columns) { + if (!tablesFilter(column.tableName)) continue; + + columnsCount += 1; + if (progressCallback) { + progressCallback('columns', columnsCount, 'fetching'); + } + const tableName = column.tableName; + + tablesCount.add(tableName); + if (progressCallback) { + progressCallback('tables', tablesCount.size, 'fetching'); + } + const columnName = column.columnName; + const isNotNull = column.notNull === 1; // 'YES', 'NO' + const columnType = column.columnType; // varchar(256) + const isPrimary = column.pk !== 0; // 'PRI', '' + const columnDefault: string = column.defaultValue; + + const isAutoincrement = isPrimary && tablesWithSeq.includes(tableName); + + if (isPrimary) { + if (typeof tableToPk[tableName] === 'undefined') { + tableToPk[tableName] = [columnName]; + } else { + tableToPk[tableName].push(columnName); + } + } + + const table = result[tableName]; + + if (column.hidden === 2 || column.hidden === 3) { + if ( + typeof tableToGeneratedColumnsInfo[column.tableName] === 'undefined' + ) { + tableToGeneratedColumnsInfo[column.tableName] = extractGeneratedColumns( + column.sql, + ); + } + } + + const newColumn: Column = { + default: columnDefault === null + ? undefined + : /^-?[\d.]+(?:e-?\d+)?$/.test(columnDefault) + ? 
Number(columnDefault) + : ['CURRENT_TIME', 'CURRENT_DATE', 'CURRENT_TIMESTAMP'].includes( + columnDefault, + ) + ? `(${columnDefault})` + : columnDefault === 'false' + ? false + : columnDefault === 'true' + ? true + : columnDefault.startsWith("'") && columnDefault.endsWith("'") + ? columnDefault + // ? columnDefault.substring(1, columnDefault.length - 1) + : `(${columnDefault})`, + autoincrement: isAutoincrement, + name: columnName, + type: mapSqlToSqliteType(columnType), + primaryKey: false, + notNull: isNotNull, + generated: tableToGeneratedColumnsInfo[tableName] + && tableToGeneratedColumnsInfo[tableName][columnName] + ? { + type: tableToGeneratedColumnsInfo[tableName][columnName].type, + as: tableToGeneratedColumnsInfo[tableName][columnName].expression, + } + : undefined, + }; + + if (!table) { + result[tableName] = { + name: tableName, + columns: { + [columnName]: newColumn, + }, + compositePrimaryKeys: {}, + indexes: {}, + foreignKeys: {}, + uniqueConstraints: {}, + }; + } else { + result[tableName]!.columns[columnName] = newColumn; + } + } + + for (const [key, value] of Object.entries(tableToPk)) { + if (value.length > 1) { + result[key].compositePrimaryKeys = { + [`${key}_${value.join('_')}_pk`]: { + columns: value, + name: `${key}_${value.join('_')}_pk`, + }, + }; + } else if (value.length === 1) { + result[key].columns[value[0]].primaryKey = true; + } else { + } + } + + if (progressCallback) { + progressCallback('columns', columnsCount, 'done'); + progressCallback('tables', tablesCount.size, 'done'); + } + try { + const fks = await db.query<{ + tableFrom: string; + tableTo: string; + from: string; + to: string; + onUpdate: string; + onDelete: string; + seq: number; + id: number; + }>( + `SELECT m.name as "tableFrom", f.id as "id", f."table" as "tableTo", f."from", f."to", f."on_update" as "onUpdate", f."on_delete" as "onDelete", f.seq as "seq" + FROM sqlite_master m, pragma_foreign_key_list(m.name) as f + where m.tbl_name != '_cf_KV';`, + ); + + const 
fkByTableName: Record = {}; + + for (const fkRow of fks) { + foreignKeysCount += 1; + if (progressCallback) { + progressCallback('fks', foreignKeysCount, 'fetching'); + } + const tableName: string = fkRow.tableFrom; + const columnName: string = fkRow.from; + const refTableName = fkRow.tableTo; + const refColumnName: string = fkRow.to; + const updateRule: string = fkRow.onUpdate; + const deleteRule = fkRow.onDelete; + const sequence = fkRow.seq; + const id = fkRow.id; + + const tableInResult = result[tableName]; + if (typeof tableInResult === 'undefined') continue; + + if (typeof fkByTableName[`${tableName}_${id}`] !== 'undefined') { + fkByTableName[`${tableName}_${id}`]!.columnsFrom.push(columnName); + fkByTableName[`${tableName}_${id}`]!.columnsTo.push(refColumnName); + } else { + fkByTableName[`${tableName}_${id}`] = { + name: '', + tableFrom: tableName, + tableTo: refTableName, + columnsFrom: [columnName], + columnsTo: [refColumnName], + onDelete: deleteRule?.toLowerCase(), + onUpdate: updateRule?.toLowerCase(), + }; + } + + const columnsFrom = fkByTableName[`${tableName}_${id}`].columnsFrom; + const columnsTo = fkByTableName[`${tableName}_${id}`].columnsTo; + fkByTableName[ + `${tableName}_${id}` + ].name = `${tableName}_${ + columnsFrom.join( + '_', + ) + }_${refTableName}_${columnsTo.join('_')}_fk`; + } + + for (const idx of Object.keys(fkByTableName)) { + const value = fkByTableName[idx]; + result[value.tableFrom].foreignKeys[value.name] = value; + } + } catch (e) { + // console.log(`Can't proccess foreign keys`); + } + if (progressCallback) { + progressCallback('fks', foreignKeysCount, 'done'); + } + const idxs = await db.query<{ + tableName: string; + indexName: string; + columnName: string; + isUnique: number; + seq: string; + }>( + `SELECT + m.tbl_name as tableName, + il.name as indexName, + ii.name as columnName, + il.[unique] as isUnique, + il.seq as seq +FROM sqlite_master AS m, + pragma_index_list(m.name) AS il, + pragma_index_info(il.name) AS ii 
+WHERE + m.type = 'table' + and il.name NOT LIKE 'sqlite_autoindex_%' + and m.tbl_name != '_cf_KV';`, + ); + + for (const idxRow of idxs) { + const tableName = idxRow.tableName; + const constraintName = idxRow.indexName; + const columnName: string = idxRow.columnName; + const isUnique = idxRow.isUnique === 1; + + const tableInResult = result[tableName]; + if (typeof tableInResult === 'undefined') continue; + + indexesCount += 1; + if (progressCallback) { + progressCallback('indexes', indexesCount, 'fetching'); + } + + if ( + typeof tableInResult.indexes[constraintName] !== 'undefined' + && columnName + ) { + tableInResult.indexes[constraintName]!.columns.push(columnName); + } else { + tableInResult.indexes[constraintName] = { + name: constraintName, + columns: columnName ? [columnName] : [], + isUnique: isUnique, + }; + } + // if (isUnique) { + // if (typeof tableInResult.uniqueConstraints[constraintName] !== "undefined") { + // tableInResult.uniqueConstraints[constraintName]!.columns.push(columnName); + // } else { + // tableInResult.uniqueConstraints[constraintName] = { + // name: constraintName, + // columns: [columnName], + // }; + // } + // } else { + // if (typeof tableInResult.indexes[constraintName] !== "undefined") { + // tableInResult.indexes[constraintName]!.columns.push(columnName); + // } else { + // tableInResult.indexes[constraintName] = { + // name: constraintName, + // columns: [columnName], + // isUnique: isUnique, + // }; + // } + // } + } + if (progressCallback) { + progressCallback('indexes', indexesCount, 'done'); + // progressCallback("enums", 0, "fetching"); + progressCallback('enums', 0, 'done'); + } + + return { + version: '6', + dialect: 'sqlite', + tables: result, + enums: {}, + _meta: { + tables: {}, + columns: {}, + }, + }; +}; diff --git a/drizzle-kit/src/serializer/studio.ts b/drizzle-kit/src/serializer/studio.ts new file mode 100644 index 000000000..5515e6f59 --- /dev/null +++ b/drizzle-kit/src/serializer/studio.ts @@ -0,0 +1,666 @@ 
+import { serve } from '@hono/node-server'; +import { zValidator } from '@hono/zod-validator'; +import { createHash } from 'crypto'; +import { + AnyColumn, + AnyTable, + createTableRelationsHelpers, + extractTablesRelationalConfig, + is, + Many, + normalizeRelation, + One, + Relations, + TablesRelationalConfig, +} from 'drizzle-orm'; +import { AnyMySqlTable, getTableConfig as mysqlTableConfig, MySqlTable } from 'drizzle-orm/mysql-core'; +import { AnyPgTable, getTableConfig as pgTableConfig, PgTable } from 'drizzle-orm/pg-core'; +import { + AnySingleStoreTable, + getTableConfig as singlestoreTableConfig, + SingleStoreTable, +} from 'drizzle-orm/singlestore-core'; +import { AnySQLiteTable, getTableConfig as sqliteTableConfig, SQLiteTable } from 'drizzle-orm/sqlite-core'; +import fs from 'fs'; +import { Hono } from 'hono'; +import { cors } from 'hono/cors'; +import { createServer } from 'node:https'; +import { assertUnreachable } from 'src/global'; +import superjson from 'superjson'; +import { z } from 'zod'; +import { safeRegister } from '../cli/commands/utils'; +import type { MysqlCredentials } from '../cli/validations/mysql'; +import type { PostgresCredentials } from '../cli/validations/postgres'; +import type { SingleStoreCredentials } from '../cli/validations/singlestore'; +import type { SqliteCredentials } from '../cli/validations/sqlite'; +import { prepareFilenames } from '.'; + +type CustomDefault = { + schema: string; + table: string; + column: string; + func: () => unknown; +}; + +type SchemaFile = { + name: string; + content: string; +}; + +export type Setup = { + dbHash: string; + dialect: 'postgresql' | 'mysql' | 'sqlite' | 'singlestore'; + driver?: 'aws-data-api' | 'd1-http' | 'turso' | 'pglite'; + proxy: (params: ProxyParams) => Promise; + customDefaults: CustomDefault[]; + schema: Record>>; + relations: Record; + schemaFiles?: SchemaFile[]; +}; + +export type ProxyParams = { + sql: string; + params: any[]; + typings?: any[]; + mode: 'array' | 'object'; 
+ method: 'values' | 'get' | 'all' | 'run' | 'execute'; +}; + +export const preparePgSchema = async (path: string | string[]) => { + const imports = prepareFilenames(path); + const pgSchema: Record> = {}; + const relations: Record = {}; + + // files content as string + const files = imports.map((it, index) => ({ + // get the file name from the path + name: it.split('/').pop() || `schema${index}.ts`, + content: fs.readFileSync(it, 'utf-8'), + })); + + const { unregister } = await safeRegister(); + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + + const i0: Record = require(`${it}`); + const i0values = Object.entries(i0); + + i0values.forEach(([k, t]) => { + if (is(t, PgTable)) { + const schema = pgTableConfig(t).schema || 'public'; + pgSchema[schema] = pgSchema[schema] || {}; + pgSchema[schema][k] = t; + } + + if (is(t, Relations)) { + relations[k] = t; + } + }); + } + unregister(); + + return { schema: pgSchema, relations, files }; +}; + +export const prepareMySqlSchema = async (path: string | string[]) => { + const imports = prepareFilenames(path); + const mysqlSchema: Record> = { + public: {}, + }; + const relations: Record = {}; + + // files content as string + const files = imports.map((it, index) => ({ + // get the file name from the path + name: it.split('/').pop() || `schema${index}.ts`, + content: fs.readFileSync(it, 'utf-8'), + })); + + const { unregister } = await safeRegister(); + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + + const i0: Record = require(`${it}`); + const i0values = Object.entries(i0); + + i0values.forEach(([k, t]) => { + if (is(t, MySqlTable)) { + const schema = mysqlTableConfig(t).schema || 'public'; + mysqlSchema[schema][k] = t; + } + + if (is(t, Relations)) { + relations[k] = t; + } + }); + } + unregister(); + + return { schema: mysqlSchema, relations, files }; +}; + +export const prepareSQLiteSchema = async (path: string | string[]) => { + const imports = prepareFilenames(path); + const 
sqliteSchema: Record> = { + public: {}, + }; + const relations: Record = {}; + + // files content as string + const files = imports.map((it, index) => ({ + // get the file name from the path + name: it.split('/').pop() || `schema${index}.ts`, + content: fs.readFileSync(it, 'utf-8'), + })); + + const { unregister } = await safeRegister(); + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + + const i0: Record = require(`${it}`); + const i0values = Object.entries(i0); + + i0values.forEach(([k, t]) => { + if (is(t, SQLiteTable)) { + const schema = 'public'; // sqlite does not have schemas + sqliteSchema[schema][k] = t; + } + + if (is(t, Relations)) { + relations[k] = t; + } + }); + } + unregister(); + + return { schema: sqliteSchema, relations, files }; +}; + +export const prepareSingleStoreSchema = async (path: string | string[]) => { + const imports = prepareFilenames(path); + const singlestoreSchema: Record> = { + public: {}, + }; + const relations: Record = {}; + + // files content as string + const files = imports.map((it, index) => ({ + // get the file name from the path + name: it.split('/').pop() || `schema${index}.ts`, + content: fs.readFileSync(it, 'utf-8'), + })); + + const { unregister } = await safeRegister(); + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + + const i0: Record = require(`${it}`); + const i0values = Object.entries(i0); + + i0values.forEach(([k, t]) => { + if (is(t, SingleStoreTable)) { + const schema = singlestoreTableConfig(t).schema || 'public'; + singlestoreSchema[schema][k] = t; + } + + if (is(t, Relations)) { + relations[k] = t; + } + }); + } + unregister(); + + return { schema: singlestoreSchema, relations, files }; +}; + +const getCustomDefaults = >( + schema: Record>, +): CustomDefault[] => { + const customDefaults: CustomDefault[] = []; + + Object.entries(schema).map(([schema, tables]) => { + Object.entries(tables).map(([, table]) => { + let tableConfig: { + name: string; + columns: 
AnyColumn[]; + }; + if (is(table, PgTable)) { + tableConfig = pgTableConfig(table); + } else if (is(table, MySqlTable)) { + tableConfig = mysqlTableConfig(table); + } else if (is(table, SQLiteTable)) { + tableConfig = sqliteTableConfig(table); + } else { + tableConfig = singlestoreTableConfig(table); + } + + tableConfig.columns.map((column) => { + if (column.defaultFn) { + customDefaults.push({ + schema, + table: tableConfig.name, + column: column.name, + func: column.defaultFn, + }); + } + }); + }); + }); + + return customDefaults; +}; + +export const drizzleForPostgres = async ( + credentials: PostgresCredentials, + pgSchema: Record>, + relations: Record, + schemaFiles?: SchemaFile[], +): Promise => { + const { preparePostgresDB } = await import('../cli/connections'); + const db = await preparePostgresDB(credentials); + const customDefaults = getCustomDefaults(pgSchema); + + let dbUrl: string; + + if ('driver' in credentials) { + const { driver } = credentials; + if (driver === 'aws-data-api') { + dbUrl = `aws-data-api://${credentials.database}/${credentials.secretArn}/${credentials.resourceArn}`; + } else if (driver === 'pglite') { + dbUrl = credentials.url; + } else { + assertUnreachable(driver); + } + } else if ('url' in credentials) { + dbUrl = credentials.url; + } else { + dbUrl = + `postgresql://${credentials.user}:${credentials.password}@${credentials.host}:${credentials.port}/${credentials.database}`; + } + + const dbHash = createHash('sha256').update(dbUrl).digest('hex'); + + return { + dbHash, + dialect: 'postgresql', + driver: 'driver' in credentials ? 
credentials.driver : undefined, + proxy: db.proxy, + customDefaults, + schema: pgSchema, + relations, + schemaFiles, + }; +}; + +export const drizzleForMySQL = async ( + credentials: MysqlCredentials, + mysqlSchema: Record>, + relations: Record, + schemaFiles?: SchemaFile[], +): Promise => { + const { connectToMySQL } = await import('../cli/connections'); + const { proxy } = await connectToMySQL(credentials); + + const customDefaults = getCustomDefaults(mysqlSchema); + + let dbUrl: string; + + if ('url' in credentials) { + dbUrl = credentials.url; + } else { + dbUrl = + `mysql://${credentials.user}:${credentials.password}@${credentials.host}:${credentials.port}/${credentials.database}`; + } + + const dbHash = createHash('sha256').update(dbUrl).digest('hex'); + + return { + dbHash, + dialect: 'mysql', + proxy, + customDefaults, + schema: mysqlSchema, + relations, + schemaFiles, + }; +}; + +export const drizzleForSQLite = async ( + credentials: SqliteCredentials, + sqliteSchema: Record>, + relations: Record, + schemaFiles?: SchemaFile[], +): Promise => { + const { connectToSQLite } = await import('../cli/connections'); + + const sqliteDB = await connectToSQLite(credentials); + const customDefaults = getCustomDefaults(sqliteSchema); + + let dbUrl: string; + + if ('driver' in credentials) { + const { driver } = credentials; + if (driver === 'd1-http') { + dbUrl = `d1-http://${credentials.accountId}/${credentials.databaseId}/${credentials.token}`; + } else if (driver === 'turso') { + dbUrl = `turso://${credentials.url}/${credentials.authToken}`; + } else { + assertUnreachable(driver); + } + } else { + dbUrl = credentials.url; + } + + const dbHash = createHash('sha256').update(dbUrl).digest('hex'); + + return { + dbHash, + dialect: 'sqlite', + driver: 'driver' in credentials ? 
credentials.driver : undefined, + proxy: sqliteDB.proxy, + customDefaults, + schema: sqliteSchema, + relations, + schemaFiles, + }; +}; + +export const drizzleForSingleStore = async ( + credentials: SingleStoreCredentials, + singlestoreSchema: Record>, + relations: Record, + schemaFiles?: SchemaFile[], +): Promise => { + const { connectToSingleStore } = await import('../cli/connections'); + const { proxy } = await connectToSingleStore(credentials); + + const customDefaults = getCustomDefaults(singlestoreSchema); + + let dbUrl: string; + + if ('url' in credentials) { + dbUrl = credentials.url; + } else { + dbUrl = + `singlestore://${credentials.user}:${credentials.password}@${credentials.host}:${credentials.port}/${credentials.database}`; + } + + const dbHash = createHash('sha256').update(dbUrl).digest('hex'); + + return { + dbHash, + dialect: 'singlestore', + proxy, + customDefaults, + schema: singlestoreSchema, + relations, + schemaFiles, + }; +}; + +export const extractRelations = (tablesConfig: { + tables: TablesRelationalConfig; + tableNamesMap: Record; +}) => { + const relations = Object.values(tablesConfig.tables) + .map((it) => + Object.entries(it.relations).map(([name, relation]) => { + const normalized = normalizeRelation( + tablesConfig.tables, + tablesConfig.tableNamesMap, + relation, + ); + const rel = relation; + const refTableName = rel.referencedTableName; + const refTable = rel.referencedTable; + const fields = normalized.fields.map((it) => it.name).flat(); + const refColumns = normalized.references.map((it) => it.name).flat(); + + let refSchema: string | undefined; + if (is(refTable, PgTable)) { + refSchema = pgTableConfig(refTable).schema; + } else if (is(refTable, MySqlTable)) { + refSchema = mysqlTableConfig(refTable).schema; + } else if (is(refTable, SQLiteTable)) { + refSchema = undefined; + } else if (is(refTable, SingleStoreTable)) { + refSchema = singlestoreTableConfig(refTable).schema; + } else { + throw new Error('unsupported dialect'); + 
} + + let type: 'one' | 'many'; + if (is(rel, One)) { + type = 'one'; + } else if (is(rel, Many)) { + type = 'many'; + } else { + throw new Error('unsupported relation type'); + } + + return { + name, + type, + table: it.dbName, + schema: it.schema || 'public', + columns: fields, + refTable: refTableName, + refSchema: refSchema || 'public', + refColumns: refColumns, + }; + }) + ) + .flat(); + return relations; +}; + +const init = z.object({ + type: z.literal('init'), +}); + +const proxySchema = z.object({ + type: z.literal('proxy'), + data: z.object({ + sql: z.string(), + params: z.array(z.any()).optional(), + typings: z.string().array().optional(), + mode: z.enum(['array', 'object']).default('object'), + method: z.union([ + z.literal('values'), + z.literal('get'), + z.literal('all'), + z.literal('run'), + z.literal('execute'), + ]), + }), +}); + +const defaultsSchema = z.object({ + type: z.literal('defaults'), + data: z + .array( + z.object({ + schema: z.string(), + table: z.string(), + column: z.string(), + }), + ) + .min(1), +}); + +const schema = z.union([init, proxySchema, defaultsSchema]); + +superjson.registerCustom( + { + isApplicable: (v): v is Buffer => v instanceof Buffer, + serialize: (v) => [...v], + deserialize: (v) => Buffer.from(v), + }, + 'buffer', +); + +const jsonStringify = (data: any) => { + return JSON.stringify(data, (_key, value) => { + if (typeof value === 'bigint') { + return value.toString(); + } + + // Convert Buffer and ArrayBuffer to base64 + if ( + (value + && typeof value === 'object' + && 'type' in value + && 'data' in value + && value.type === 'Buffer') + || value instanceof ArrayBuffer + || value instanceof Buffer + ) { + return Buffer.from(value).toString('base64'); + } + + return value; + }); +}; + +export type Server = { + start: (params: { + host: string; + port: number; + key?: string; + cert?: string; + cb: (err: Error | null, address: string) => void; + }) => void; +}; + +export const prepareServer = async ( + { + dialect, 
+ driver, + proxy, + customDefaults, + schema: drizzleSchema, + relations, + dbHash, + schemaFiles, + }: Setup, + app?: Hono, +): Promise => { + app = app !== undefined ? app : new Hono(); + + app.use(cors()); + app.use(async (ctx, next) => { + await next(); + // * https://wicg.github.io/private-network-access/#headers + ctx.header('Access-Control-Allow-Private-Network', 'true'); + }); + app.onError((err, ctx) => { + console.error(err); + return ctx.json({ + status: 'error', + error: err.message, + }); + }); + + const relationalSchema: Record = { + ...Object.fromEntries( + Object.entries(drizzleSchema) + .map(([schemaName, schema]) => { + // have unique keys across schemas + const mappedTableEntries = Object.entries(schema).map( + ([tableName, table]) => { + return [`__${schemaName}__.${tableName}`, table]; + }, + ); + + return mappedTableEntries; + }) + .flat(), + ), + ...relations, + }; + + const relationsConfig = extractTablesRelationalConfig( + relationalSchema, + createTableRelationsHelpers, + ); + + app.post('/', zValidator('json', schema), async (c) => { + const body = c.req.valid('json'); + const { type } = body; + + if (type === 'init') { + const preparedDefaults = customDefaults.map((d) => ({ + schema: d.schema, + table: d.table, + column: d.column, + })); + + return c.json({ + version: '6', + dialect, + driver, + schemaFiles, + customDefaults: preparedDefaults, + relations: extractRelations(relationsConfig), + dbHash, + }); + } + + if (type === 'proxy') { + const result = await proxy({ + ...body.data, + params: body.data.params || [], + }); + return c.json(JSON.parse(jsonStringify(result))); + } + + if (type === 'defaults') { + const columns = body.data; + + const result = columns.map((column) => { + const found = customDefaults.find((d) => { + return ( + d.schema === column.schema + && d.table === column.table + && d.column === column.column + ); + }); + + if (!found) { + throw new Error( + `Custom default not found for 
${column.schema}.${column.table}.${column.column}`, + ); + } + + const value = found.func(); + + return { + ...column, + value, + }; + }); + + return c.json(JSON.parse(jsonStringify(result))); + } + + throw new Error(`Unknown type: ${type}`); + }); + + return { + start: (params: Parameters[0]) => { + serve( + { + fetch: app!.fetch, + createServer: params.key ? createServer : undefined, + hostname: params.host, + port: params.port, + serverOptions: { + key: params.key, + cert: params.cert, + }, + }, + () => params.cb(null, `${params.host}:${params.port}`), + ); + }, + }; +}; diff --git a/drizzle-kit/src/simulator.ts b/drizzle-kit/src/simulator.ts new file mode 100644 index 000000000..71dbac1aa --- /dev/null +++ b/drizzle-kit/src/simulator.ts @@ -0,0 +1,157 @@ +declare global { + interface Array { + exactlyOne(): T; + } +} + +Array.prototype.exactlyOne = function() { + if (this.length !== 1) { + return undefined; + } + return this[0]; +}; + +interface TablesHandler { + can(added: T[], removed: T[]): boolean; + handle(added: T[], removed: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T }[] }; +} + +interface ColumnsHandler { + can(tableName: string, added: T[], removed: T[]): boolean; + handle( + tableName: string, + added: T[], + removed: T[], + ): { tableName: string; created: T[]; deleted: T[]; renamed: { from: T; to: T }[] }; +} + +class DryRun implements TablesHandler { + can(added: T[], removed: T[]): boolean { + return added.length === 0 && removed.length === 0; + } + handle(added: T[], _: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { + return { created: added, deleted: [], renamed: [] }; + } +} + +// class Fallback implements Handler { +// can(_: Table[], __: Table[]): boolean { +// return true +// } +// handle(added: Table[], _: Table[]): { created: Table[]; deleted: Table[]; renamed: { from: Table; to: Table; }[]; } { +// return { created: added, deleted: , renamed: [] } +// } +// } + +class Case1 implements 
TablesHandler { + can(_: T[], removed: T[]): boolean { + return removed.length === 1 && removed[0].name === 'citiess'; + } + + handle(added: T[], removed: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { + return { created: added, deleted: removed, renamed: [] }; + } +} +class Case2 implements TablesHandler { + // authOtp, deleted, users -> authOtp renamed, cities added, deleted deleted + can(_: T[], removed: T[]): boolean { + return removed.length === 3 && removed[0].name === 'auth_otp'; + } + + handle(added: T[], removed: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { + return { created: added.slice(1), deleted: removed.slice(1), renamed: [{ from: removed[0], to: added[0] }] }; + } +} + +type Named = { name: string }; + +const handlers: TablesHandler[] = []; +handlers.push(new Case1()); +handlers.push(new Case2()); +handlers.push(new DryRun()); + +export const resolveTables = (added: T[], removed: T[]) => { + const handler = handlers.filter((it) => { + return it.can(added, removed); + }).exactlyOne(); + + if (!handler) { + console.log('added', added.map((it) => it.name).join()); + console.log('removed', removed.map((it) => it.name).join()); + throw new Error('No handler'); + } + + console.log(`Simluated by ${handler.constructor.name}`); + return handler.handle(added, removed); +}; +class LehaColumnsHandler implements ColumnsHandler { + can(tableName: string, _: T[], __: T[]): boolean { + return tableName === 'users'; + } + + handle( + tableName: string, + added: T[], + removed: T[], + ): { tableName: string; created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { + return { tableName, created: [], deleted: [], renamed: [{ from: removed[0], to: added[0] }] }; + } +} + +class DryRunColumnsHandler implements ColumnsHandler { + can(tableName: string, _: T[], __: T[]): boolean { + return true; + } + + handle( + tableName: string, + added: T[], + removed: T[], + ): { tableName: string; created: T[]; deleted: T[]; 
renamed: { from: T; to: T }[] } { + return { tableName, created: added, deleted: removed, renamed: [] }; + } +} + +class V1V2AuthOtpColumnsHandler implements ColumnsHandler { + can(tableName: string, _: T[], __: T[]): boolean { + return tableName === 'auth_otp'; + } + + handle( + tableName: string, + added: T[], + removed: T[], + ): { tableName: string; created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { + const phonePrev = removed.filter((it) => it.name === 'phone')[0]; + const phoneNew = added.filter((it) => it.name === 'phone1')[0]; + + const newAdded = added.filter((it) => it.name !== 'phone1'); + const newRemoved = removed.filter((it) => it.name !== 'phone'); + + return { tableName, created: newAdded, deleted: newRemoved, renamed: [{ from: phonePrev, to: phoneNew }] }; + } + + // handle(tableName:string, added: T[], _: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T; }[]; } { + // return { created: added, deleted: [], renamed: [] } + // } +} + +const columnsHandlers: ColumnsHandler[] = []; +columnsHandlers.push(new V1V2AuthOtpColumnsHandler()); +columnsHandlers.push(new LehaColumnsHandler()); +columnsHandlers.push(new DryRunColumnsHandler()); + +export const resolveColumns = (tableName: string, added: T[], removed: T[]) => { + const handler = columnsHandlers.filter((it) => { + return it.can(tableName, added, removed); + })[0]; + + if (!handler) { + console.log('added', added.map((it) => it.name).join()); + console.log('removed', removed.map((it) => it.name).join()); + throw new Error('No columns handler for table: ' + tableName); + } + + console.log(`${tableName} columns simluated by ${handler.constructor.name}`); + return handler.handle(tableName, added, removed); +}; diff --git a/drizzle-kit/src/snapshotsDiffer.ts b/drizzle-kit/src/snapshotsDiffer.ts new file mode 100644 index 000000000..11d126013 --- /dev/null +++ b/drizzle-kit/src/snapshotsDiffer.ts @@ -0,0 +1,2494 @@ +import { + any, + array, + boolean, + enum as enumType, + 
literal, + never, + object, + record, + string, + TypeOf, + union, + ZodTypeAny, +} from 'zod'; +import { applyJsonDiff, diffColumns, diffSchemasOrTables } from './jsonDiffer'; +import { fromJson } from './sqlgenerator'; + +import { + _prepareAddColumns, + _prepareDropColumns, + _prepareSqliteAddColumns, + JsonAddColumnStatement, + JsonAlterCompositePK, + JsonAlterTableSetSchema, + JsonAlterUniqueConstraint, + JsonCreateCompositePK, + JsonCreateReferenceStatement, + JsonCreateUniqueConstraint, + JsonDeleteCompositePK, + JsonDeleteUniqueConstraint, + JsonDropColumnStatement, + JsonReferenceStatement, + JsonRenameColumnStatement, + JsonSqliteAddColumnStatement, + JsonStatement, + prepareAddCompositePrimaryKeyMySql, + prepareAddCompositePrimaryKeyPg, + prepareAddCompositePrimaryKeySingleStore, + prepareAddCompositePrimaryKeySqlite, + prepareAddUniqueConstraintPg as prepareAddUniqueConstraint, + prepareAddValuesToEnumJson, + prepareAlterColumnsMysql, + prepareAlterColumnsSingleStore, + prepareAlterCompositePrimaryKeyMySql, + prepareAlterCompositePrimaryKeyPg, + prepareAlterCompositePrimaryKeySingleStore, + prepareAlterCompositePrimaryKeySqlite, + prepareAlterReferencesJson, + prepareAlterSequenceJson, + prepareCreateEnumJson, + prepareCreateIndexesJson, + prepareCreateReferencesJson, + prepareCreateSchemasJson, + prepareCreateSequenceJson, + prepareDeleteCompositePrimaryKeyMySql, + prepareDeleteCompositePrimaryKeyPg, + prepareDeleteCompositePrimaryKeySingleStore, + prepareDeleteCompositePrimaryKeySqlite, + prepareDeleteSchemasJson as prepareDropSchemasJson, + prepareDeleteUniqueConstraintPg as prepareDeleteUniqueConstraint, + prepareDropEnumJson, + prepareDropIndexesJson, + prepareDropReferencesJson, + prepareDropSequenceJson, + prepareDropTableJson, + prepareMoveEnumJson, + prepareMoveSequenceJson, + prepareMySqlCreateTableJson, + preparePgAlterColumns, + preparePgCreateIndexesJson, + preparePgCreateTableJson, + prepareRenameColumns, + prepareRenameEnumJson, + 
prepareRenameSchemasJson, + prepareRenameSequenceJson, + prepareRenameTableJson, + prepareSingleStoreCreateTableJson, + prepareSqliteAlterColumns, + prepareSQLiteCreateTable, +} from './jsonStatements'; + +import { Named, NamedWithSchema } from './cli/commands/migrate'; +import { mapEntries, mapKeys, mapValues } from './global'; +import { MySqlSchema, MySqlSchemaSquashed, MySqlSquasher } from './serializer/mysqlSchema'; +import { PgSchema, PgSchemaSquashed, sequenceSquashed } from './serializer/pgSchema'; +import { SingleStoreSchema, SingleStoreSchemaSquashed, SingleStoreSquasher } from './serializer/singlestoreSchema'; +import { SQLiteSchema, SQLiteSchemaSquashed, SQLiteSquasher } from './serializer/sqliteSchema'; +import { copy, prepareMigrationMeta } from './utils'; + +const makeChanged = (schema: T) => { + return object({ + type: enumType(['changed']), + old: schema, + new: schema, + }); +}; + +const makeSelfOrChanged = (schema: T) => { + return union([ + schema, + object({ + type: enumType(['changed']), + old: schema, + new: schema, + }), + ]); +}; + +export const makePatched = (schema: T) => { + return union([ + object({ + type: literal('added'), + value: schema, + }), + object({ + type: literal('deleted'), + value: schema, + }), + object({ + type: literal('changed'), + old: schema, + new: schema, + }), + ]); +}; + +export const makeSelfOrPatched = (schema: T) => { + return union([ + object({ + type: literal('none'), + value: schema, + }), + object({ + type: literal('added'), + value: schema, + }), + object({ + type: literal('deleted'), + value: schema, + }), + object({ + type: literal('changed'), + old: schema, + new: schema, + }), + ]); +}; + +const columnSchema = object({ + name: string(), + type: string(), + typeSchema: string().optional(), + primaryKey: boolean().optional(), + default: any().optional(), + notNull: boolean().optional(), + // should it be optional? should if be here? 
+ autoincrement: boolean().optional(), + onUpdate: boolean().optional(), + isUnique: any().optional(), + uniqueName: string().optional(), + nullsNotDistinct: boolean().optional(), + generated: object({ + as: string(), + type: enumType(['stored', 'virtual']).default('stored'), + }).optional(), + identity: string().optional(), +}).strict(); + +const alteredColumnSchema = object({ + name: makeSelfOrChanged(string()), + type: makeChanged(string()).optional(), + default: makePatched(any()).optional(), + primaryKey: makePatched(boolean()).optional(), + notNull: makePatched(boolean()).optional(), + typeSchema: makePatched(string()).optional(), + onUpdate: makePatched(boolean()).optional(), + autoincrement: makePatched(boolean()).optional(), + generated: makePatched( + object({ + as: string(), + type: enumType(['stored', 'virtual']).default('stored'), + }), + ).optional(), + + identity: makePatched(string()).optional(), +}).strict(); + +const enumSchema = object({ + name: string(), + schema: string(), + values: array(string()), +}).strict(); + +const changedEnumSchema = object({ + name: string(), + schema: string(), + addedValues: object({ + before: string(), + value: string(), + }).array(), + deletedValues: array(string()), +}).strict(); + +const tableScheme = object({ + name: string(), + schema: string().default(''), + columns: record(string(), columnSchema), + indexes: record(string(), string()), + foreignKeys: record(string(), string()), + compositePrimaryKeys: record(string(), string()).default({}), + uniqueConstraints: record(string(), string()).default({}), +}).strict(); + +export const alteredTableScheme = object({ + name: string(), + schema: string(), + altered: alteredColumnSchema.array(), + addedIndexes: record(string(), string()), + deletedIndexes: record(string(), string()), + alteredIndexes: record( + string(), + object({ + __new: string(), + __old: string(), + }).strict(), + ), + addedForeignKeys: record(string(), string()), + deletedForeignKeys: 
record(string(), string()), + alteredForeignKeys: record( + string(), + object({ + __new: string(), + __old: string(), + }).strict(), + ), + addedCompositePKs: record(string(), string()), + deletedCompositePKs: record(string(), string()), + alteredCompositePKs: record( + string(), + object({ + __new: string(), + __old: string(), + }), + ), + addedUniqueConstraints: record(string(), string()), + deletedUniqueConstraints: record(string(), string()), + alteredUniqueConstraints: record( + string(), + object({ + __new: string(), + __old: string(), + }), + ), +}).strict(); + +export const diffResultScheme = object({ + alteredTablesWithColumns: alteredTableScheme.array(), + alteredEnums: changedEnumSchema.array(), + alteredSequences: sequenceSquashed.array(), +}).strict(); + +export const diffResultSchemeMysql = object({ + alteredTablesWithColumns: alteredTableScheme.array(), + alteredEnums: never().array(), +}); + +export const diffResultSchemeSingleStore = object({ + alteredTablesWithColumns: alteredTableScheme.array(), + alteredEnums: never().array(), +}); + +export const diffResultSchemeSQLite = object({ + alteredTablesWithColumns: alteredTableScheme.array(), + alteredEnums: never().array(), +}); + +export type Column = TypeOf; +export type AlteredColumn = TypeOf; +export type Enum = TypeOf; +export type Sequence = TypeOf; +export type Table = TypeOf; +export type AlteredTable = TypeOf; +export type DiffResult = TypeOf; +export type DiffResultMysql = TypeOf; +export type DiffResultSingleStore = TypeOf; +export type DiffResultSQLite = TypeOf; + +export interface ResolverInput { + created: T[]; + deleted: T[]; +} + +export interface ResolverOutput { + created: T[]; + renamed: { from: T; to: T }[]; + deleted: T[]; +} + +export interface ResolverOutputWithMoved { + created: T[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: T; to: T }[]; + deleted: T[]; +} + +export interface ColumnsResolverInput { + tableName: string; + schema: 
string; + created: T[]; + deleted: T[]; +} + +export interface ColumnsResolverOutput { + tableName: string; + schema: string; + created: T[]; + renamed: { from: T; to: T }[]; + deleted: T[]; +} + +const schemaChangeFor = ( + table: NamedWithSchema, + renamedSchemas: { from: Named; to: Named }[], +) => { + for (let ren of renamedSchemas) { + if (table.schema === ren.from.name) { + return { key: `${ren.to.name}.${table.name}`, schema: ren.to.name }; + } + } + + return { + key: `${table.schema || 'public'}.${table.name}`, + schema: table.schema, + }; +}; + +const nameChangeFor = (table: Named, renamed: { from: Named; to: Named }[]) => { + for (let ren of renamed) { + if (table.name === ren.from.name) { + return { name: ren.to.name }; + } + } + + return { + name: table.name, + }; +}; + +const nameSchemaChangeFor = ( + table: NamedWithSchema, + renamedTables: { from: NamedWithSchema; to: NamedWithSchema }[], +) => { + for (let ren of renamedTables) { + if (table.name === ren.from.name && table.schema === ren.from.schema) { + return { + key: `${ren.to.schema || 'public'}.${ren.to.name}`, + name: ren.to.name, + schema: ren.to.schema, + }; + } + } + + return { + key: `${table.schema || 'public'}.${table.name}`, + name: table.name, + schema: table.schema, + }; +}; + +const columnChangeFor = ( + column: string, + renamedColumns: { from: Named; to: Named }[], +) => { + for (let ren of renamedColumns) { + if (column === ren.from.name) { + return ren.to.name; + } + } + + return column; +}; + +export const applyPgSnapshotsDiff = async ( + json1: PgSchemaSquashed, + json2: PgSchemaSquashed, + schemasResolver: ( + input: ResolverInput, + ) => Promise>, + enumsResolver: ( + input: ResolverInput, + ) => Promise>, + sequencesResolver: ( + input: ResolverInput, + ) => Promise>, + tablesResolver: ( + input: ResolverInput
, + ) => Promise>, + columnsResolver: ( + input: ColumnsResolverInput, + ) => Promise>, + prevFull: PgSchema, + curFull: PgSchema, + action?: 'push' | undefined, +): Promise<{ + statements: JsonStatement[]; + sqlStatements: string[]; + _meta: + | { + schemas: {}; + tables: {}; + columns: {}; + } + | undefined; +}> => { + const schemasDiff = diffSchemasOrTables(json1.schemas, json2.schemas); + + const { + created: createdSchemas, + deleted: deletedSchemas, + renamed: renamedSchemas, + } = await schemasResolver({ + created: schemasDiff.added.map((it) => ({ name: it })), + deleted: schemasDiff.deleted.map((it) => ({ name: it })), + }); + + const schemasPatchedSnap1 = copy(json1); + schemasPatchedSnap1.tables = mapEntries( + schemasPatchedSnap1.tables, + (_, it) => { + const { key, schema } = schemaChangeFor(it, renamedSchemas); + it.schema = schema; + return [key, it]; + }, + ); + + schemasPatchedSnap1.enums = mapEntries(schemasPatchedSnap1.enums, (_, it) => { + const { key, schema } = schemaChangeFor(it, renamedSchemas); + it.schema = schema; + return [key, it]; + }); + + const enumsDiff = diffSchemasOrTables(schemasPatchedSnap1.enums, json2.enums); + + const { + created: createdEnums, + deleted: deletedEnums, + renamed: renamedEnums, + moved: movedEnums, + } = await enumsResolver({ + created: enumsDiff.added, + deleted: enumsDiff.deleted, + }); + + schemasPatchedSnap1.enums = mapEntries(schemasPatchedSnap1.enums, (_, it) => { + const { key, name, schema } = nameSchemaChangeFor(it, renamedEnums); + it.name = name; + it.schema = schema; + return [key, it]; + }); + + const columnTypesChangeMap = renamedEnums.reduce( + (acc, it) => { + acc[`${it.from.schema}.${it.from.name}`] = { + nameFrom: it.from.name, + nameTo: it.to.name, + schemaFrom: it.from.schema, + schemaTo: it.to.schema, + }; + return acc; + }, + {} as Record< + string, + { + nameFrom: string; + nameTo: string; + schemaFrom: string; + schemaTo: string; + } + >, + ); + + const columnTypesMovesMap = 
movedEnums.reduce( + (acc, it) => { + acc[`${it.schemaFrom}.${it.name}`] = { + nameFrom: it.name, + nameTo: it.name, + schemaFrom: it.schemaFrom, + schemaTo: it.schemaTo, + }; + return acc; + }, + {} as Record< + string, + { + nameFrom: string; + nameTo: string; + schemaFrom: string; + schemaTo: string; + } + >, + ); + + schemasPatchedSnap1.tables = mapEntries( + schemasPatchedSnap1.tables, + (tableKey, tableValue) => { + const patchedColumns = mapValues(tableValue.columns, (column) => { + const key = `${column.typeSchema || 'public'}.${column.type}`; + const change = columnTypesChangeMap[key] || columnTypesMovesMap[key]; + + if (change) { + column.type = change.nameTo; + column.typeSchema = change.schemaTo; + } + + return column; + }); + + tableValue.columns = patchedColumns; + return [tableKey, tableValue]; + }, + ); + + schemasPatchedSnap1.sequences = mapEntries( + schemasPatchedSnap1.sequences, + (_, it) => { + const { key, schema } = schemaChangeFor(it, renamedSchemas); + it.schema = schema; + return [key, it]; + }, + ); + + const sequencesDiff = diffSchemasOrTables( + schemasPatchedSnap1.sequences, + json2.sequences, + ); + + const { + created: createdSequences, + deleted: deletedSequences, + renamed: renamedSequences, + moved: movedSequences, + } = await sequencesResolver({ + created: sequencesDiff.added, + deleted: sequencesDiff.deleted, + }); + + schemasPatchedSnap1.sequences = mapEntries( + schemasPatchedSnap1.sequences, + (_, it) => { + const { key, name, schema } = nameSchemaChangeFor(it, renamedSequences); + it.name = name; + it.schema = schema; + return [key, it]; + }, + ); + + const sequencesChangeMap = renamedSequences.reduce( + (acc, it) => { + acc[`${it.from.schema}.${it.from.name}`] = { + nameFrom: it.from.name, + nameTo: it.to.name, + schemaFrom: it.from.schema, + schemaTo: it.to.schema, + }; + return acc; + }, + {} as Record< + string, + { + nameFrom: string; + nameTo: string; + schemaFrom: string; + schemaTo: string; + } + >, + ); + + const 
sequencesMovesMap = movedSequences.reduce( + (acc, it) => { + acc[`${it.schemaFrom}.${it.name}`] = { + nameFrom: it.name, + nameTo: it.name, + schemaFrom: it.schemaFrom, + schemaTo: it.schemaTo, + }; + return acc; + }, + {} as Record< + string, + { + nameFrom: string; + nameTo: string; + schemaFrom: string; + schemaTo: string; + } + >, + ); + + schemasPatchedSnap1.tables = mapEntries( + schemasPatchedSnap1.tables, + (tableKey, tableValue) => { + const patchedColumns = mapValues(tableValue.columns, (column) => { + const key = `${column.typeSchema || 'public'}.${column.type}`; + const change = sequencesChangeMap[key] || sequencesMovesMap[key]; + + if (change) { + column.type = change.nameTo; + column.typeSchema = change.schemaTo; + } + + return column; + }); + + tableValue.columns = patchedColumns; + return [tableKey, tableValue]; + }, + ); + + const tablesDiff = diffSchemasOrTables( + schemasPatchedSnap1.tables as Record, + json2.tables, + ); + + const { + created: createdTables, + deleted: deletedTables, + moved: movedTables, + renamed: renamedTables, // renamed or moved + } = await tablesResolver({ + created: tablesDiff.added, + deleted: tablesDiff.deleted, + }); + + const tablesPatchedSnap1 = copy(schemasPatchedSnap1); + tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { + const { key, name, schema } = nameSchemaChangeFor(it, renamedTables); + it.name = name; + it.schema = schema; + return [key, it]; + }); + + const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); + + const columnRenames = [] as { + table: string; + schema: string; + renames: { from: Column; to: Column }[]; + }[]; + + const columnCreates = [] as { + table: string; + schema: string; + columns: Column[]; + }[]; + + const columnDeletes = [] as { + table: string; + schema: string; + columns: Column[]; + }[]; + + for (let entry of Object.values(res)) { + const { renamed, created, deleted } = await columnsResolver({ + tableName: entry.name, + schema: entry.schema, 
+ deleted: entry.columns.deleted, + created: entry.columns.added, + }); + + if (created.length > 0) { + columnCreates.push({ + table: entry.name, + schema: entry.schema, + columns: created, + }); + } + + if (deleted.length > 0) { + columnDeletes.push({ + table: entry.name, + schema: entry.schema, + columns: deleted, + }); + } + + if (renamed.length > 0) { + columnRenames.push({ + table: entry.name, + schema: entry.schema, + renames: renamed, + }); + } + } + + const columnRenamesDict = columnRenames.reduce( + (acc, it) => { + acc[`${it.schema || 'public'}.${it.table}`] = it.renames; + return acc; + }, + {} as Record< + string, + { + from: Named; + to: Named; + }[] + >, + ); + + const columnsPatchedSnap1 = copy(tablesPatchedSnap1); + columnsPatchedSnap1.tables = mapEntries( + columnsPatchedSnap1.tables, + (tableKey, tableValue) => { + const patchedColumns = mapKeys( + tableValue.columns, + (columnKey, column) => { + const rens = columnRenamesDict[ + `${tableValue.schema || 'public'}.${tableValue.name}` + ] || []; + + const newName = columnChangeFor(columnKey, rens); + column.name = newName; + return newName; + }, + ); + + tableValue.columns = patchedColumns; + return [tableKey, tableValue]; + }, + ); + + const diffResult = applyJsonDiff(columnsPatchedSnap1, json2); + + // no diffs + const typedResult: DiffResult = diffResultScheme.parse(diffResult); + // const typedResult: DiffResult = {}; + + const jsonStatements: JsonStatement[] = []; + + const jsonCreateIndexesForCreatedTables = createdTables + .map((it) => { + return preparePgCreateIndexesJson( + it.name, + it.schema, + it.indexes, + curFull, + action, + ); + }) + .flat(); + + const jsonDropTables = deletedTables.map((it) => { + return prepareDropTableJson(it); + }); + + const jsonRenameTables = renamedTables.map((it) => { + return prepareRenameTableJson(it.from, it.to); + }); + + const alteredTables = typedResult.alteredTablesWithColumns; + + const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = []; + 
const jsonDropColumnsStatemets: JsonDropColumnStatement[] = []; + const jsonAddColumnsStatemets: JsonAddColumnStatement[] = []; + + for (let it of columnRenames) { + jsonRenameColumnsStatements.push( + ...prepareRenameColumns(it.table, it.schema, it.renames), + ); + } + + for (let it of columnDeletes) { + jsonDropColumnsStatemets.push( + ..._prepareDropColumns(it.table, it.schema, it.columns), + ); + } + + for (let it of columnCreates) { + jsonAddColumnsStatemets.push( + ..._prepareAddColumns(it.table, it.schema, it.columns), + ); + } + + const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; + const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; + const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; + + const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + + const jsonSetTableSchemas: JsonAlterTableSetSchema[] = []; + + for (let it of movedTables) { + jsonSetTableSchemas.push({ + type: 'alter_table_set_schema', + tableName: it.name, + schemaFrom: it.schemaFrom || 'public', + schemaTo: it.schemaTo || 'public', + }); + } + + for (let it of alteredTables) { + // This part is needed to make sure that same columns in a table are not triggered for change + // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name + // We double-check that pk with same set of columns are both in added and deleted diffs + let addedColumns: string[] = []; + for (const addedPkName of Object.keys(it.addedCompositePKs)) { + const addedPkColumns = it.addedCompositePKs[addedPkName]; + addedColumns = SQLiteSquasher.unsquashPK(addedPkColumns); + } + + let deletedColumns: string[] = []; + for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { + const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; + deletedColumns = 
SQLiteSquasher.unsquashPK(deletedPkColumns); + } + + // Don't need to sort, but need to add tests for it + // addedColumns.sort(); + // deletedColumns.sort(); + const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns); + + let addedCompositePKs: JsonCreateCompositePK[] = []; + let deletedCompositePKs: JsonDeleteCompositePK[] = []; + let alteredCompositePKs: JsonAlterCompositePK[] = []; + if (doPerformDeleteAndCreate) { + addedCompositePKs = prepareAddCompositePrimaryKeyPg( + it.name, + it.schema, + it.addedCompositePKs, + curFull as PgSchema, + ); + deletedCompositePKs = prepareDeleteCompositePrimaryKeyPg( + it.name, + it.schema, + it.deletedCompositePKs, + prevFull as PgSchema, + ); + } + alteredCompositePKs = prepareAlterCompositePrimaryKeyPg( + it.name, + it.schema, + it.alteredCompositePKs, + prevFull as PgSchema, + curFull as PgSchema, + ); + + // add logic for unique constraints + let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + + addedUniqueConstraints = prepareAddUniqueConstraint( + it.name, + it.schema, + it.addedUniqueConstraints, + ); + deletedUniqueConstraints = prepareDeleteUniqueConstraint( + it.name, + it.schema, + it.deletedUniqueConstraints, + ); + if (it.alteredUniqueConstraints) { + const added: Record = {}; + const deleted: Record = {}; + for (const k of Object.keys(it.alteredUniqueConstraints)) { + added[k] = it.alteredUniqueConstraints[k].__new; + deleted[k] = it.alteredUniqueConstraints[k].__old; + } + addedUniqueConstraints.push( + ...prepareAddUniqueConstraint(it.name, it.schema, added), + ); + deletedUniqueConstraints.push( + ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), + ); + } + + jsonAddedCompositePKs.push(...addedCompositePKs); + jsonDeletedCompositePKs.push(...deletedCompositePKs); + 
jsonAlteredCompositePKs.push(...alteredCompositePKs); + + jsonAddedUniqueConstraints.push(...addedUniqueConstraints); + jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); + jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); + } + + const rColumns = jsonRenameColumnsStatements.map((it) => { + const tableName = it.tableName; + const schema = it.schema; + return { + from: { schema, table: tableName, column: it.oldColumnName }, + to: { schema, table: tableName, column: it.newColumnName }, + }; + }); + + const jsonTableAlternations = alteredTables + .map((it) => { + return preparePgAlterColumns( + it.name, + it.schema, + it.altered, + json2, + action, + ); + }) + .flat(); + + const jsonCreateIndexesFoAlteredTables = alteredTables + .map((it) => { + return preparePgCreateIndexesJson( + it.name, + it.schema, + it.addedIndexes || {}, + curFull, + action, + ); + }) + .flat(); + + const jsonDropIndexesForAllAlteredTables = alteredTables + .map((it) => { + return prepareDropIndexesJson( + it.name, + it.schema, + it.deletedIndexes || {}, + ); + }) + .flat(); + + alteredTables.forEach((it) => { + const droppedIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + current[item] = it.alteredIndexes[item].__old; + return current; + }, + {} as Record, + ); + const createdIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + current[item] = it.alteredIndexes[item].__new; + return current; + }, + {} as Record, + ); + + jsonCreateIndexesFoAlteredTables.push( + ...preparePgCreateIndexesJson( + it.name, + it.schema, + createdIndexes || {}, + curFull, + action, + ), + ); + jsonDropIndexesForAllAlteredTables.push( + ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), + ); + }); + + const jsonCreateReferencesForCreatedTables: JsonCreateReferenceStatement[] = createdTables + .map((it) => { + return prepareCreateReferencesJson(it.name, it.schema, it.foreignKeys); + }) + .flat(); + + const 
jsonReferencesForAlteredTables: JsonReferenceStatement[] = alteredTables + .map((it) => { + const forAdded = prepareCreateReferencesJson( + it.name, + it.schema, + it.addedForeignKeys, + ); + + const forAltered = prepareDropReferencesJson( + it.name, + it.schema, + it.deletedForeignKeys, + ); + + const alteredFKs = prepareAlterReferencesJson( + it.name, + it.schema, + it.alteredForeignKeys, + ); + + return [...forAdded, ...forAltered, ...alteredFKs]; + }) + .flat(); + + const jsonCreatedReferencesForAlteredTables = jsonReferencesForAlteredTables.filter((t) => + t.type === 'create_reference' + ); + + const jsonDroppedReferencesForAlteredTables = jsonReferencesForAlteredTables.filter((t) => + t.type === 'delete_reference' + ); + + // Sequences + // - create sequence ✅ + // - create sequence inside schema ✅ + // - rename sequence ✅ + // - change sequence schema ✅ + // - change sequence schema + name ✅ + // - drop sequence - check if sequence is in use. If yes - ??? + // - change sequence values ✅ + + // Generated columns + // - add generated + // - drop generated + // - create table with generated + // - alter - should be not triggered, but should get warning + + // TODO: + // let hasEnumValuesDeletions = false; + // let enumValuesDeletions: { name: string; schema: string; values: string[] }[] = + // []; + // for (let alteredEnum of typedResult.alteredEnums) { + // if (alteredEnum.deletedValues.length > 0) { + // hasEnumValuesDeletions = true; + // enumValuesDeletions.push({ + // name: alteredEnum.name, + // schema: alteredEnum.schema, + // values: alteredEnum.deletedValues, + // }); + // } + // } + // if (hasEnumValuesDeletions) { + // console.log(error("Deletion of enum values is prohibited in Postgres - see here")); + // for(let entry of enumValuesDeletions){ + // console.log(error(`You're trying to delete ${chalk.blue(`[${entry.values.join(", ")}]`)} values from ${chalk.blue(`${entry.schema}.${entry.name}`)}`)) + // } + // } + // if (hasEnumValuesDeletions && 
action === "push") { + // process.exit(1); + // } + + const createEnums = createdEnums.map((it) => { + return prepareCreateEnumJson(it.name, it.schema, it.values); + }) ?? []; + + const dropEnums = deletedEnums.map((it) => { + return prepareDropEnumJson(it.name, it.schema); + }); + + const moveEnums = movedEnums.map((it) => { + return prepareMoveEnumJson(it.name, it.schemaFrom, it.schemaTo); + }); + + const renameEnums = renamedEnums.map((it) => { + return prepareRenameEnumJson(it.from.name, it.to.name, it.to.schema); + }); + + // todo: block enum rename, enum value rename and enun deletion for now + const jsonAlterEnumsWithAddedValues = typedResult.alteredEnums + .map((it) => { + return prepareAddValuesToEnumJson(it.name, it.schema, it.addedValues); + }) + .flat() ?? []; + + /////////// + + const createSequences = createdSequences.map((it) => { + return prepareCreateSequenceJson(it); + }) ?? []; + + const dropSequences = deletedSequences.map((it) => { + return prepareDropSequenceJson(it.name, it.schema); + }); + + const moveSequences = movedSequences.map((it) => { + return prepareMoveSequenceJson(it.name, it.schemaFrom, it.schemaTo); + }); + + const renameSequences = renamedSequences.map((it) => { + return prepareRenameSequenceJson(it.from.name, it.to.name, it.to.schema); + }); + + const jsonAlterSequences = typedResult.alteredSequences + .map((it) => { + return prepareAlterSequenceJson(it); + }) + .flat() ?? 
[]; + + //////////// + + const createSchemas = prepareCreateSchemasJson( + createdSchemas.map((it) => it.name), + ); + + const renameSchemas = prepareRenameSchemasJson( + renamedSchemas.map((it) => ({ from: it.from.name, to: it.to.name })), + ); + + const dropSchemas = prepareDropSchemasJson( + deletedSchemas.map((it) => it.name), + ); + + const createTables = createdTables.map((it) => { + return preparePgCreateTableJson(it, curFull); + }); + + jsonStatements.push(...createSchemas); + jsonStatements.push(...renameSchemas); + jsonStatements.push(...createEnums); + jsonStatements.push(...moveEnums); + jsonStatements.push(...renameEnums); + jsonStatements.push(...jsonAlterEnumsWithAddedValues); + + jsonStatements.push(...createSequences); + jsonStatements.push(...moveSequences); + jsonStatements.push(...renameSequences); + jsonStatements.push(...jsonAlterSequences); + + jsonStatements.push(...createTables); + + jsonStatements.push(...jsonDropTables); + jsonStatements.push(...jsonSetTableSchemas); + jsonStatements.push(...jsonRenameTables); + jsonStatements.push(...jsonRenameColumnsStatements); + + jsonStatements.push(...jsonDeletedUniqueConstraints); + + jsonStatements.push(...jsonDroppedReferencesForAlteredTables); + + // Will need to drop indexes before changing any columns in table + // Then should go column alternations and then index creation + jsonStatements.push(...jsonDropIndexesForAllAlteredTables); + + jsonStatements.push(...jsonDeletedCompositePKs); + jsonStatements.push(...jsonTableAlternations); + jsonStatements.push(...jsonAddedCompositePKs); + jsonStatements.push(...jsonAddColumnsStatemets); + + jsonStatements.push(...jsonCreateReferencesForCreatedTables); + jsonStatements.push(...jsonCreateIndexesForCreatedTables); + + jsonStatements.push(...jsonCreatedReferencesForAlteredTables); + jsonStatements.push(...jsonCreateIndexesFoAlteredTables); + + jsonStatements.push(...jsonDropColumnsStatemets); + jsonStatements.push(...jsonAlteredCompositePKs); + + 
jsonStatements.push(...jsonAddedUniqueConstraints); + + jsonStatements.push(...jsonAlteredUniqueConstraints); + + jsonStatements.push(...dropEnums); + jsonStatements.push(...dropSequences); + jsonStatements.push(...dropSchemas); + + // generate filters + const filteredJsonStatements = jsonStatements.filter((st) => { + if (st.type === 'alter_table_alter_column_drop_notnull') { + if ( + jsonStatements.find( + (it) => + it.type === 'alter_table_alter_column_drop_identity' + && it.tableName === st.tableName + && it.schema === st.schema, + ) + ) { + return false; + } + } + if (st.type === 'alter_table_alter_column_set_notnull') { + if ( + jsonStatements.find( + (it) => + it.type === 'alter_table_alter_column_set_identity' + && it.tableName === st.tableName + && it.schema === st.schema, + ) + ) { + return false; + } + } + return true; + }); + + const sqlStatements = fromJson(filteredJsonStatements, 'postgresql'); + + const uniqueSqlStatements: string[] = []; + sqlStatements.forEach((ss) => { + if (!uniqueSqlStatements.includes(ss)) { + uniqueSqlStatements.push(ss); + } + }); + + const rSchemas = renamedSchemas.map((it) => ({ + from: it.from.name, + to: it.to.name, + })); + + const rTables = renamedTables.map((it) => { + return { from: it.from, to: it.to }; + }); + + const _meta = prepareMigrationMeta(rSchemas, rTables, rColumns); + + return { + statements: filteredJsonStatements, + sqlStatements: uniqueSqlStatements, + _meta, + }; +}; + +export const applyMysqlSnapshotsDiff = async ( + json1: MySqlSchemaSquashed, + json2: MySqlSchemaSquashed, + tablesResolver: ( + input: ResolverInput
, + ) => Promise>, + columnsResolver: ( + input: ColumnsResolverInput, + ) => Promise>, + prevFull: MySqlSchema, + curFull: MySqlSchema, + action?: 'push' | undefined, +): Promise<{ + statements: JsonStatement[]; + sqlStatements: string[]; + _meta: + | { + schemas: {}; + tables: {}; + columns: {}; + } + | undefined; +}> => { + // squash indexes and fks + + // squash uniqueIndexes and uniqueConstraint into constraints object + // it should be done for mysql only because it has no diffs for it + + // TODO: @AndriiSherman + // Add an upgrade to v6 and move all snaphosts to this strcutre + // After that we can generate mysql in 1 object directly(same as sqlite) + for (const tableName in json1.tables) { + const table = json1.tables[tableName]; + for (const indexName in table.indexes) { + const index = MySqlSquasher.unsquashIdx(table.indexes[indexName]); + if (index.isUnique) { + table.uniqueConstraints[indexName] = MySqlSquasher.squashUnique({ + name: index.name, + columns: index.columns, + }); + delete json1.tables[tableName].indexes[index.name]; + } + } + } + + for (const tableName in json2.tables) { + const table = json2.tables[tableName]; + for (const indexName in table.indexes) { + const index = MySqlSquasher.unsquashIdx(table.indexes[indexName]); + if (index.isUnique) { + table.uniqueConstraints[indexName] = MySqlSquasher.squashUnique({ + name: index.name, + columns: index.columns, + }); + delete json2.tables[tableName].indexes[index.name]; + } + } + } + + const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables); + + const { + created: createdTables, + deleted: deletedTables, + renamed: renamedTables, // renamed or moved + } = await tablesResolver({ + created: tablesDiff.added, + deleted: tablesDiff.deleted, + }); + + const tablesPatchedSnap1 = copy(json1); + tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { + const { name } = nameChangeFor(it, renamedTables); + it.name = name; + return [name, it]; + }); + + const res = 
diffColumns(tablesPatchedSnap1.tables, json2.tables); + const columnRenames = [] as { + table: string; + renames: { from: Column; to: Column }[]; + }[]; + + const columnCreates = [] as { + table: string; + columns: Column[]; + }[]; + + const columnDeletes = [] as { + table: string; + columns: Column[]; + }[]; + + for (let entry of Object.values(res)) { + const { renamed, created, deleted } = await columnsResolver({ + tableName: entry.name, + schema: entry.schema, + deleted: entry.columns.deleted, + created: entry.columns.added, + }); + + if (created.length > 0) { + columnCreates.push({ + table: entry.name, + columns: created, + }); + } + + if (deleted.length > 0) { + columnDeletes.push({ + table: entry.name, + columns: deleted, + }); + } + + if (renamed.length > 0) { + columnRenames.push({ + table: entry.name, + renames: renamed, + }); + } + } + + const columnRenamesDict = columnRenames.reduce( + (acc, it) => { + acc[it.table] = it.renames; + return acc; + }, + {} as Record< + string, + { + from: Named; + to: Named; + }[] + >, + ); + + const columnsPatchedSnap1 = copy(tablesPatchedSnap1); + columnsPatchedSnap1.tables = mapEntries( + columnsPatchedSnap1.tables, + (tableKey, tableValue) => { + const patchedColumns = mapKeys( + tableValue.columns, + (columnKey, column) => { + const rens = columnRenamesDict[tableValue.name] || []; + const newName = columnChangeFor(columnKey, rens); + column.name = newName; + return newName; + }, + ); + + tableValue.columns = patchedColumns; + return [tableKey, tableValue]; + }, + ); + + const diffResult = applyJsonDiff(columnsPatchedSnap1, json2); + + const typedResult: DiffResultMysql = diffResultSchemeMysql.parse(diffResult); + + const jsonStatements: JsonStatement[] = []; + + const jsonCreateIndexesForCreatedTables = createdTables + .map((it) => { + return prepareCreateIndexesJson( + it.name, + it.schema, + it.indexes, + curFull.internal, + ); + }) + .flat(); + + const jsonDropTables = deletedTables.map((it) => { + return 
prepareDropTableJson(it); + }); + + const jsonRenameTables = renamedTables.map((it) => { + return prepareRenameTableJson(it.from, it.to); + }); + + const alteredTables = typedResult.alteredTablesWithColumns; + + const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; + const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; + const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; + + const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + + const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames + .map((it) => prepareRenameColumns(it.table, '', it.renames)) + .flat(); + + const jsonAddColumnsStatemets: JsonAddColumnStatement[] = columnCreates + .map((it) => _prepareAddColumns(it.table, '', it.columns)) + .flat(); + + const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes + .map((it) => _prepareDropColumns(it.table, '', it.columns)) + .flat(); + + alteredTables.forEach((it) => { + // This part is needed to make sure that same columns in a table are not triggered for change + // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name + // We double-check that pk with same set of columns are both in added and deleted diffs + let addedColumns: string[] = []; + for (const addedPkName of Object.keys(it.addedCompositePKs)) { + const addedPkColumns = it.addedCompositePKs[addedPkName]; + addedColumns = MySqlSquasher.unsquashPK(addedPkColumns).columns; + } + + let deletedColumns: string[] = []; + for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { + const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; + deletedColumns = MySqlSquasher.unsquashPK(deletedPkColumns).columns; + } + + // Don't need to sort, but need to add tests for it + // addedColumns.sort(); + // deletedColumns.sort(); + 
const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns); + + let addedCompositePKs: JsonCreateCompositePK[] = []; + let deletedCompositePKs: JsonDeleteCompositePK[] = []; + let alteredCompositePKs: JsonAlterCompositePK[] = []; + + addedCompositePKs = prepareAddCompositePrimaryKeyMySql( + it.name, + it.addedCompositePKs, + prevFull, + curFull, + ); + deletedCompositePKs = prepareDeleteCompositePrimaryKeyMySql( + it.name, + it.deletedCompositePKs, + prevFull, + ); + // } + alteredCompositePKs = prepareAlterCompositePrimaryKeyMySql( + it.name, + it.alteredCompositePKs, + prevFull, + curFull, + ); + + // add logic for unique constraints + let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + + addedUniqueConstraints = prepareAddUniqueConstraint( + it.name, + it.schema, + it.addedUniqueConstraints, + ); + deletedUniqueConstraints = prepareDeleteUniqueConstraint( + it.name, + it.schema, + it.deletedUniqueConstraints, + ); + if (it.alteredUniqueConstraints) { + const added: Record = {}; + const deleted: Record = {}; + for (const k of Object.keys(it.alteredUniqueConstraints)) { + added[k] = it.alteredUniqueConstraints[k].__new; + deleted[k] = it.alteredUniqueConstraints[k].__old; + } + addedUniqueConstraints.push( + ...prepareAddUniqueConstraint(it.name, it.schema, added), + ); + deletedUniqueConstraints.push( + ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), + ); + } + + jsonAddedCompositePKs.push(...addedCompositePKs); + jsonDeletedCompositePKs.push(...deletedCompositePKs); + jsonAlteredCompositePKs.push(...alteredCompositePKs); + + jsonAddedUniqueConstraints.push(...addedUniqueConstraints); + jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); + jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); + }); + + const rColumns = 
jsonRenameColumnsStatements.map((it) => { + const tableName = it.tableName; + const schema = it.schema; + return { + from: { schema, table: tableName, column: it.oldColumnName }, + to: { schema, table: tableName, column: it.newColumnName }, + }; + }); + + const jsonTableAlternations = alteredTables + .map((it) => { + return prepareAlterColumnsMysql( + it.name, + it.schema, + it.altered, + json1, + json2, + action, + ); + }) + .flat(); + + const jsonCreateIndexesForAllAlteredTables = alteredTables + .map((it) => { + return prepareCreateIndexesJson( + it.name, + it.schema, + it.addedIndexes || {}, + curFull.internal, + ); + }) + .flat(); + + const jsonDropIndexesForAllAlteredTables = alteredTables + .map((it) => { + return prepareDropIndexesJson( + it.name, + it.schema, + it.deletedIndexes || {}, + ); + }) + .flat(); + + alteredTables.forEach((it) => { + const droppedIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + current[item] = it.alteredIndexes[item].__old; + return current; + }, + {} as Record, + ); + const createdIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + current[item] = it.alteredIndexes[item].__new; + return current; + }, + {} as Record, + ); + + jsonCreateIndexesForAllAlteredTables.push( + ...prepareCreateIndexesJson(it.name, it.schema, createdIndexes || {}), + ); + jsonDropIndexesForAllAlteredTables.push( + ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), + ); + }); + + const jsonCreateReferencesForCreatedTables: JsonCreateReferenceStatement[] = createdTables + .map((it) => { + return prepareCreateReferencesJson(it.name, it.schema, it.foreignKeys); + }) + .flat(); + + const jsonReferencesForAllAlteredTables: JsonReferenceStatement[] = alteredTables + .map((it) => { + const forAdded = prepareCreateReferencesJson( + it.name, + it.schema, + it.addedForeignKeys, + ); + + const forAltered = prepareDropReferencesJson( + it.name, + it.schema, + it.deletedForeignKeys, + ); + 
+ const alteredFKs = prepareAlterReferencesJson( + it.name, + it.schema, + it.alteredForeignKeys, + ); + + return [...forAdded, ...forAltered, ...alteredFKs]; + }) + .flat(); + + const jsonCreatedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( + (t) => t.type === 'create_reference', + ); + const jsonDroppedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( + (t) => t.type === 'delete_reference', + ); + + const jsonMySqlCreateTables = createdTables.map((it) => { + return prepareMySqlCreateTableJson( + it, + curFull as MySqlSchema, + curFull.internal, + ); + }); + jsonStatements.push(...jsonMySqlCreateTables); + + jsonStatements.push(...jsonDropTables); + jsonStatements.push(...jsonRenameTables); + jsonStatements.push(...jsonRenameColumnsStatements); + + jsonStatements.push(...jsonDeletedUniqueConstraints); + + jsonStatements.push(...jsonDroppedReferencesForAlteredTables); + + // Will need to drop indexes before changing any columns in table + // Then should go column alternations and then index creation + jsonStatements.push(...jsonDropIndexesForAllAlteredTables); + + jsonStatements.push(...jsonDeletedCompositePKs); + jsonStatements.push(...jsonTableAlternations); + jsonStatements.push(...jsonAddedCompositePKs); + + jsonStatements.push(...jsonAddedUniqueConstraints); + jsonStatements.push(...jsonDeletedUniqueConstraints); + + jsonStatements.push(...jsonAddColumnsStatemets); + + jsonStatements.push(...jsonCreateReferencesForCreatedTables); + jsonStatements.push(...jsonCreateIndexesForCreatedTables); + + jsonStatements.push(...jsonCreatedReferencesForAlteredTables); + jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); + + jsonStatements.push(...jsonDropColumnsStatemets); + + // jsonStatements.push(...jsonDeletedCompositePKs); + // jsonStatements.push(...jsonAddedCompositePKs); + jsonStatements.push(...jsonAlteredCompositePKs); + + jsonStatements.push(...jsonAddedUniqueConstraints); + + 
jsonStatements.push(...jsonAlteredUniqueConstraints); + + const sqlStatements = fromJson(jsonStatements, 'mysql'); + + const uniqueSqlStatements: string[] = []; + sqlStatements.forEach((ss) => { + if (!uniqueSqlStatements.includes(ss)) { + uniqueSqlStatements.push(ss); + } + }); + + const rTables = renamedTables.map((it) => { + return { from: it.from, to: it.to }; + }); + + const _meta = prepareMigrationMeta([], rTables, rColumns); + + return { + statements: jsonStatements, + sqlStatements: uniqueSqlStatements, + _meta, + }; +}; + +export const applySingleStoreSnapshotsDiff = async ( + json1: SingleStoreSchemaSquashed, + json2: SingleStoreSchemaSquashed, + tablesResolver: ( + input: ResolverInput
, + ) => Promise>, + columnsResolver: ( + input: ColumnsResolverInput, + ) => Promise>, + prevFull: SingleStoreSchema, + curFull: SingleStoreSchema, + action?: 'push' | undefined, +): Promise<{ + statements: JsonStatement[]; + sqlStatements: string[]; + _meta: + | { + schemas: {}; + tables: {}; + columns: {}; + } + | undefined; +}> => { + // squash indexes and fks + + // squash uniqueIndexes and uniqueConstraint into constraints object + // it should be done for singlestore only because it has no diffs for it + for (const tableName in json1.tables) { + const table = json1.tables[tableName]; + for (const indexName in table.indexes) { + const index = SingleStoreSquasher.unsquashIdx(table.indexes[indexName]); + if (index.isUnique) { + table.uniqueConstraints[indexName] = SingleStoreSquasher.squashUnique({ + name: index.name, + columns: index.columns, + }); + delete json1.tables[tableName].indexes[index.name]; + } + } + } + + for (const tableName in json2.tables) { + const table = json2.tables[tableName]; + for (const indexName in table.indexes) { + const index = SingleStoreSquasher.unsquashIdx(table.indexes[indexName]); + if (index.isUnique) { + table.uniqueConstraints[indexName] = SingleStoreSquasher.squashUnique({ + name: index.name, + columns: index.columns, + }); + delete json2.tables[tableName].indexes[index.name]; + } + } + } + + const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables); + + const { + created: createdTables, + deleted: deletedTables, + renamed: renamedTables, // renamed or moved + } = await tablesResolver({ + created: tablesDiff.added, + deleted: tablesDiff.deleted, + }); + + const tablesPatchedSnap1 = copy(json1); + tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { + const { name } = nameChangeFor(it, renamedTables); + it.name = name; + return [name, it]; + }); + + const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); + const columnRenames = [] as { + table: string; + renames: { from: Column; 
to: Column }[]; + }[]; + + const columnCreates = [] as { + table: string; + columns: Column[]; + }[]; + + const columnDeletes = [] as { + table: string; + columns: Column[]; + }[]; + + for (let entry of Object.values(res)) { + const { renamed, created, deleted } = await columnsResolver({ + tableName: entry.name, + schema: entry.schema, + deleted: entry.columns.deleted, + created: entry.columns.added, + }); + + if (created.length > 0) { + columnCreates.push({ + table: entry.name, + columns: created, + }); + } + + if (deleted.length > 0) { + columnDeletes.push({ + table: entry.name, + columns: deleted, + }); + } + + if (renamed.length > 0) { + columnRenames.push({ + table: entry.name, + renames: renamed, + }); + } + } + + const columnRenamesDict = columnRenames.reduce( + (acc, it) => { + acc[it.table] = it.renames; + return acc; + }, + {} as Record< + string, + { + from: Named; + to: Named; + }[] + >, + ); + + const columnsPatchedSnap1 = copy(tablesPatchedSnap1); + columnsPatchedSnap1.tables = mapEntries( + columnsPatchedSnap1.tables, + (tableKey, tableValue) => { + const patchedColumns = mapKeys( + tableValue.columns, + (columnKey, column) => { + const rens = columnRenamesDict[tableValue.name] || []; + const newName = columnChangeFor(columnKey, rens); + column.name = newName; + return newName; + }, + ); + + tableValue.columns = patchedColumns; + return [tableKey, tableValue]; + }, + ); + + const diffResult = applyJsonDiff(columnsPatchedSnap1, json2); + + const typedResult: DiffResultSingleStore = diffResultSchemeSingleStore.parse(diffResult); + + const jsonStatements: JsonStatement[] = []; + + const jsonCreateIndexesForCreatedTables = createdTables + .map((it) => { + return prepareCreateIndexesJson( + it.name, + it.schema, + it.indexes, + curFull.internal, + ); + }) + .flat(); + + const jsonDropTables = deletedTables.map((it) => { + return prepareDropTableJson(it); + }); + + const jsonRenameTables = renamedTables.map((it) => { + return 
prepareRenameTableJson(it.from, it.to); + }); + + const alteredTables = typedResult.alteredTablesWithColumns; + + const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; + const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; + const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; + + const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + + const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames + .map((it) => prepareRenameColumns(it.table, '', it.renames)) + .flat(); + + const jsonAddColumnsStatemets: JsonAddColumnStatement[] = columnCreates + .map((it) => _prepareAddColumns(it.table, '', it.columns)) + .flat(); + + const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes + .map((it) => _prepareDropColumns(it.table, '', it.columns)) + .flat(); + + alteredTables.forEach((it) => { + // This part is needed to make sure that same columns in a table are not triggered for change + // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name + // We double-check that pk with same set of columns are both in added and deleted diffs + let addedColumns: string[] = []; + for (const addedPkName of Object.keys(it.addedCompositePKs)) { + const addedPkColumns = it.addedCompositePKs[addedPkName]; + addedColumns = SingleStoreSquasher.unsquashPK(addedPkColumns).columns; + } + + let deletedColumns: string[] = []; + for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { + const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; + deletedColumns = SingleStoreSquasher.unsquashPK(deletedPkColumns).columns; + } + + // Don't need to sort, but need to add tests for it + // addedColumns.sort(); + // deletedColumns.sort(); + const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== 
JSON.stringify(deletedColumns); + + let addedCompositePKs: JsonCreateCompositePK[] = []; + let deletedCompositePKs: JsonDeleteCompositePK[] = []; + let alteredCompositePKs: JsonAlterCompositePK[] = []; + + addedCompositePKs = prepareAddCompositePrimaryKeySingleStore( + it.name, + it.addedCompositePKs, + prevFull, + curFull, + ); + deletedCompositePKs = prepareDeleteCompositePrimaryKeySingleStore( + it.name, + it.deletedCompositePKs, + prevFull, + ); + // } + alteredCompositePKs = prepareAlterCompositePrimaryKeySingleStore( + it.name, + it.alteredCompositePKs, + prevFull, + curFull, + ); + + // add logic for unique constraints + let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + + addedUniqueConstraints = prepareAddUniqueConstraint( + it.name, + it.schema, + it.addedUniqueConstraints, + ); + deletedUniqueConstraints = prepareDeleteUniqueConstraint( + it.name, + it.schema, + it.deletedUniqueConstraints, + ); + if (it.alteredUniqueConstraints) { + const added: Record = {}; + const deleted: Record = {}; + for (const k of Object.keys(it.alteredUniqueConstraints)) { + added[k] = it.alteredUniqueConstraints[k].__new; + deleted[k] = it.alteredUniqueConstraints[k].__old; + } + addedUniqueConstraints.push( + ...prepareAddUniqueConstraint(it.name, it.schema, added), + ); + deletedUniqueConstraints.push( + ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), + ); + } + + jsonAddedCompositePKs.push(...addedCompositePKs); + jsonDeletedCompositePKs.push(...deletedCompositePKs); + jsonAlteredCompositePKs.push(...alteredCompositePKs); + + jsonAddedUniqueConstraints.push(...addedUniqueConstraints); + jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); + jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); + }); + + const rColumns = jsonRenameColumnsStatements.map((it) => { + const tableName = it.tableName; 
+ const schema = it.schema; + return { + from: { schema, table: tableName, column: it.oldColumnName }, + to: { schema, table: tableName, column: it.newColumnName }, + }; + }); + + const jsonTableAlternations = alteredTables + .map((it) => { + return prepareAlterColumnsSingleStore( + it.name, + it.schema, + it.altered, + json1, + json2, + action, + ); + }) + .flat(); + + const jsonCreateIndexesForAllAlteredTables = alteredTables + .map((it) => { + return prepareCreateIndexesJson( + it.name, + it.schema, + it.addedIndexes || {}, + curFull.internal, + ); + }) + .flat(); + + const jsonDropIndexesForAllAlteredTables = alteredTables + .map((it) => { + return prepareDropIndexesJson( + it.name, + it.schema, + it.deletedIndexes || {}, + ); + }) + .flat(); + + alteredTables.forEach((it) => { + const droppedIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + current[item] = it.alteredIndexes[item].__old; + return current; + }, + {} as Record, + ); + const createdIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + current[item] = it.alteredIndexes[item].__new; + return current; + }, + {} as Record, + ); + + jsonCreateIndexesForAllAlteredTables.push( + ...prepareCreateIndexesJson(it.name, it.schema, createdIndexes || {}), + ); + jsonDropIndexesForAllAlteredTables.push( + ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), + ); + }); + + const jsonSingleStoreCreateTables = createdTables.map((it) => { + return prepareSingleStoreCreateTableJson( + it, + curFull as SingleStoreSchema, + curFull.internal, + ); + }); + jsonStatements.push(...jsonSingleStoreCreateTables); + + jsonStatements.push(...jsonDropTables); + jsonStatements.push(...jsonRenameTables); + jsonStatements.push(...jsonRenameColumnsStatements); + + jsonStatements.push(...jsonDeletedUniqueConstraints); + + // Will need to drop indexes before changing any columns in table + // Then should go column alternations and then index creation + 
jsonStatements.push(...jsonDropIndexesForAllAlteredTables); + + jsonStatements.push(...jsonDeletedCompositePKs); + jsonStatements.push(...jsonTableAlternations); + jsonStatements.push(...jsonAddedCompositePKs); + + jsonStatements.push(...jsonAddedUniqueConstraints); + jsonStatements.push(...jsonDeletedUniqueConstraints); + + jsonStatements.push(...jsonAddColumnsStatemets); + + jsonStatements.push(...jsonCreateIndexesForCreatedTables); + + jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); + + jsonStatements.push(...jsonDropColumnsStatemets); + + // jsonStatements.push(...jsonDeletedCompositePKs); + // jsonStatements.push(...jsonAddedCompositePKs); + jsonStatements.push(...jsonAlteredCompositePKs); + + jsonStatements.push(...jsonAddedUniqueConstraints); + + jsonStatements.push(...jsonAlteredUniqueConstraints); + + const sqlStatements = fromJson(jsonStatements, 'singlestore'); + + const uniqueSqlStatements: string[] = []; + sqlStatements.forEach((ss) => { + if (!uniqueSqlStatements.includes(ss)) { + uniqueSqlStatements.push(ss); + } + }); + + const rTables = renamedTables.map((it) => { + return { from: it.from, to: it.to }; + }); + + const _meta = prepareMigrationMeta([], rTables, rColumns); + + return { + statements: jsonStatements, + sqlStatements: uniqueSqlStatements, + _meta, + }; +}; + +export const applySqliteSnapshotsDiff = async ( + json1: SQLiteSchemaSquashed, + json2: SQLiteSchemaSquashed, + tablesResolver: ( + input: ResolverInput
, + ) => Promise>, + columnsResolver: ( + input: ColumnsResolverInput, + ) => Promise>, + prevFull: SQLiteSchema, + curFull: SQLiteSchema, + action?: 'push' | undefined, +): Promise<{ + statements: JsonStatement[]; + sqlStatements: string[]; + _meta: + | { + schemas: {}; + tables: {}; + columns: {}; + } + | undefined; +}> => { + const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables); + + const { + created: createdTables, + deleted: deletedTables, + renamed: renamedTables, + } = await tablesResolver({ + created: tablesDiff.added, + deleted: tablesDiff.deleted, + }); + + const tablesPatchedSnap1 = copy(json1); + tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { + const { name } = nameChangeFor(it, renamedTables); + it.name = name; + return [name, it]; + }); + + const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); + + const columnRenames = [] as { + table: string; + renames: { from: Column; to: Column }[]; + }[]; + + const columnCreates = [] as { + table: string; + columns: Column[]; + }[]; + + const columnDeletes = [] as { + table: string; + columns: Column[]; + }[]; + + for (let entry of Object.values(res)) { + const { renamed, created, deleted } = await columnsResolver({ + tableName: entry.name, + schema: entry.schema, + deleted: entry.columns.deleted, + created: entry.columns.added, + }); + + if (created.length > 0) { + columnCreates.push({ + table: entry.name, + columns: created, + }); + } + + if (deleted.length > 0) { + columnDeletes.push({ + table: entry.name, + columns: deleted, + }); + } + + if (renamed.length > 0) { + columnRenames.push({ + table: entry.name, + renames: renamed, + }); + } + } + + const columnRenamesDict = columnRenames.reduce( + (acc, it) => { + acc[it.table] = it.renames; + return acc; + }, + {} as Record< + string, + { + from: Named; + to: Named; + }[] + >, + ); + + const columnsPatchedSnap1 = copy(tablesPatchedSnap1); + columnsPatchedSnap1.tables = mapEntries( + 
columnsPatchedSnap1.tables, + (tableKey, tableValue) => { + const patchedColumns = mapKeys( + tableValue.columns, + (columnKey, column) => { + const rens = columnRenamesDict[tableValue.name] || []; + const newName = columnChangeFor(columnKey, rens); + column.name = newName; + return newName; + }, + ); + + tableValue.columns = patchedColumns; + return [tableKey, tableValue]; + }, + ); + + const diffResult = applyJsonDiff(columnsPatchedSnap1, json2); + + const typedResult = diffResultSchemeSQLite.parse(diffResult); + + // Map array of objects to map + const tablesMap: { + [key: string]: (typeof typedResult.alteredTablesWithColumns)[number]; + } = {}; + + typedResult.alteredTablesWithColumns.forEach((obj) => { + tablesMap[obj.name] = obj; + }); + + const jsonCreateTables = createdTables.map((it) => { + return prepareSQLiteCreateTable(it, action); + }); + + const jsonCreateIndexesForCreatedTables = createdTables + .map((it) => { + return prepareCreateIndexesJson( + it.name, + it.schema, + it.indexes, + curFull.internal, + ); + }) + .flat(); + + const jsonDropTables = deletedTables.map((it) => { + return prepareDropTableJson(it); + }); + + const jsonRenameTables = renamedTables.map((it) => { + return prepareRenameTableJson(it.from, it.to); + }); + + const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames + .map((it) => prepareRenameColumns(it.table, '', it.renames)) + .flat(); + + const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes + .map((it) => _prepareDropColumns(it.table, '', it.columns)) + .flat(); + + const jsonAddColumnsStatemets: JsonSqliteAddColumnStatement[] = columnCreates + .map((it) => { + return _prepareSqliteAddColumns( + it.table, + it.columns, + tablesMap[it.table] && tablesMap[it.table].addedForeignKeys + ? 
Object.values(tablesMap[it.table].addedForeignKeys) + : [], + ); + }) + .flat(); + + const allAltered = typedResult.alteredTablesWithColumns; + + const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; + const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; + const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; + + const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + + allAltered.forEach((it) => { + // This part is needed to make sure that same columns in a table are not triggered for change + // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name + // We double-check that pk with same set of columns are both in added and deleted diffs + let addedColumns: string[] = []; + for (const addedPkName of Object.keys(it.addedCompositePKs)) { + const addedPkColumns = it.addedCompositePKs[addedPkName]; + addedColumns = SQLiteSquasher.unsquashPK(addedPkColumns); + } + + let deletedColumns: string[] = []; + for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { + const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; + deletedColumns = SQLiteSquasher.unsquashPK(deletedPkColumns); + } + + // Don't need to sort, but need to add tests for it + // addedColumns.sort(); + // deletedColumns.sort(); + + const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns); + + let addedCompositePKs: JsonCreateCompositePK[] = []; + let deletedCompositePKs: JsonDeleteCompositePK[] = []; + let alteredCompositePKs: JsonAlterCompositePK[] = []; + if (doPerformDeleteAndCreate) { + addedCompositePKs = prepareAddCompositePrimaryKeySqlite( + it.name, + it.addedCompositePKs, + ); + deletedCompositePKs = prepareDeleteCompositePrimaryKeySqlite( + it.name, + it.deletedCompositePKs, + ); + } + alteredCompositePKs = 
prepareAlterCompositePrimaryKeySqlite( + it.name, + it.alteredCompositePKs, + ); + + // add logic for unique constraints + let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + + addedUniqueConstraints = prepareAddUniqueConstraint( + it.name, + it.schema, + it.addedUniqueConstraints, + ); + deletedUniqueConstraints = prepareDeleteUniqueConstraint( + it.name, + it.schema, + it.deletedUniqueConstraints, + ); + if (it.alteredUniqueConstraints) { + const added: Record = {}; + const deleted: Record = {}; + for (const k of Object.keys(it.alteredUniqueConstraints)) { + added[k] = it.alteredUniqueConstraints[k].__new; + deleted[k] = it.alteredUniqueConstraints[k].__old; + } + addedUniqueConstraints.push( + ...prepareAddUniqueConstraint(it.name, it.schema, added), + ); + deletedUniqueConstraints.push( + ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), + ); + } + + jsonAddedCompositePKs.push(...addedCompositePKs); + jsonDeletedCompositePKs.push(...deletedCompositePKs); + jsonAlteredCompositePKs.push(...alteredCompositePKs); + + jsonAddedUniqueConstraints.push(...addedUniqueConstraints); + jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); + jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); + }); + + const rColumns = jsonRenameColumnsStatements.map((it) => { + const tableName = it.tableName; + const schema = it.schema; + return { + from: { schema, table: tableName, column: it.oldColumnName }, + to: { schema, table: tableName, column: it.newColumnName }, + }; + }); + + const jsonTableAlternations = allAltered + .map((it) => { + return prepareSqliteAlterColumns(it.name, it.schema, it.altered, json2); + }) + .flat(); + + const jsonCreateIndexesForAllAlteredTables = allAltered + .map((it) => { + return prepareCreateIndexesJson( + it.name, + it.schema, + it.addedIndexes || {}, + curFull.internal, + 
); + }) + .flat(); + + const jsonDropIndexesForAllAlteredTables = allAltered + .map((it) => { + return prepareDropIndexesJson( + it.name, + it.schema, + it.deletedIndexes || {}, + ); + }) + .flat(); + + allAltered.forEach((it) => { + const droppedIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + current[item] = it.alteredIndexes[item].__old; + return current; + }, + {} as Record, + ); + const createdIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + current[item] = it.alteredIndexes[item].__new; + return current; + }, + {} as Record, + ); + + jsonCreateIndexesForAllAlteredTables.push( + ...prepareCreateIndexesJson( + it.name, + it.schema, + createdIndexes || {}, + curFull.internal, + ), + ); + jsonDropIndexesForAllAlteredTables.push( + ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), + ); + }); + + const jsonReferencesForAllAlteredTables: JsonReferenceStatement[] = allAltered + .map((it) => { + const forAdded = prepareCreateReferencesJson( + it.name, + it.schema, + it.addedForeignKeys, + ); + + const forAltered = prepareDropReferencesJson( + it.name, + it.schema, + it.deletedForeignKeys, + ); + + const alteredFKs = prepareAlterReferencesJson( + it.name, + it.schema, + it.alteredForeignKeys, + ); + + return [...forAdded, ...forAltered, ...alteredFKs]; + }) + .flat(); + + const jsonCreatedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( + (t) => t.type === 'create_reference', + ); + const jsonDroppedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( + (t) => t.type === 'delete_reference', + ); + + const jsonStatements: JsonStatement[] = []; + jsonStatements.push(...jsonCreateTables); + + jsonStatements.push(...jsonDropTables); + jsonStatements.push(...jsonRenameTables); + jsonStatements.push(...jsonRenameColumnsStatements); + + jsonStatements.push(...jsonDroppedReferencesForAlteredTables); + + // Will need to drop indexes before changing any 
columns in table + // Then should go column alternations and then index creation + jsonStatements.push(...jsonDropIndexesForAllAlteredTables); + + jsonStatements.push(...jsonDeletedCompositePKs); + jsonStatements.push(...jsonTableAlternations); + jsonStatements.push(...jsonAddedCompositePKs); + jsonStatements.push(...jsonAddColumnsStatemets); + + jsonStatements.push(...jsonCreateIndexesForCreatedTables); + jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); + + jsonStatements.push(...jsonCreatedReferencesForAlteredTables); + + jsonStatements.push(...jsonDropColumnsStatemets); + + // jsonStatements.push(...jsonDeletedCompositePKs); + // jsonStatements.push(...jsonAddedCompositePKs); + jsonStatements.push(...jsonAlteredCompositePKs); + + jsonStatements.push(...jsonAlteredUniqueConstraints); + + const sqlStatements = fromJson(jsonStatements, 'sqlite'); + + const uniqueSqlStatements: string[] = []; + sqlStatements.forEach((ss) => { + if (!uniqueSqlStatements.includes(ss)) { + uniqueSqlStatements.push(ss); + } + }); + + const rTables = renamedTables.map((it) => { + return { from: it.from, to: it.to }; + }); + + const _meta = prepareMigrationMeta([], rTables, rColumns); + + return { + statements: jsonStatements, + sqlStatements: uniqueSqlStatements, + _meta, + }; +}; + +// explicitely ask if tables were renamed, if yes - add those to altered tables, otherwise - deleted +// double check if user wants to delete particular table and warn him on data loss diff --git a/drizzle-kit/src/sqlgenerator.ts b/drizzle-kit/src/sqlgenerator.ts new file mode 100644 index 000000000..07b24b6c9 --- /dev/null +++ b/drizzle-kit/src/sqlgenerator.ts @@ -0,0 +1,3284 @@ +import { BREAKPOINT } from './cli/commands/migrate'; +import { + JsonAddColumnStatement, + JsonAddValueToEnumStatement, + JsonAlterColumnAlterGeneratedStatement, + JsonAlterColumnAlterIdentityStatement, + JsonAlterColumnDropAutoincrementStatement, + JsonAlterColumnDropDefaultStatement, + 
JsonAlterColumnDropGeneratedStatement, + JsonAlterColumnDropIdentityStatement, + JsonAlterColumnDropNotNullStatement, + JsonAlterColumnDropOnUpdateStatement, + JsonAlterColumnDropPrimaryKeyStatement, + JsonAlterColumnSetAutoincrementStatement, + JsonAlterColumnSetDefaultStatement, + JsonAlterColumnSetGeneratedStatement, + JsonAlterColumnSetIdentityStatement, + JsonAlterColumnSetNotNullStatement, + JsonAlterColumnSetOnUpdateStatement, + JsonAlterColumnSetPrimaryKeyStatement, + JsonAlterColumnTypeStatement, + JsonAlterCompositePK, + JsonAlterReferenceStatement, + JsonAlterSequenceStatement, + JsonAlterTableRemoveFromSchema, + JsonAlterTableSetNewSchema, + JsonAlterTableSetSchema, + JsonCreateCompositePK, + JsonCreateEnumStatement, + JsonCreateIndexStatement, + JsonCreateReferenceStatement, + JsonCreateSchema, + JsonCreateSequenceStatement, + JsonCreateTableStatement, + JsonCreateUniqueConstraint, + JsonDeleteCompositePK, + JsonDeleteReferenceStatement, + JsonDeleteUniqueConstraint, + JsonDropColumnStatement, + JsonDropIndexStatement, + JsonDropSequenceStatement, + JsonDropTableStatement, + JsonMoveSequenceStatement, + JsonPgCreateIndexStatement, + JsonRenameColumnStatement, + JsonRenameSchema, + JsonRenameSequenceStatement, + JsonRenameTableStatement, + JsonSqliteAddColumnStatement, + JsonSqliteCreateTableStatement, + JsonStatement, +} from './jsonStatements'; +import { Dialect } from './schemaValidator'; +import { MySqlSquasher } from './serializer/mysqlSchema'; +import { PgSquasher } from './serializer/pgSchema'; +import { SingleStoreSquasher } from './serializer/singlestoreSchema'; +import { SQLiteSquasher } from './serializer/sqliteSchema'; + +export const pgNativeTypes = new Set([ + 'uuid', + 'smallint', + 'integer', + 'bigint', + 'boolean', + 'text', + 'varchar', + 'serial', + 'bigserial', + 'decimal', + 'numeric', + 'real', + 'json', + 'jsonb', + 'time', + 'time with time zone', + 'time without time zone', + 'time', + 'timestamp', + 'timestamp with time zone', 
+ 'timestamp without time zone', + 'date', + 'interval', + 'bigint', + 'bigserial', + 'double precision', + 'interval year', + 'interval month', + 'interval day', + 'interval hour', + 'interval minute', + 'interval second', + 'interval year to month', + 'interval day to hour', + 'interval day to minute', + 'interval day to second', + 'interval hour to minute', + 'interval hour to second', + 'interval minute to second', +]); + +const isPgNativeType = (it: string) => { + if (pgNativeTypes.has(it)) return true; + const toCheck = it.replace(/ /g, ''); + return ( + toCheck.startsWith('varchar(') + || toCheck.startsWith('char(') + || toCheck.startsWith('numeric(') + || toCheck.startsWith('timestamp(') + || toCheck.startsWith('doubleprecision[') + || toCheck.startsWith('intervalyear(') + || toCheck.startsWith('intervalmonth(') + || toCheck.startsWith('intervalday(') + || toCheck.startsWith('intervalhour(') + || toCheck.startsWith('intervalminute(') + || toCheck.startsWith('intervalsecond(') + || toCheck.startsWith('intervalyeartomonth(') + || toCheck.startsWith('intervaldaytohour(') + || toCheck.startsWith('intervaldaytominute(') + || toCheck.startsWith('intervaldaytosecond(') + || toCheck.startsWith('intervalhourtominute(') + || toCheck.startsWith('intervalhourtosecond(') + || toCheck.startsWith('intervalminutetosecond(') + || toCheck.startsWith('vector(') + || toCheck.startsWith('geometry(') + || /^(\w+)(\[\d*])+$/.test(it) + ); +}; + +abstract class Convertor { + abstract can(statement: JsonStatement, dialect: Dialect): boolean; + abstract convert(statement: JsonStatement): string | string[]; +} + +class PgCreateTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_table' && dialect === 'postgresql'; + } + + convert(st: JsonCreateTableStatement) { + const { tableName, schema, columns, compositePKs, uniqueConstraints } = st; + + let statement = ''; + const name = schema ? 
`"${schema}"."${tableName}"` : `"${tableName}"`; + + statement += `CREATE TABLE IF NOT EXISTS ${name} (\n`; + for (let i = 0; i < columns.length; i++) { + const column = columns[i]; + + const primaryKeyStatement = column.primaryKey ? ' PRIMARY KEY' : ''; + const notNullStatement = column.notNull && !column.identity ? ' NOT NULL' : ''; + const defaultStatement = column.default !== undefined ? ` DEFAULT ${column.default}` : ''; + + const uniqueConstraint = column.isUnique + ? ` CONSTRAINT "${column.uniqueName}" UNIQUE${column.nullsNotDistinct ? ' NULLS NOT DISTINCT' : ''}` + : ''; + + const schemaPrefix = column.typeSchema && column.typeSchema !== 'public' + ? `"${column.typeSchema}".` + : ''; + + const type = isPgNativeType(column.type) + ? column.type + : `${schemaPrefix}"${column.type}"`; + const generated = column.generated; + + const generatedStatement = generated ? ` GENERATED ALWAYS AS (${generated?.as}) STORED` : ''; + + const unsquashedIdentity = column.identity + ? PgSquasher.unsquashIdentity(column.identity) + : undefined; + + const identityWithSchema = schema + ? `"${schema}"."${unsquashedIdentity?.name}"` + : `"${unsquashedIdentity?.name}"`; + + const identity = unsquashedIdentity + ? ` GENERATED ${ + unsquashedIdentity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT' + } AS IDENTITY (sequence name ${identityWithSchema}${ + unsquashedIdentity.increment + ? ` INCREMENT BY ${unsquashedIdentity.increment}` + : '' + }${ + unsquashedIdentity.minValue + ? ` MINVALUE ${unsquashedIdentity.minValue}` + : '' + }${ + unsquashedIdentity.maxValue + ? ` MAXVALUE ${unsquashedIdentity.maxValue}` + : '' + }${ + unsquashedIdentity.startWith + ? ` START WITH ${unsquashedIdentity.startWith}` + : '' + }${unsquashedIdentity.cache ? ` CACHE ${unsquashedIdentity.cache}` : ''}${ + unsquashedIdentity.cycle ? 
` CYCLE` : '' + })` + : ''; + + statement += '\t' + + `"${column.name}" ${type}${primaryKeyStatement}${defaultStatement}${generatedStatement}${notNullStatement}${uniqueConstraint}${identity}`; + statement += i === columns.length - 1 ? '' : ',\n'; + } + + if (typeof compositePKs !== 'undefined' && compositePKs.length > 0) { + statement += ',\n'; + const compositePK = PgSquasher.unsquashPK(compositePKs[0]); + statement += `\tCONSTRAINT "${st.compositePkName}" PRIMARY KEY(\"${compositePK.columns.join(`","`)}\")`; + // statement += `\n`; + } + + if ( + typeof uniqueConstraints !== 'undefined' + && uniqueConstraints.length > 0 + ) { + for (const uniqueConstraint of uniqueConstraints) { + statement += ',\n'; + const unsquashedUnique = PgSquasher.unsquashUnique(uniqueConstraint); + statement += `\tCONSTRAINT "${unsquashedUnique.name}" UNIQUE${ + unsquashedUnique.nullsNotDistinct ? ' NULLS NOT DISTINCT' : '' + }(\"${unsquashedUnique.columns.join(`","`)}\")`; + // statement += `\n`; + } + } + statement += `\n);`; + statement += `\n`; + + return statement; + } +} + +class MySqlCreateTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_table' && dialect === 'mysql'; + } + + convert(st: JsonCreateTableStatement) { + const { + tableName, + columns, + schema, + compositePKs, + uniqueConstraints, + internals, + } = st; + + let statement = ''; + statement += `CREATE TABLE \`${tableName}\` (\n`; + for (let i = 0; i < columns.length; i++) { + const column = columns[i]; + + const primaryKeyStatement = column.primaryKey ? ' PRIMARY KEY' : ''; + const notNullStatement = column.notNull ? ' NOT NULL' : ''; + const defaultStatement = column.default !== undefined ? ` DEFAULT ${column.default}` : ''; + + const onUpdateStatement = column.onUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + + const autoincrementStatement = column.autoincrement + ? 
' AUTO_INCREMENT' + : ''; + + const generatedStatement = column.generated + ? ` GENERATED ALWAYS AS (${column.generated?.as}) ${column.generated?.type.toUpperCase()}` + : ''; + + statement += '\t' + + `\`${column.name}\` ${column.type}${autoincrementStatement}${primaryKeyStatement}${generatedStatement}${notNullStatement}${defaultStatement}${onUpdateStatement}`; + statement += i === columns.length - 1 ? '' : ',\n'; + } + + if (typeof compositePKs !== 'undefined' && compositePKs.length > 0) { + statement += ',\n'; + const compositePK = MySqlSquasher.unsquashPK(compositePKs[0]); + statement += `\tCONSTRAINT \`${st.compositePkName}\` PRIMARY KEY(\`${compositePK.columns.join(`\`,\``)}\`)`; + } + + if ( + typeof uniqueConstraints !== 'undefined' + && uniqueConstraints.length > 0 + ) { + for (const uniqueConstraint of uniqueConstraints) { + statement += ',\n'; + const unsquashedUnique = MySqlSquasher.unsquashUnique(uniqueConstraint); + + const uniqueString = unsquashedUnique.columns + .map((it) => { + return internals?.indexes + ? internals?.indexes[unsquashedUnique.name]?.columns[it] + ?.isExpression + ? it + : `\`${it}\`` + : `\`${it}\``; + }) + .join(','); + + statement += `\tCONSTRAINT \`${unsquashedUnique.name}\` UNIQUE(${uniqueString})`; + } + } + + statement += `\n);`; + statement += `\n`; + return statement; + } +} +class SingleStoreCreateTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_table' && dialect === 'singlestore'; + } + + convert(st: JsonCreateTableStatement) { + const { + tableName, + columns, + schema, + compositePKs, + uniqueConstraints, + internals, + } = st; + + let statement = ''; + statement += `CREATE TABLE \`${tableName}\` (\n`; + for (let i = 0; i < columns.length; i++) { + const column = columns[i]; + + const primaryKeyStatement = column.primaryKey ? ' PRIMARY KEY' : ''; + const notNullStatement = column.notNull ? 
' NOT NULL' : ''; + const defaultStatement = column.default !== undefined ? ` DEFAULT ${column.default}` : ''; + + const onUpdateStatement = column.onUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + + const autoincrementStatement = column.autoincrement + ? ' AUTO_INCREMENT' + : ''; + + const generatedStatement = column.generated + ? ` GENERATED ALWAYS AS (${column.generated?.as}) ${column.generated?.type.toUpperCase()}` + : ''; + + statement += '\t' + + `\`${column.name}\` ${column.type}${autoincrementStatement}${primaryKeyStatement}${notNullStatement}${defaultStatement}${onUpdateStatement}${generatedStatement}`; + statement += i === columns.length - 1 ? '' : ',\n'; + } + + if (typeof compositePKs !== 'undefined' && compositePKs.length > 0) { + statement += ',\n'; + const compositePK = SingleStoreSquasher.unsquashPK(compositePKs[0]); + statement += `\tCONSTRAINT \`${st.compositePkName}\` PRIMARY KEY(\`${compositePK.columns.join(`\`,\``)}\`)`; + } + + if ( + typeof uniqueConstraints !== 'undefined' + && uniqueConstraints.length > 0 + ) { + for (const uniqueConstraint of uniqueConstraints) { + statement += ',\n'; + const unsquashedUnique = SingleStoreSquasher.unsquashUnique(uniqueConstraint); + + const uniqueString = unsquashedUnique.columns + .map((it) => { + return internals?.indexes + ? internals?.indexes[unsquashedUnique.name]?.columns[it] + ?.isExpression + ? 
it + : `\`${it}\`` + : `\`${it}\``; + }) + .join(','); + + statement += `\tCONSTRAINT \`${unsquashedUnique.name}\` UNIQUE(${uniqueString})`; + } + } + + statement += `\n);`; + statement += `\n`; + return statement; + } +} + +export class SQLiteCreateTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'sqlite_create_table' && dialect === 'sqlite'; + } + + convert(st: JsonSqliteCreateTableStatement) { + const { + tableName, + columns, + referenceData, + compositePKs, + uniqueConstraints, + } = st; + + let statement = ''; + statement += `CREATE TABLE \`${tableName}\` (\n`; + for (let i = 0; i < columns.length; i++) { + const column = columns[i]; + + const primaryKeyStatement = column.primaryKey ? ' PRIMARY KEY' : ''; + const notNullStatement = column.notNull ? ' NOT NULL' : ''; + const defaultStatement = column.default !== undefined ? ` DEFAULT ${column.default}` : ''; + + const autoincrementStatement = column.autoincrement + ? ' AUTOINCREMENT' + : ''; + + const generatedStatement = column.generated + ? ` GENERATED ALWAYS AS ${column.generated.as} ${column.generated.type.toUpperCase()}` + : ''; + + statement += '\t'; + statement += + `\`${column.name}\` ${column.type}${primaryKeyStatement}${autoincrementStatement}${defaultStatement}${generatedStatement}${notNullStatement}`; + + statement += i === columns.length - 1 ? '' : ',\n'; + } + + compositePKs.forEach((it) => { + statement += ',\n\t'; + statement += `PRIMARY KEY(${it.map((it) => `\`${it}\``).join(', ')})`; + }); + + for (let i = 0; i < referenceData.length; i++) { + const { + name, + tableFrom, + tableTo, + columnsFrom, + columnsTo, + onDelete, + onUpdate, + } = referenceData[i]; + + const onDeleteStatement = onDelete ? ` ON DELETE ${onDelete}` : ''; + const onUpdateStatement = onUpdate ? 
` ON UPDATE ${onUpdate}` : ''; + const fromColumnsString = columnsFrom.map((it) => `\`${it}\``).join(','); + const toColumnsString = columnsTo.map((it) => `\`${it}\``).join(','); + + statement += ','; + statement += '\n\t'; + statement += + `FOREIGN KEY (${fromColumnsString}) REFERENCES \`${tableTo}\`(${toColumnsString})${onUpdateStatement}${onDeleteStatement}`; + } + + if ( + typeof uniqueConstraints !== 'undefined' + && uniqueConstraints.length > 0 + ) { + for (const uniqueConstraint of uniqueConstraints) { + statement += ',\n'; + const unsquashedUnique = MySqlSquasher.unsquashUnique(uniqueConstraint); + statement += `\tCONSTRAINT ${unsquashedUnique.name} UNIQUE(\`${unsquashedUnique.columns.join(`\`,\``)}\`)`; + } + } + + statement += `\n`; + statement += `);`; + statement += `\n`; + return statement; + } +} + +class PgAlterTableAlterColumnSetGenerated extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_identity' + && dialect === 'postgresql' + ); + } + override convert( + statement: JsonAlterColumnSetIdentityStatement, + ): string | string[] { + const { identity, tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + const unsquashedIdentity = PgSquasher.unsquashIdentity(identity); + + const identityWithSchema = schema + ? `"${schema}"."${unsquashedIdentity?.name}"` + : `"${unsquashedIdentity?.name}"`; + + const identityStatement = unsquashedIdentity + ? ` GENERATED ${ + unsquashedIdentity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT' + } AS IDENTITY (sequence name ${identityWithSchema}${ + unsquashedIdentity.increment + ? ` INCREMENT BY ${unsquashedIdentity.increment}` + : '' + }${ + unsquashedIdentity.minValue + ? ` MINVALUE ${unsquashedIdentity.minValue}` + : '' + }${ + unsquashedIdentity.maxValue + ? 
` MAXVALUE ${unsquashedIdentity.maxValue}` + : '' + }${ + unsquashedIdentity.startWith + ? ` START WITH ${unsquashedIdentity.startWith}` + : '' + }${unsquashedIdentity.cache ? ` CACHE ${unsquashedIdentity.cache}` : ''}${ + unsquashedIdentity.cycle ? ` CYCLE` : '' + })` + : ''; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" ADD${identityStatement};`; + } +} + +class PgAlterTableAlterColumnDropGenerated extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_identity' + && dialect === 'postgresql' + ); + } + override convert( + statement: JsonAlterColumnDropIdentityStatement, + ): string | string[] { + const { tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP IDENTITY;`; + } +} + +class PgAlterTableAlterColumnAlterGenerated extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_change_identity' + && dialect === 'postgresql' + ); + } + override convert( + statement: JsonAlterColumnAlterIdentityStatement, + ): string | string[] { + const { identity, oldIdentity, tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + const unsquashedIdentity = PgSquasher.unsquashIdentity(identity); + const unsquashedOldIdentity = PgSquasher.unsquashIdentity(oldIdentity); + + const statementsToReturn: string[] = []; + + if (unsquashedOldIdentity.type !== unsquashedIdentity.type) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET GENERATED ${ + unsquashedIdentity.type === 'always' ? 
'ALWAYS' : 'BY DEFAULT' + };`, + ); + } + + if (unsquashedOldIdentity.minValue !== unsquashedIdentity.minValue) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET MINVALUE ${unsquashedIdentity.minValue};`, + ); + } + + if (unsquashedOldIdentity.maxValue !== unsquashedIdentity.maxValue) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET MAXVALUE ${unsquashedIdentity.maxValue};`, + ); + } + + if (unsquashedOldIdentity.increment !== unsquashedIdentity.increment) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET INCREMENT BY ${unsquashedIdentity.increment};`, + ); + } + + if (unsquashedOldIdentity.startWith !== unsquashedIdentity.startWith) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET START WITH ${unsquashedIdentity.startWith};`, + ); + } + + if (unsquashedOldIdentity.cache !== unsquashedIdentity.cache) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET CACHE ${unsquashedIdentity.cache};`, + ); + } + + if (unsquashedOldIdentity.cycle !== unsquashedIdentity.cycle) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET ${ + unsquashedIdentity.cycle ? `CYCLE` : 'NO CYCLE' + };`, + ); + } + + return statementsToReturn; + } +} + +class PgAlterTableAddUniqueConstraintConvertor extends Convertor { + can(statement: JsonCreateUniqueConstraint, dialect: Dialect): boolean { + return ( + statement.type === 'create_unique_constraint' && dialect === 'postgresql' + ); + } + convert(statement: JsonCreateUniqueConstraint): string { + const unsquashed = PgSquasher.unsquashUnique(statement.data); + + const tableNameWithSchema = statement.schema + ? 
`"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${unsquashed.name}" UNIQUE${ + unsquashed.nullsNotDistinct ? ' NULLS NOT DISTINCT' : '' + }("${unsquashed.columns.join('","')}");`; + } +} + +class PgAlterTableDropUniqueConstraintConvertor extends Convertor { + can(statement: JsonDeleteUniqueConstraint, dialect: Dialect): boolean { + return ( + statement.type === 'delete_unique_constraint' && dialect === 'postgresql' + ); + } + convert(statement: JsonDeleteUniqueConstraint): string { + const unsquashed = PgSquasher.unsquashUnique(statement.data); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${unsquashed.name}";`; + } +} + +class MySQLAlterTableAddUniqueConstraintConvertor extends Convertor { + can(statement: JsonCreateUniqueConstraint, dialect: Dialect): boolean { + return statement.type === 'create_unique_constraint' && dialect === 'mysql'; + } + convert(statement: JsonCreateUniqueConstraint): string { + const unsquashed = MySqlSquasher.unsquashUnique(statement.data); + + return `ALTER TABLE \`${statement.tableName}\` ADD CONSTRAINT \`${unsquashed.name}\` UNIQUE(\`${ + unsquashed.columns.join('`,`') + }\`);`; + } +} + +class MySQLAlterTableDropUniqueConstraintConvertor extends Convertor { + can(statement: JsonDeleteUniqueConstraint, dialect: Dialect): boolean { + return statement.type === 'delete_unique_constraint' && dialect === 'mysql'; + } + convert(statement: JsonDeleteUniqueConstraint): string { + const unsquashed = MySqlSquasher.unsquashUnique(statement.data); + + return `ALTER TABLE \`${statement.tableName}\` DROP INDEX \`${unsquashed.name}\`;`; + } +} + +class SingleStoreAlterTableAddUniqueConstraintConvertor extends Convertor { + can(statement: JsonCreateUniqueConstraint, dialect: Dialect): boolean { + return 
statement.type === 'create_unique_constraint' && dialect === 'singlestore'; + } + convert(statement: JsonCreateUniqueConstraint): string { + const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data); + + return `ALTER TABLE \`${statement.tableName}\` ADD CONSTRAINT \`${unsquashed.name}\` UNIQUE(\`${ + unsquashed.columns.join('`,`') + }\`);`; + } +} +class SingleStoreAlterTableDropUniqueConstraintConvertor extends Convertor { + can(statement: JsonDeleteUniqueConstraint, dialect: Dialect): boolean { + return statement.type === 'delete_unique_constraint' && dialect === 'singlestore'; + } + convert(statement: JsonDeleteUniqueConstraint): string { + const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data); + + return `ALTER TABLE \`${statement.tableName}\` DROP INDEX \`${unsquashed.name}\`;`; + } +} + +class SQLiteAlterTableAddUniqueConstraintConvertor extends Convertor { + can(statement: JsonCreateUniqueConstraint, dialect: Dialect): boolean { + return ( + statement.type === 'create_unique_constraint' && dialect === 'sqlite' + ); + } + convert(statement: JsonCreateUniqueConstraint): string { + return ( + '/*\n SQLite does not support "Adding unique constraint to an existing table" out of the box, we do not generate automatic migration for that, so it has to be done manually' + + '\n Please refer to: https://www.techonthenet.com/sqlite/unique.php' + + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + + '\n*/' + ); + } +} + +class SQLiteAlterTableDropUniqueConstraintConvertor extends Convertor { + can(statement: JsonDeleteUniqueConstraint, dialect: Dialect): boolean { + return ( + statement.type === 'delete_unique_constraint' && dialect === 'sqlite' + ); + } + convert(statement: JsonDeleteUniqueConstraint): string { + return ( + '/*\n SQLite does not support "Dropping unique constraint from an existing table" out of the box, we do not generate automatic migration for that, so it has to be done manually' 
+ + '\n Please refer to: https://www.techonthenet.com/sqlite/unique.php' + + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + + '\n*/' + ); + } +} + +class CreatePgSequenceConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_sequence' && dialect === 'postgresql'; + } + + convert(st: JsonCreateSequenceStatement) { + const { name, values, schema } = st; + + const sequenceWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + return `CREATE SEQUENCE ${sequenceWithSchema}${values.increment ? ` INCREMENT BY ${values.increment}` : ''}${ + values.minValue ? ` MINVALUE ${values.minValue}` : '' + }${values.maxValue ? ` MAXVALUE ${values.maxValue}` : ''}${ + values.startWith ? ` START WITH ${values.startWith}` : '' + }${values.cache ? ` CACHE ${values.cache}` : ''}${values.cycle ? ` CYCLE` : ''};`; + } +} + +class DropPgSequenceConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_sequence' && dialect === 'postgresql'; + } + + convert(st: JsonDropSequenceStatement) { + const { name, schema } = st; + + const sequenceWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + return `DROP SEQUENCE ${sequenceWithSchema};`; + } +} + +class RenamePgSequenceConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_sequence' && dialect === 'postgresql'; + } + + convert(st: JsonRenameSequenceStatement) { + const { nameFrom, nameTo, schema } = st; + + const sequenceWithSchemaFrom = schema + ? `"${schema}"."${nameFrom}"` + : `"${nameFrom}"`; + const sequenceWithSchemaTo = schema + ? 
`"${schema}"."${nameTo}"` + : `"${nameTo}"`; + + return `ALTER SEQUENCE ${sequenceWithSchemaFrom} RENAME TO "${nameTo}";`; + } +} + +class MovePgSequenceConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'move_sequence' && dialect === 'postgresql'; + } + + convert(st: JsonMoveSequenceStatement) { + const { schemaFrom, schemaTo, name } = st; + + const sequenceWithSchema = schemaFrom + ? `"${schemaFrom}"."${name}"` + : `"${name}"`; + + const seqSchemaTo = schemaTo ? `"${schemaTo}"` : `public`; + + return `ALTER SEQUENCE ${sequenceWithSchema} SET SCHEMA ${seqSchemaTo};`; + } +} + +class AlterPgSequenceConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_sequence' && dialect === 'postgresql'; + } + + convert(st: JsonAlterSequenceStatement) { + const { name, schema, values } = st; + + const { increment, minValue, maxValue, startWith, cache, cycle } = values; + + const sequenceWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + return `ALTER SEQUENCE ${sequenceWithSchema}${increment ? ` INCREMENT BY ${increment}` : ''}${ + minValue ? ` MINVALUE ${minValue}` : '' + }${maxValue ? ` MAXVALUE ${maxValue}` : ''}${startWith ? ` START WITH ${startWith}` : ''}${ + cache ? ` CACHE ${cache}` : '' + }${cycle ? ` CYCLE` : ''};`; + } +} + +class CreateTypeEnumConvertor extends Convertor { + can(statement: JsonStatement): boolean { + return statement.type === 'create_type_enum'; + } + + convert(st: JsonCreateEnumStatement) { + const { name, values, schema } = st; + + const tableNameWithSchema = schema ? 
`"${schema}"."${name}"` : `"${name}"`; + + let valuesStatement = '('; + valuesStatement += values.map((it) => `'${it}'`).join(', '); + valuesStatement += ')'; + + let statement = 'DO $$ BEGIN'; + statement += '\n'; + statement += ` CREATE TYPE ${tableNameWithSchema} AS ENUM${valuesStatement};`; + statement += '\n'; + statement += 'EXCEPTION'; + statement += '\n'; + statement += ' WHEN duplicate_object THEN null;'; + statement += '\n'; + statement += 'END $$;'; + statement += '\n'; + return statement; + } +} + +class AlterTypeAddValueConvertor extends Convertor { + can(statement: JsonStatement): boolean { + return statement.type === 'alter_type_add_value'; + } + + convert(st: JsonAddValueToEnumStatement) { + const { name, schema, value } = st; + const schemaPrefix = schema && schema !== 'public' ? `"${schema}".` : ''; + return `ALTER TYPE ${schemaPrefix}"${name}" ADD VALUE '${value}';`; + } +} + +class PgDropTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_table' && dialect === 'postgresql'; + } + + convert(statement: JsonDropTableStatement) { + const { tableName, schema } = statement; + + const tableNameWithSchema = schema + ? 
`"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `DROP TABLE ${tableNameWithSchema};`; + } +} + +class MySQLDropTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_table' && dialect === 'mysql'; + } + + convert(statement: JsonDropTableStatement) { + const { tableName } = statement; + return `DROP TABLE \`${tableName}\`;`; + } +} + +class SingleStoreDropTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_table' && dialect === 'singlestore'; + } + + convert(statement: JsonDropTableStatement) { + const { tableName } = statement; + return `DROP TABLE \`${tableName}\`;`; + } +} + +export class SQLiteDropTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_table' && dialect === 'sqlite'; + } + + convert(statement: JsonDropTableStatement) { + const { tableName } = statement; + return `DROP TABLE \`${tableName}\`;`; + } +} + +class PgRenameTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_table' && dialect === 'postgresql'; + } + + convert(statement: JsonRenameTableStatement) { + const { tableNameFrom, tableNameTo, toSchema, fromSchema } = statement; + const from = fromSchema + ? 
`"${fromSchema}"."${tableNameFrom}"` + : `"${tableNameFrom}"`; + const to = `"${tableNameTo}"`; + return `ALTER TABLE ${from} RENAME TO ${to};`; + } +} + +export class SqliteRenameTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_table' && dialect === 'sqlite'; + } + + convert(statement: JsonRenameTableStatement) { + const { tableNameFrom, tableNameTo } = statement; + return `ALTER TABLE \`${tableNameFrom}\` RENAME TO \`${tableNameTo}\`;`; + } +} + +class MySqlRenameTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_table' && dialect === 'mysql'; + } + + convert(statement: JsonRenameTableStatement) { + const { tableNameFrom, tableNameTo } = statement; + return `RENAME TABLE \`${tableNameFrom}\` TO \`${tableNameTo}\`;`; + } +} + +class SingleStoreRenameTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_table' && dialect === 'singlestore'; + } + + convert(statement: JsonRenameTableStatement) { + const { tableNameFrom, tableNameTo } = statement; + return `RENAME TABLE \`${tableNameFrom}\` TO \`${tableNameTo}\`;`; + } +} + +class PgAlterTableRenameColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_rename_column' && dialect === 'postgresql' + ); + } + + convert(statement: JsonRenameColumnStatement) { + const { tableName, oldColumnName, newColumnName, schema } = statement; + + const tableNameWithSchema = schema + ? 
`"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} RENAME COLUMN "${oldColumnName}" TO "${newColumnName}";`; + } +} + +class MySqlAlterTableRenameColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_rename_column' && dialect === 'mysql' + ); + } + + convert(statement: JsonRenameColumnStatement) { + const { tableName, oldColumnName, newColumnName } = statement; + return `ALTER TABLE \`${tableName}\` RENAME COLUMN \`${oldColumnName}\` TO \`${newColumnName}\`;`; + } +} + +class SingleStoreAlterTableRenameColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_rename_column' && dialect === 'singlestore' + ); + } + + convert(statement: JsonRenameColumnStatement) { + const { tableName, oldColumnName, newColumnName } = statement; + return `ALTER TABLE \`${tableName}\` RENAME COLUMN \`${oldColumnName}\` TO \`${newColumnName}\`;`; + } +} + +class SQLiteAlterTableRenameColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_rename_column' && dialect === 'sqlite' + ); + } + + convert(statement: JsonRenameColumnStatement) { + const { tableName, oldColumnName, newColumnName } = statement; + return `ALTER TABLE \`${tableName}\` RENAME COLUMN \`${oldColumnName}\` TO \`${newColumnName}\`;`; + } +} + +class PgAlterTableDropColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_drop_column' && dialect === 'postgresql' + ); + } + + convert(statement: JsonDropColumnStatement) { + const { tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? 
`"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP COLUMN IF EXISTS "${columnName}";`; + } +} + +class MySqlAlterTableDropColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_table_drop_column' && dialect === 'mysql'; + } + + convert(statement: JsonDropColumnStatement) { + const { tableName, columnName } = statement; + return `ALTER TABLE \`${tableName}\` DROP COLUMN \`${columnName}\`;`; + } +} + +class SingleStoreAlterTableDropColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_table_drop_column' && dialect === 'singlestore'; + } + + convert(statement: JsonDropColumnStatement) { + const { tableName, columnName } = statement; + return `ALTER TABLE \`${tableName}\` DROP COLUMN \`${columnName}\`;`; + } +} + +class SQLiteAlterTableDropColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_table_drop_column' && dialect === 'sqlite'; + } + + convert(statement: JsonDropColumnStatement) { + const { tableName, columnName } = statement; + return `ALTER TABLE \`${tableName}\` DROP COLUMN \`${columnName}\`;`; + } +} + +class PgAlterTableAddColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_add_column' && dialect === 'postgresql' + ); + } + + convert(statement: JsonAddColumnStatement) { + const { tableName, column, schema } = statement; + const { name, type, notNull, generated, primaryKey, identity } = column; + + const primaryKeyStatement = primaryKey ? ' PRIMARY KEY' : ''; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + const defaultStatement = `${column.default !== undefined ? 
` DEFAULT ${column.default}` : ''}`; + + const schemaPrefix = column.typeSchema && column.typeSchema !== 'public' + ? `"${column.typeSchema}".` + : ''; + + const fixedType = isPgNativeType(column.type) + ? column.type + : `${schemaPrefix}"${column.type}"`; + + const notNullStatement = `${notNull ? ' NOT NULL' : ''}`; + + const unsquashedIdentity = identity + ? PgSquasher.unsquashIdentity(identity) + : undefined; + + const identityWithSchema = schema + ? `"${schema}"."${unsquashedIdentity?.name}"` + : `"${unsquashedIdentity?.name}"`; + + const identityStatement = unsquashedIdentity + ? ` GENERATED ${ + unsquashedIdentity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT' + } AS IDENTITY (sequence name ${identityWithSchema}${ + unsquashedIdentity.increment + ? ` INCREMENT BY ${unsquashedIdentity.increment}` + : '' + }${ + unsquashedIdentity.minValue + ? ` MINVALUE ${unsquashedIdentity.minValue}` + : '' + }${ + unsquashedIdentity.maxValue + ? ` MAXVALUE ${unsquashedIdentity.maxValue}` + : '' + }${ + unsquashedIdentity.startWith + ? ` START WITH ${unsquashedIdentity.startWith}` + : '' + }${unsquashedIdentity.cache ? ` CACHE ${unsquashedIdentity.cache}` : ''}${ + unsquashedIdentity.cycle ? ` CYCLE` : '' + })` + : ''; + + const generatedStatement = generated ? ` GENERATED ALWAYS AS (${generated?.as}) STORED` : ''; + + return `ALTER TABLE ${tableNameWithSchema} ADD COLUMN "${name}" ${fixedType}${primaryKeyStatement}${defaultStatement}${generatedStatement}${notNullStatement}${identityStatement};`; + } +} + +class MySqlAlterTableAddColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_table_add_column' && dialect === 'mysql'; + } + + convert(statement: JsonAddColumnStatement) { + const { tableName, column } = statement; + const { + name, + type, + notNull, + primaryKey, + autoincrement, + onUpdate, + generated, + } = column; + + const defaultStatement = `${column.default !== undefined ? 
` DEFAULT ${column.default}` : ''}`; + const notNullStatement = `${notNull ? ' NOT NULL' : ''}`; + const primaryKeyStatement = `${primaryKey ? ' PRIMARY KEY' : ''}`; + const autoincrementStatement = `${autoincrement ? ' AUTO_INCREMENT' : ''}`; + const onUpdateStatement = `${onUpdate ? ' ON UPDATE CURRENT_TIMESTAMP' : ''}`; + + const generatedStatement = generated + ? ` GENERATED ALWAYS AS (${generated?.as}) ${generated?.type.toUpperCase()}` + : ''; + + return `ALTER TABLE \`${tableName}\` ADD \`${name}\` ${type}${primaryKeyStatement}${autoincrementStatement}${defaultStatement}${generatedStatement}${notNullStatement}${onUpdateStatement};`; + } +} + +class SingleStoreAlterTableAddColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_table_add_column' && dialect === 'singlestore'; + } + + convert(statement: JsonAddColumnStatement) { + const { tableName, column } = statement; + const { + name, + type, + notNull, + primaryKey, + autoincrement, + onUpdate, + generated, + } = column; + + const defaultStatement = `${column.default !== undefined ? ` DEFAULT ${column.default}` : ''}`; + const notNullStatement = `${notNull ? ' NOT NULL' : ''}`; + const primaryKeyStatement = `${primaryKey ? ' PRIMARY KEY' : ''}`; + const autoincrementStatement = `${autoincrement ? ' AUTO_INCREMENT' : ''}`; + const onUpdateStatement = `${onUpdate ? ' ON UPDATE CURRENT_TIMESTAMP' : ''}`; + + const generatedStatement = generated + ? 
` GENERATED ALWAYS AS (${generated?.as}) ${generated?.type.toUpperCase()}` + : ''; + + return `ALTER TABLE \`${tableName}\` ADD \`${name}\` ${type}${primaryKeyStatement}${autoincrementStatement}${defaultStatement}${notNullStatement}${onUpdateStatement}${generatedStatement};`; + } +} + +export class SQLiteAlterTableAddColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'sqlite_alter_table_add_column' && dialect === 'sqlite' + ); + } + + convert(statement: JsonSqliteAddColumnStatement) { + const { tableName, column, referenceData } = statement; + const { name, type, notNull, primaryKey, generated } = column; + + const defaultStatement = `${column.default !== undefined ? ` DEFAULT ${column.default}` : ''}`; + const notNullStatement = `${notNull ? ' NOT NULL' : ''}`; + const primaryKeyStatement = `${primaryKey ? ' PRIMARY KEY' : ''}`; + const referenceAsObject = referenceData + ? SQLiteSquasher.unsquashFK(referenceData) + : undefined; + const referenceStatement = `${ + referenceAsObject + ? ` REFERENCES ${referenceAsObject.tableTo}(${referenceAsObject.columnsTo})` + : '' + }`; + // const autoincrementStatement = `${autoincrement ? 'AUTO_INCREMENT' : ''}` + const generatedStatement = generated + ? ` GENERATED ALWAYS AS ${generated.as} ${generated.type.toUpperCase()}` + : ''; + + return `ALTER TABLE \`${tableName}\` ADD \`${name}\` ${type}${primaryKeyStatement}${defaultStatement}${generatedStatement}${notNullStatement}${referenceStatement};`; + } +} + +class PgAlterTableAlterColumnSetTypeConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_type' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnTypeStatement) { + const { tableName, columnName, newDataType, schema } = statement; + + const tableNameWithSchema = schema + ? 
`"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DATA TYPE ${newDataType};`; + } +} + +class SQLiteAlterTableAlterColumnSetTypeConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_type' + && dialect === 'sqlite' + ); + } + + convert(statement: JsonAlterColumnTypeStatement) { + return ( + '/*\n SQLite does not support "Changing existing column type" out of the box, we do not generate automatic migration for that, so it has to be done manually' + + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + + '\n https://www.sqlite.org/lang_altertable.html' + + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3' + + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + + '\n*/' + ); + } +} + +class PgAlterTableAlterColumnSetDefaultConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_default' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnSetDefaultStatement) { + const { tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? 
`"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DEFAULT ${statement.newDefaultValue};`; + } +} + +class SqliteAlterTableAlterColumnSetDefaultConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_default' + && dialect === 'sqlite' + ); + } + + convert(statement: JsonAlterColumnSetDefaultStatement) { + return ( + '/*\n SQLite does not support "Set default to column" out of the box, we do not generate automatic migration for that, so it has to be done manually' + + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + + '\n https://www.sqlite.org/lang_altertable.html' + + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3' + + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + + '\n*/' + ); + } +} + +class PgAlterTableAlterColumnDropDefaultConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_default' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnDropDefaultStatement) { + const { tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP DEFAULT;`; + } +} + +class PgAlterTableAlterColumnDropGeneratedConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_generated' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnDropGeneratedStatement) { + const { tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? 
`"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP EXPRESSION;`; + } +} + +class PgAlterTableAlterColumnSetExpressionConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_generated' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnSetGeneratedStatement) { + const { + tableName, + columnName, + schema, + columnNotNull: notNull, + columnDefault, + columnOnUpdate, + columnAutoIncrement, + columnPk, + columnGenerated, + } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + const addColumnStatement = new PgAlterTableAddColumnConvertor().convert({ + schema, + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull, + default: columnDefault, + onUpdate: columnOnUpdate, + autoincrement: columnAutoIncrement, + primaryKey: columnPk, + generated: columnGenerated, + }, + type: 'alter_table_add_column', + }); + + return [ + `ALTER TABLE ${tableNameWithSchema} drop column "${columnName}";`, + addColumnStatement, + ]; + } +} + +class PgAlterTableAlterColumnAlterrGeneratedConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_alter_generated' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnAlterGeneratedStatement) { + const { + tableName, + columnName, + schema, + columnNotNull: notNull, + columnDefault, + columnOnUpdate, + columnAutoIncrement, + columnPk, + columnGenerated, + } = statement; + + const tableNameWithSchema = schema + ? 
`"${schema}"."${tableName}"` + : `"${tableName}"`; + + const addColumnStatement = new PgAlterTableAddColumnConvertor().convert({ + schema, + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull, + default: columnDefault, + onUpdate: columnOnUpdate, + autoincrement: columnAutoIncrement, + primaryKey: columnPk, + generated: columnGenerated, + }, + type: 'alter_table_add_column', + }); + + return [ + `ALTER TABLE ${tableNameWithSchema} drop column "${columnName}";`, + addColumnStatement, + ]; + } +} + +//// +class SqliteAlterTableAlterColumnDropGeneratedConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_generated' + && dialect === 'sqlite' + ); + } + + convert(statement: JsonAlterColumnDropGeneratedStatement) { + const { + tableName, + columnName, + schema, + columnDefault, + columnOnUpdate, + columnAutoIncrement, + columnPk, + columnGenerated, + columnNotNull, + } = statement; + + const addColumnStatement = new SQLiteAlterTableAddColumnConvertor().convert( + { + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull: columnNotNull, + default: columnDefault, + onUpdate: columnOnUpdate, + autoincrement: columnAutoIncrement, + primaryKey: columnPk, + generated: columnGenerated, + }, + type: 'sqlite_alter_table_add_column', + }, + ); + + const dropColumnStatement = new SQLiteAlterTableDropColumnConvertor().convert({ + tableName, + columnName, + schema, + type: 'alter_table_drop_column', + }); + + return [dropColumnStatement, addColumnStatement]; + } +} + +class SqliteAlterTableAlterColumnSetExpressionConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_generated' + && dialect === 'sqlite' + ); + } + + convert(statement: JsonAlterColumnSetGeneratedStatement) { + const { + tableName, + columnName, + schema, + 
columnNotNull: notNull, + columnDefault, + columnOnUpdate, + columnAutoIncrement, + columnPk, + columnGenerated, + } = statement; + + const addColumnStatement = new SQLiteAlterTableAddColumnConvertor().convert( + { + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull, + default: columnDefault, + onUpdate: columnOnUpdate, + autoincrement: columnAutoIncrement, + primaryKey: columnPk, + generated: columnGenerated, + }, + type: 'sqlite_alter_table_add_column', + }, + ); + + const dropColumnStatement = new SQLiteAlterTableDropColumnConvertor().convert({ + tableName, + columnName, + schema, + type: 'alter_table_drop_column', + }); + + return [dropColumnStatement, addColumnStatement]; + } +} + +class SqliteAlterTableAlterColumnAlterGeneratedConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_alter_generated' + && dialect === 'sqlite' + ); + } + + convert(statement: JsonAlterColumnAlterGeneratedStatement) { + const { + tableName, + columnName, + schema, + columnNotNull, + columnDefault, + columnOnUpdate, + columnAutoIncrement, + columnPk, + columnGenerated, + } = statement; + + const addColumnStatement = new SQLiteAlterTableAddColumnConvertor().convert( + { + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull: columnNotNull, + default: columnDefault, + onUpdate: columnOnUpdate, + autoincrement: columnAutoIncrement, + primaryKey: columnPk, + generated: columnGenerated, + }, + type: 'sqlite_alter_table_add_column', + }, + ); + + const dropColumnStatement = new SQLiteAlterTableDropColumnConvertor().convert({ + tableName, + columnName, + schema, + type: 'alter_table_drop_column', + }); + + return [dropColumnStatement, addColumnStatement]; + } +} + +//// + +class MySqlAlterTableAlterColumnAlterrGeneratedConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + 
statement.type === 'alter_table_alter_column_alter_generated' + && dialect === 'mysql' + ); + } + + convert(statement: JsonAlterColumnAlterGeneratedStatement) { + const { + tableName, + columnName, + schema, + columnNotNull: notNull, + columnDefault, + columnOnUpdate, + columnAutoIncrement, + columnPk, + columnGenerated, + } = statement; + + const tableNameWithSchema = schema + ? `\`${schema}\`.\`${tableName}\`` + : `\`${tableName}\``; + + const addColumnStatement = new MySqlAlterTableAddColumnConvertor().convert({ + schema, + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull, + default: columnDefault, + onUpdate: columnOnUpdate, + autoincrement: columnAutoIncrement, + primaryKey: columnPk, + generated: columnGenerated, + }, + type: 'alter_table_add_column', + }); + + return [ + `ALTER TABLE ${tableNameWithSchema} drop column \`${columnName}\`;`, + addColumnStatement, + ]; + } +} + +class MySqlAlterTableAlterColumnSetDefaultConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_default' + && dialect === 'mysql' + ); + } + + convert(statement: JsonAlterColumnSetDefaultStatement) { + const { tableName, columnName } = statement; + return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` SET DEFAULT ${statement.newDefaultValue};`; + } +} + +class MySqlAlterTableAlterColumnDropDefaultConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_default' + && dialect === 'mysql' + ); + } + + convert(statement: JsonAlterColumnDropDefaultStatement) { + const { tableName, columnName } = statement; + return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` DROP DEFAULT;`; + } +} + +class MySqlAlterTableAddPk extends Convertor { + can(statement: JsonStatement, dialect: string): boolean { + return ( + statement.type === 
'alter_table_alter_column_set_pk' + && dialect === 'mysql' + ); + } + convert(statement: JsonAlterColumnSetPrimaryKeyStatement): string { + return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY (\`${statement.columnName}\`);`; + } +} + +class MySqlAlterTableDropPk extends Convertor { + can(statement: JsonStatement, dialect: string): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_pk' + && dialect === 'mysql' + ); + } + convert(statement: JsonAlterColumnDropPrimaryKeyStatement): string { + return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY`; + } +} + +type MySqlModifyColumnStatement = + | JsonAlterColumnDropNotNullStatement + | JsonAlterColumnSetNotNullStatement + | JsonAlterColumnTypeStatement + | JsonAlterColumnDropOnUpdateStatement + | JsonAlterColumnSetOnUpdateStatement + | JsonAlterColumnDropAutoincrementStatement + | JsonAlterColumnSetAutoincrementStatement + | JsonAlterColumnSetDefaultStatement + | JsonAlterColumnDropDefaultStatement + | JsonAlterColumnSetGeneratedStatement + | JsonAlterColumnDropGeneratedStatement; + +class MySqlModifyColumn extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + (statement.type === 'alter_table_alter_column_set_type' + || statement.type === 'alter_table_alter_column_set_notnull' + || statement.type === 'alter_table_alter_column_drop_notnull' + || statement.type === 'alter_table_alter_column_drop_on_update' + || statement.type === 'alter_table_alter_column_set_on_update' + || statement.type === 'alter_table_alter_column_set_autoincrement' + || statement.type === 'alter_table_alter_column_drop_autoincrement' + || statement.type === 'alter_table_alter_column_set_default' + || statement.type === 'alter_table_alter_column_drop_default' + || statement.type === 'alter_table_alter_column_set_generated' + || statement.type === 'alter_table_alter_column_drop_generated') + && dialect === 'mysql' + ); + } + + convert(statement: 
MySqlModifyColumnStatement) { + const { tableName, columnName } = statement; + let columnType = ``; + let columnDefault: any = ''; + let columnNotNull = ''; + let columnOnUpdate = ''; + let columnAutoincrement = ''; + let primaryKey = statement.columnPk ? ' PRIMARY KEY' : ''; + let columnGenerated = ''; + + if (statement.type === 'alter_table_alter_column_drop_notnull') { + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_set_notnull') { + columnNotNull = ` NOT NULL`; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_drop_on_update') { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnOnUpdate = ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_set_on_update') { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = ` ON UPDATE CURRENT_TIMESTAMP`; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? 
' AUTO_INCREMENT' + : ''; + } else if ( + statement.type === 'alter_table_alter_column_set_autoincrement' + ) { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = ' AUTO_INCREMENT'; + } else if ( + statement.type === 'alter_table_alter_column_drop_autoincrement' + ) { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = ''; + } else if (statement.type === 'alter_table_alter_column_set_default') { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = ` DEFAULT ${statement.newDefaultValue}`; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_drop_default') { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_set_generated') { + columnType = ` ${statement.newDataType}`; + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? 
` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + + if (statement.columnGenerated?.type === 'virtual') { + return [ + new MySqlAlterTableDropColumnConvertor().convert({ + type: 'alter_table_drop_column', + tableName: statement.tableName, + columnName: statement.columnName, + schema: statement.schema, + }), + new MySqlAlterTableAddColumnConvertor().convert({ + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull: statement.columnNotNull, + default: statement.columnDefault, + onUpdate: statement.columnOnUpdate, + autoincrement: statement.columnAutoIncrement, + primaryKey: statement.columnPk, + generated: statement.columnGenerated, + }, + schema: statement.schema, + type: 'alter_table_add_column', + }), + ]; + } else { + columnGenerated = statement.columnGenerated + ? ` GENERATED ALWAYS AS (${statement.columnGenerated?.as}) ${statement.columnGenerated?.type.toUpperCase()}` + : ''; + } + } else if (statement.type === 'alter_table_alter_column_drop_generated') { + columnType = ` ${statement.newDataType}`; + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? 
' AUTO_INCREMENT' + : ''; + + if (statement.oldColumn?.generated?.type === 'virtual') { + return [ + new MySqlAlterTableDropColumnConvertor().convert({ + type: 'alter_table_drop_column', + tableName: statement.tableName, + columnName: statement.columnName, + schema: statement.schema, + }), + new MySqlAlterTableAddColumnConvertor().convert({ + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull: statement.columnNotNull, + default: statement.columnDefault, + onUpdate: statement.columnOnUpdate, + autoincrement: statement.columnAutoIncrement, + primaryKey: statement.columnPk, + generated: statement.columnGenerated, + }, + schema: statement.schema, + type: 'alter_table_add_column', + }), + ]; + } + } else { + columnType = ` ${statement.newDataType}`; + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + columnGenerated = statement.columnGenerated + ? ` GENERATED ALWAYS AS (${statement.columnGenerated?.as}) ${statement.columnGenerated?.type.toUpperCase()}` + : ''; + } + + // Seems like getting value from simple json2 snapshot makes dates be dates + columnDefault = columnDefault instanceof Date + ? 
columnDefault.toISOString() + : columnDefault; + + return `ALTER TABLE \`${tableName}\` MODIFY COLUMN \`${columnName}\`${columnType}${columnAutoincrement}${columnGenerated}${columnNotNull}${columnDefault}${columnOnUpdate};`; + } +} + +class SingleStoreAlterTableAlterColumnAlterrGeneratedConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_alter_generated' + && dialect === 'singlestore' + ); + } + + convert(statement: JsonAlterColumnAlterGeneratedStatement) { + const { + tableName, + columnName, + schema, + columnNotNull: notNull, + columnDefault, + columnOnUpdate, + columnAutoIncrement, + columnPk, + columnGenerated, + } = statement; + + const tableNameWithSchema = schema + ? `\`${schema}\`.\`${tableName}\`` + : `\`${tableName}\``; + + const addColumnStatement = new SingleStoreAlterTableAddColumnConvertor().convert({ + schema, + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull, + default: columnDefault, + onUpdate: columnOnUpdate, + autoincrement: columnAutoIncrement, + primaryKey: columnPk, + generated: columnGenerated, + }, + type: 'alter_table_add_column', + }); + + return [ + `ALTER TABLE ${tableNameWithSchema} drop column \`${columnName}\`;`, + addColumnStatement, + ]; + } +} + +class SingleStoreAlterTableAlterColumnSetDefaultConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_default' + && dialect === 'singlestore' + ); + } + + convert(statement: JsonAlterColumnSetDefaultStatement) { + const { tableName, columnName } = statement; + return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` SET DEFAULT ${statement.newDefaultValue};`; + } +} + +class SingleStoreAlterTableAlterColumnDropDefaultConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 
'alter_table_alter_column_drop_default' + && dialect === 'singlestore' + ); + } + + convert(statement: JsonAlterColumnDropDefaultStatement) { + const { tableName, columnName } = statement; + return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` DROP DEFAULT;`; + } +} + +class SingleStoreAlterTableAddPk extends Convertor { + can(statement: JsonStatement, dialect: string): boolean { + return ( + statement.type === 'alter_table_alter_column_set_pk' + && dialect === 'singlestore' + ); + } + convert(statement: JsonAlterColumnSetPrimaryKeyStatement): string { + return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY (\`${statement.columnName}\`);`; + } +} + +class SingleStoreAlterTableDropPk extends Convertor { + can(statement: JsonStatement, dialect: string): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_pk' + && dialect === 'singlestore' + ); + } + convert(statement: JsonAlterColumnDropPrimaryKeyStatement): string { + return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY`; + } +} + +type SingleStoreModifyColumnStatement = + | JsonAlterColumnDropNotNullStatement + | JsonAlterColumnSetNotNullStatement + | JsonAlterColumnTypeStatement + | JsonAlterColumnDropOnUpdateStatement + | JsonAlterColumnSetOnUpdateStatement + | JsonAlterColumnDropAutoincrementStatement + | JsonAlterColumnSetAutoincrementStatement + | JsonAlterColumnSetDefaultStatement + | JsonAlterColumnDropDefaultStatement + | JsonAlterColumnSetGeneratedStatement + | JsonAlterColumnDropGeneratedStatement; + +class SingleStoreModifyColumn extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + (statement.type === 'alter_table_alter_column_set_type' + || statement.type === 'alter_table_alter_column_set_notnull' + || statement.type === 'alter_table_alter_column_drop_notnull' + || statement.type === 'alter_table_alter_column_drop_on_update' + || statement.type === 'alter_table_alter_column_set_on_update' + || statement.type 
=== 'alter_table_alter_column_set_autoincrement' + || statement.type === 'alter_table_alter_column_drop_autoincrement' + || statement.type === 'alter_table_alter_column_set_default' + || statement.type === 'alter_table_alter_column_drop_default' + || statement.type === 'alter_table_alter_column_set_generated' + || statement.type === 'alter_table_alter_column_drop_generated') + && dialect === 'singlestore' + ); + } + + convert(statement: SingleStoreModifyColumnStatement) { + const { tableName, columnName } = statement; + let columnType = ``; + let columnDefault: any = ''; + let columnNotNull = ''; + let columnOnUpdate = ''; + let columnAutoincrement = ''; + let primaryKey = statement.columnPk ? ' PRIMARY KEY' : ''; + let columnGenerated = ''; + + if (statement.type === 'alter_table_alter_column_drop_notnull') { + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_set_notnull') { + columnNotNull = ` NOT NULL`; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_drop_on_update') { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnOnUpdate = ''; + columnAutoincrement = statement.columnAutoIncrement + ? 
' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_set_on_update') { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = ` ON UPDATE CURRENT_TIMESTAMP`; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if ( + statement.type === 'alter_table_alter_column_set_autoincrement' + ) { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = ' AUTO_INCREMENT'; + } else if ( + statement.type === 'alter_table_alter_column_drop_autoincrement' + ) { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = ''; + } else if (statement.type === 'alter_table_alter_column_set_default') { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = ` DEFAULT ${statement.newDefaultValue}`; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_drop_default') { + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? 
` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnType = ` ${statement.newDataType}`; + columnDefault = ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + } else if (statement.type === 'alter_table_alter_column_set_generated') { + columnType = ` ${statement.newDataType}`; + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + + if (statement.columnGenerated?.type === 'virtual') { + return [ + new SingleStoreAlterTableDropColumnConvertor().convert({ + type: 'alter_table_drop_column', + tableName: statement.tableName, + columnName: statement.columnName, + schema: statement.schema, + }), + new SingleStoreAlterTableAddColumnConvertor().convert({ + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull: statement.columnNotNull, + default: statement.columnDefault, + onUpdate: statement.columnOnUpdate, + autoincrement: statement.columnAutoIncrement, + primaryKey: statement.columnPk, + generated: statement.columnGenerated, + }, + schema: statement.schema, + type: 'alter_table_add_column', + }), + ]; + } else { + columnGenerated = statement.columnGenerated + ? ` GENERATED ALWAYS AS (${statement.columnGenerated?.as}) ${statement.columnGenerated?.type.toUpperCase()}` + : ''; + } + } else if (statement.type === 'alter_table_alter_column_drop_generated') { + columnType = ` ${statement.newDataType}`; + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? 
' AUTO_INCREMENT' + : ''; + + if (statement.oldColumn?.generated?.type === 'virtual') { + return [ + new SingleStoreAlterTableDropColumnConvertor().convert({ + type: 'alter_table_drop_column', + tableName: statement.tableName, + columnName: statement.columnName, + schema: statement.schema, + }), + new SingleStoreAlterTableAddColumnConvertor().convert({ + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull: statement.columnNotNull, + default: statement.columnDefault, + onUpdate: statement.columnOnUpdate, + autoincrement: statement.columnAutoIncrement, + primaryKey: statement.columnPk, + generated: statement.columnGenerated, + }, + schema: statement.schema, + type: 'alter_table_add_column', + }), + ]; + } + } else { + columnType = ` ${statement.newDataType}`; + columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; + columnOnUpdate = columnOnUpdate = statement.columnOnUpdate + ? ` ON UPDATE CURRENT_TIMESTAMP` + : ''; + columnDefault = statement.columnDefault + ? ` DEFAULT ${statement.columnDefault}` + : ''; + columnAutoincrement = statement.columnAutoIncrement + ? ' AUTO_INCREMENT' + : ''; + columnGenerated = statement.columnGenerated + ? ` GENERATED ALWAYS AS (${statement.columnGenerated?.as}) ${statement.columnGenerated?.type.toUpperCase()}` + : ''; + } + + // Seems like getting value from simple json2 snapshot makes dates be dates + columnDefault = columnDefault instanceof Date + ? 
columnDefault.toISOString() + : columnDefault; + + return `ALTER TABLE \`${tableName}\` MODIFY COLUMN \`${columnName}\`${columnType}${columnAutoincrement}${columnNotNull}${columnDefault}${columnOnUpdate}${columnGenerated};`; + } +} +class SqliteAlterTableAlterColumnDropDefaultConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_default' + && dialect === 'sqlite' + ); + } + + convert(statement: JsonAlterColumnDropDefaultStatement) { + return ( + '/*\n SQLite does not support "Drop default from column" out of the box, we do not generate automatic migration for that, so it has to be done manually' + + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + + '\n https://www.sqlite.org/lang_altertable.html' + + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3' + + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + + '\n*/' + ); + } +} + +class PgAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_composite_pk' && dialect === 'postgresql'; + } + + convert(statement: JsonCreateCompositePK) { + const { name, columns } = PgSquasher.unsquashPK(statement.data); + + const tableNameWithSchema = statement.schema + ? 
`"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${statement.constraintName}" PRIMARY KEY("${ + columns.join('","') + }");`; + } +} + +class PgAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'delete_composite_pk' && dialect === 'postgresql'; + } + + convert(statement: JsonDeleteCompositePK) { + const { name, columns } = PgSquasher.unsquashPK(statement.data); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.constraintName}";`; + } +} + +class PgAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_composite_pk' && dialect === 'postgresql'; + } + + convert(statement: JsonAlterCompositePK) { + const { name, columns } = PgSquasher.unsquashPK(statement.old); + const { name: newName, columns: newColumns } = PgSquasher.unsquashPK( + statement.new, + ); + + const tableNameWithSchema = statement.schema + ? 
`"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT ${statement.oldConstraintName};\n${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT ${statement.newConstraintName} PRIMARY KEY(${ + newColumns.join(',') + });`; + } +} + +class MySqlAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_composite_pk' && dialect === 'mysql'; + } + + convert(statement: JsonCreateCompositePK) { + const { name, columns } = MySqlSquasher.unsquashPK(statement.data); + return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY(\`${columns.join('`,`')}\`);`; + } +} + +class MySqlAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'delete_composite_pk' && dialect === 'mysql'; + } + + convert(statement: JsonDeleteCompositePK) { + const { name, columns } = MySqlSquasher.unsquashPK(statement.data); + return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY;`; + } +} + +class MySqlAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_composite_pk' && dialect === 'mysql'; + } + + convert(statement: JsonAlterCompositePK) { + const { name, columns } = MySqlSquasher.unsquashPK(statement.old); + const { name: newName, columns: newColumns } = MySqlSquasher.unsquashPK( + statement.new, + ); + return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY, ADD PRIMARY KEY(\`${newColumns.join('`,`')}\`);`; + } +} + +class SingleStoreAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_composite_pk' && dialect === 'singlestore'; + } + + convert(statement: 
JsonCreateCompositePK) { + const { name, columns } = SingleStoreSquasher.unsquashPK(statement.data); + return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY(\`${columns.join('`,`')}\`);`; + } +} + +class SingleStoreAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'delete_composite_pk' && dialect === 'singlestore'; + } + + convert(statement: JsonDeleteCompositePK) { + const { name, columns } = SingleStoreSquasher.unsquashPK(statement.data); + return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY;`; + } +} + +class SingleStoreAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_composite_pk' && dialect === 'singlestore'; + } + + convert(statement: JsonAlterCompositePK) { + const { name, columns } = SingleStoreSquasher.unsquashPK(statement.old); + const { name: newName, columns: newColumns } = SingleStoreSquasher.unsquashPK( + statement.new, + ); + return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY, ADD PRIMARY KEY(\`${newColumns.join('`,`')}\`);`; + } +} + +class SqliteAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_composite_pk' && dialect === 'sqlite'; + } + + convert(statement: JsonCreateCompositePK) { + let msg = '/*\n'; + msg += `You're trying to add PRIMARY KEY(${statement.data}) to '${statement.tableName}' table\n`; + msg += 'SQLite does not support adding primary key to an already created table\n'; + msg += 'You can do it in 3 steps with drizzle orm:\n'; + msg += ' - create new mirror table with needed pk, rename current table to old_table, generate SQL\n'; + msg += ' - migrate old data from one table to another\n'; + msg += ' - delete old_table in schema, generate sql\n\n'; + msg += 'or create manual 
migration like below:\n\n'; + msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; + msg += 'CREATE TABLE table_name (\n'; + msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; + msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; + msg += '\t...\n'; + msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n'; + msg += ' );\n'; + msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; + msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; + msg += '*/\n'; + return msg; + } +} +class SqliteAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'delete_composite_pk' && dialect === 'sqlite'; + } + + convert(statement: JsonDeleteCompositePK) { + let msg = '/*\n'; + msg += `You're trying to delete PRIMARY KEY(${statement.data}) from '${statement.tableName}' table\n`; + msg += 'SQLite does not support primary key deletion from existing table\n'; + msg += 'You can do it in 3 steps with drizzle orm:\n'; + msg += ' - create new mirror table without pk, rename current table to old_table, generate SQL\n'; + msg += ' - migrate old data from one table to another\n'; + msg += ' - delete old_table in schema, generate sql\n\n'; + msg += 'or create manual migration like below:\n\n'; + msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; + msg += 'CREATE TABLE table_name (\n'; + msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; + msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; + msg += '\t...\n'; + msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... 
pk_col_n)\n'; + msg += ' );\n'; + msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; + msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; + msg += '*/\n'; + return msg; + } +} + +class SqliteAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_composite_pk' && dialect === 'sqlite'; + } + + convert(statement: JsonAlterCompositePK) { + let msg = '/*\n'; + msg += 'SQLite does not support altering primary key\n'; + msg += 'You can do it in 3 steps with drizzle orm:\n'; + msg += ' - create new mirror table with needed pk, rename current table to old_table, generate SQL\n'; + msg += ' - migrate old data from one table to another\n'; + msg += ' - delete old_table in schema, generate sql\n\n'; + msg += 'or create manual migration like below:\n\n'; + msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; + msg += 'CREATE TABLE table_name (\n'; + msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; + msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; + msg += '\t...\n'; + msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n'; + msg += ' );\n'; + msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; + msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; + msg += '*/\n'; + + return msg; + } +} + +class PgAlterTableAlterColumnSetPrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_pk' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnSetPrimaryKeyStatement) { + const { tableName, columnName } = statement; + + const tableNameWithSchema = statement.schema + ? 
`"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ADD PRIMARY KEY ("${columnName}");`; + } +} + +class PgAlterTableAlterColumnDropPrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_pk' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnDropPrimaryKeyStatement) { + const { tableName, columnName, schema } = statement; + return `/* + Unfortunately in current drizzle-kit version we can't automatically get name for primary key. + We are working on making it available! + + Meanwhile you can: + 1. Check pk name in your database, by running + SELECT constraint_name FROM information_schema.table_constraints + WHERE table_schema = '${typeof schema === 'undefined' || schema === '' ? 'public' : schema}' + AND table_name = '${tableName}' + AND constraint_type = 'PRIMARY KEY'; + 2. Uncomment code below and paste pk name manually + + Hope to release this update as soon as possible +*/ + +-- ALTER TABLE "${tableName}" DROP CONSTRAINT "";`; + } +} + +class PgAlterTableAlterColumnSetNotNullConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_notnull' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnSetNotNullStatement) { + const { tableName, columnName } = statement; + + const tableNameWithSchema = statement.schema + ? 
`"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET NOT NULL;`; + } +} + +class SqliteAlterTableAlterColumnSetNotNullConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_notnull' + && dialect === 'sqlite' + ); + } + + convert(statement: JsonAlterColumnSetNotNullStatement) { + return ( + '/*\n SQLite does not support "Set not null to column" out of the box, we do not generate automatic migration for that, so it has to be done manually' + + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + + '\n https://www.sqlite.org/lang_altertable.html' + + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3' + + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + + '\n*/' + ); + } +} + +class SqliteAlterTableAlterColumnSetAutoincrementConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_autoincrement' + && dialect === 'sqlite' + ); + } + + convert(statement: JsonAlterColumnSetAutoincrementStatement) { + return ( + '/*\n SQLite does not support "Set autoincrement to a column" out of the box, we do not generate automatic migration for that, so it has to be done manually' + + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + + '\n https://www.sqlite.org/lang_altertable.html' + + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3' + + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + + '\n*/' + ); + } +} + +class SqliteAlterTableAlterColumnDropAutoincrementConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type 
=== 'alter_table_alter_column_drop_autoincrement' + && dialect === 'sqlite' + ); + } + + convert(statement: JsonAlterColumnDropAutoincrementStatement) { + return ( + '/*\n SQLite does not support "Drop autoincrement from a column" out of the box, we do not generate automatic migration for that, so it has to be done manually' + + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + + '\n https://www.sqlite.org/lang_altertable.html' + + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3' + + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + + '\n*/' + ); + } +} + +class PgAlterTableAlterColumnDropNotNullConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_notnull' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnDropNotNullStatement) { + const { tableName, columnName } = statement; + + const tableNameWithSchema = statement.schema + ? 
`"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP NOT NULL;`; + } +} + +class SqliteAlterTableAlterColumnDropNotNullConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_notnull' + && dialect === 'sqlite' + ); + } + + convert(statement: JsonAlterColumnDropNotNullStatement) { + return ( + '/*\n SQLite does not support "Drop not null from column" out of the box, we do not generate automatic migration for that, so it has to be done manually' + + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + + '\n https://www.sqlite.org/lang_altertable.html' + + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3' + + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + + '\n*/' + ); + } +} + +// FK +class PgCreateForeignKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_reference' && dialect === 'postgresql'; + } + + convert(statement: JsonCreateReferenceStatement): string { + const { + name, + tableFrom, + tableTo, + columnsFrom, + columnsTo, + onDelete, + onUpdate, + schemaTo, + } = PgSquasher.unsquashFK(statement.data); + const onDeleteStatement = onDelete ? ` ON DELETE ${onDelete}` : ''; + const onUpdateStatement = onUpdate ? ` ON UPDATE ${onUpdate}` : ''; + const fromColumnsString = columnsFrom.map((it) => `"${it}"`).join(','); + const toColumnsString = columnsTo.map((it) => `"${it}"`).join(','); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${tableFrom}"` + : `"${tableFrom}"`; + + const tableToNameWithSchema = schemaTo + ? 
`"${schemaTo}"."${tableTo}"` + : `"${tableTo}"`; + + const alterStatement = + `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${name}" FOREIGN KEY (${fromColumnsString}) REFERENCES ${tableToNameWithSchema}(${toColumnsString})${onDeleteStatement}${onUpdateStatement}`; + + let sql = 'DO $$ BEGIN\n'; + sql += ' ' + alterStatement + ';\n'; + sql += 'EXCEPTION\n'; + sql += ' WHEN duplicate_object THEN null;\n'; + sql += 'END $$;\n'; + return sql; + } +} + +class SqliteCreateForeignKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_reference' && dialect === 'sqlite'; + } + + convert(statement: JsonCreateReferenceStatement): string { + return ( + '/*\n SQLite does not support "Creating foreign key on existing column" out of the box, we do not generate automatic migration for that, so it has to be done manually' + + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + + '\n https://www.sqlite.org/lang_altertable.html' + + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + + '\n*/' + ); + } +} + +class MySqlCreateForeignKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_reference' && dialect === 'mysql'; + } + + convert(statement: JsonCreateReferenceStatement): string { + const { + name, + tableFrom, + tableTo, + columnsFrom, + columnsTo, + onDelete, + onUpdate, + } = MySqlSquasher.unsquashFK(statement.data); + const onDeleteStatement = onDelete ? ` ON DELETE ${onDelete}` : ''; + const onUpdateStatement = onUpdate ? 
` ON UPDATE ${onUpdate}` : ''; + const fromColumnsString = columnsFrom.map((it) => `\`${it}\``).join(','); + const toColumnsString = columnsTo.map((it) => `\`${it}\``).join(','); + + return `ALTER TABLE \`${tableFrom}\` ADD CONSTRAINT \`${name}\` FOREIGN KEY (${fromColumnsString}) REFERENCES \`${tableTo}\`(${toColumnsString})${onDeleteStatement}${onUpdateStatement};`; + } +} + +class PgAlterForeignKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_reference' && dialect === 'postgresql'; + } + + convert(statement: JsonAlterReferenceStatement): string { + const newFk = PgSquasher.unsquashFK(statement.data); + const oldFk = PgSquasher.unsquashFK(statement.oldFkey); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${oldFk.tableFrom}"` + : `"${oldFk.tableFrom}"`; + + let sql = `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${oldFk.name}";\n`; + + const onDeleteStatement = newFk.onDelete + ? ` ON DELETE ${newFk.onDelete}` + : ''; + const onUpdateStatement = newFk.onUpdate + ? ` ON UPDATE ${newFk.onUpdate}` + : ''; + + const fromColumnsString = newFk.columnsFrom + .map((it) => `"${it}"`) + .join(','); + const toColumnsString = newFk.columnsTo.map((it) => `"${it}"`).join(','); + + const tableFromNameWithSchema = oldFk.schemaTo + ? `"${oldFk.schemaTo}"."${oldFk.tableFrom}"` + : `"${oldFk.tableFrom}"`; + + const tableToNameWithSchema = newFk.schemaTo + ? 
`"${newFk.schemaTo}"."${newFk.tableFrom}"` + : `"${newFk.tableFrom}"`; + + const alterStatement = + `ALTER TABLE ${tableFromNameWithSchema} ADD CONSTRAINT "${newFk.name}" FOREIGN KEY (${fromColumnsString}) REFERENCES ${tableToNameWithSchema}(${toColumnsString})${onDeleteStatement}${onUpdateStatement}`; + + sql += 'DO $$ BEGIN\n'; + sql += ' ' + alterStatement + ';\n'; + sql += 'EXCEPTION\n'; + sql += ' WHEN duplicate_object THEN null;\n'; + sql += 'END $$;\n'; + return sql; + } +} + +class SqliteAlterForeignKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_reference' && dialect === 'sqlite'; + } + + convert(statement: JsonAlterReferenceStatement): string { + return ( + '/*\n SQLite does not support "Changing existing foreign key" out of the box, we do not generate automatic migration for that, so it has to be done manually' + + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + + '\n https://www.sqlite.org/lang_altertable.html' + + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + + '\n*/' + ); + } +} + +class PgDeleteForeignKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'delete_reference' && dialect === 'postgresql'; + } + + convert(statement: JsonDeleteReferenceStatement): string { + const tableFrom = statement.tableName; // delete fk from renamed table case + const { name } = PgSquasher.unsquashFK(statement.data); + + const tableNameWithSchema = statement.schema + ? 
`"${statement.schema}"."${tableFrom}"` + : `"${tableFrom}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${name}";\n`; + } +} + +class SqliteDeleteForeignKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'delete_reference' && dialect === 'sqlite'; + } + + convert(statement: JsonDeleteReferenceStatement): string { + return ( + '/*\n SQLite does not support "Dropping foreign key" out of the box, we do not generate automatic migration for that, so it has to be done manually' + + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + + '\n https://www.sqlite.org/lang_altertable.html' + + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + + '\n*/' + ); + } +} + +class MySqlDeleteForeignKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'delete_reference' && dialect === 'mysql'; + } + + convert(statement: JsonDeleteReferenceStatement): string { + const tableFrom = statement.tableName; // delete fk from renamed table case + const { name } = MySqlSquasher.unsquashFK(statement.data); + return `ALTER TABLE \`${tableFrom}\` DROP FOREIGN KEY \`${name}\`;\n`; + } +} + +class CreatePgIndexConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_index_pg' && dialect === 'postgresql'; + } + + convert(statement: JsonPgCreateIndexStatement): string { + const { + name, + columns, + isUnique, + concurrently, + with: withMap, + method, + where, + } = statement.data; + // // since postgresql 9.5 + const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; + const value = columns + .map( + (it) => + `${it.isExpression ? it.expression : `"${it.expression}"`}${ + it.opclass ? ` ${it.opclass}` : it.asc ? '' : ' DESC' + }${ + (it.asc && it.nulls && it.nulls === 'last') || it.opclass + ? 
'' + : ` NULLS ${it.nulls!.toUpperCase()}` + }`, + ) + .join(','); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + function reverseLogic(mappedWith: Record): string { + let reversedString = ''; + for (const key in mappedWith) { + if (mappedWith.hasOwnProperty(key)) { + reversedString += `${key}=${mappedWith[key]},`; + } + } + reversedString = reversedString.slice(0, -1); + return reversedString; + } + + return `CREATE ${indexPart}${ + concurrently ? ' CONCURRENTLY' : '' + } IF NOT EXISTS "${name}" ON ${tableNameWithSchema} USING ${method} (${value})${ + Object.keys(withMap!).length !== 0 + ? ` WITH (${reverseLogic(withMap!)})` + : '' + }${where ? ` WHERE ${where}` : ''};`; + } +} + +class CreateMySqlIndexConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_index' && dialect === 'mysql'; + } + + convert(statement: JsonCreateIndexStatement): string { + // should be changed + const { name, columns, isUnique } = MySqlSquasher.unsquashIdx( + statement.data, + ); + const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; + + const uniqueString = columns + .map((it) => { + return statement.internal?.indexes + ? statement.internal?.indexes[name]?.columns[it]?.isExpression + ? it + : `\`${it}\`` + : `\`${it}\``; + }) + .join(','); + + return `CREATE ${indexPart} \`${name}\` ON \`${statement.tableName}\` (${uniqueString});`; + } +} + +class CreateSingleStoreIndexConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_index' && dialect === 'singlestore'; + } + + convert(statement: JsonCreateIndexStatement): string { + // should be changed + const { name, columns, isUnique } = SingleStoreSquasher.unsquashIdx( + statement.data, + ); + const indexPart = isUnique ? 
'UNIQUE INDEX' : 'INDEX'; + + const uniqueString = columns + .map((it) => { + return statement.internal?.indexes + ? statement.internal?.indexes[name]?.columns[it]?.isExpression + ? it + : `\`${it}\`` + : `\`${it}\``; + }) + .join(','); + + return `CREATE ${indexPart} \`${name}\` ON \`${statement.tableName}\` (${uniqueString});`; + } +} + +export class CreateSqliteIndexConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_index' && dialect === 'sqlite'; + } + + convert(statement: JsonCreateIndexStatement): string { + // should be changed + const { name, columns, isUnique, where } = SQLiteSquasher.unsquashIdx( + statement.data, + ); + // // since postgresql 9.5 + const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; + const whereStatement = where ? ` WHERE ${where}` : ''; + const uniqueString = columns + .map((it) => { + return statement.internal?.indexes + ? statement.internal?.indexes[name]?.columns[it]?.isExpression + ? 
it + : `\`${it}\`` + : `\`${it}\``; + }) + .join(','); + return `CREATE ${indexPart} \`${name}\` ON \`${statement.tableName}\` (${uniqueString})${whereStatement};`; + } +} + +class PgDropIndexConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_index' && dialect === 'postgresql'; + } + + convert(statement: JsonDropIndexStatement): string { + const { name } = PgSquasher.unsquashIdx(statement.data); + return `DROP INDEX IF EXISTS "${name}";`; + } +} + +class PgCreateSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_schema' && dialect === 'postgresql'; + } + + convert(statement: JsonCreateSchema) { + const { name } = statement; + return `CREATE SCHEMA "${name}";\n`; + } +} + +class PgRenameSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_schema' && dialect === 'postgresql'; + } + + convert(statement: JsonRenameSchema) { + const { from, to } = statement; + return `ALTER SCHEMA "${from}" RENAME TO "${to}";\n`; + } +} + +class PgDropSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_schema' && dialect === 'postgresql'; + } + + convert(statement: JsonCreateSchema) { + const { name } = statement; + return `DROP SCHEMA "${name}";\n`; + } +} + +class PgAlterTableSetSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_set_schema' && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterTableSetSchema) { + const { tableName, schemaFrom, schemaTo } = statement; + + return `ALTER TABLE "${schemaFrom}"."${tableName}" SET SCHEMA "${schemaTo}";\n`; + } +} + +class PgAlterTableSetNewSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): 
boolean { + return ( + statement.type === 'alter_table_set_new_schema' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterTableSetNewSchema) { + const { tableName, to, from } = statement; + + const tableNameWithSchema = from + ? `"${from}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} SET SCHEMA "${to}";\n`; + } +} + +class PgAlterTableRemoveFromSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_remove_from_schema' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterTableRemoveFromSchema) { + const { tableName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} SET SCHEMA public;\n`; + } +} + +export class SqliteDropIndexConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_index' && dialect === 'sqlite'; + } + + convert(statement: JsonDropIndexStatement): string { + const { name } = PgSquasher.unsquashIdx(statement.data); + return `DROP INDEX IF EXISTS \`${name}\`;`; + } +} + +class MySqlDropIndexConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_index' && dialect === 'mysql'; + } + + convert(statement: JsonDropIndexStatement): string { + const { name } = MySqlSquasher.unsquashIdx(statement.data); + return `DROP INDEX \`${name}\` ON \`${statement.tableName}\`;`; + } +} + +class SingleStoreDropIndexConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_index' && dialect === 'singlestore'; + } + + convert(statement: JsonDropIndexStatement): string { + const { name } = SingleStoreSquasher.unsquashIdx(statement.data); + return `DROP INDEX \`${name}\` ON 
\`${statement.tableName}\`;`; + } +} + +const convertors: Convertor[] = []; +convertors.push(new PgCreateTableConvertor()); +convertors.push(new MySqlCreateTableConvertor()); +convertors.push(new SingleStoreCreateTableConvertor()); +convertors.push(new SQLiteCreateTableConvertor()); + +convertors.push(new CreateTypeEnumConvertor()); + +convertors.push(new CreatePgSequenceConvertor()); +convertors.push(new DropPgSequenceConvertor()); +convertors.push(new RenamePgSequenceConvertor()); +convertors.push(new MovePgSequenceConvertor()); +convertors.push(new AlterPgSequenceConvertor()); + +convertors.push(new PgDropTableConvertor()); +convertors.push(new MySQLDropTableConvertor()); +convertors.push(new SingleStoreDropTableConvertor()); +convertors.push(new SQLiteDropTableConvertor()); + +convertors.push(new PgRenameTableConvertor()); +convertors.push(new MySqlRenameTableConvertor()); +convertors.push(new SingleStoreRenameTableConvertor()); +convertors.push(new SqliteRenameTableConvertor()); + +convertors.push(new PgAlterTableRenameColumnConvertor()); +convertors.push(new MySqlAlterTableRenameColumnConvertor()); +convertors.push(new SingleStoreAlterTableRenameColumnConvertor()); +convertors.push(new SQLiteAlterTableRenameColumnConvertor()); + +convertors.push(new PgAlterTableDropColumnConvertor()); +convertors.push(new MySqlAlterTableDropColumnConvertor()); +convertors.push(new SingleStoreAlterTableDropColumnConvertor()); +convertors.push(new SQLiteAlterTableDropColumnConvertor()); + +convertors.push(new PgAlterTableAddColumnConvertor()); +convertors.push(new MySqlAlterTableAddColumnConvertor()); +convertors.push(new SingleStoreAlterTableAddColumnConvertor()); +convertors.push(new SQLiteAlterTableAddColumnConvertor()); + +convertors.push(new PgAlterTableAlterColumnSetTypeConvertor()); + +convertors.push(new PgAlterTableAddUniqueConstraintConvertor()); +convertors.push(new PgAlterTableDropUniqueConstraintConvertor()); + +convertors.push(new 
MySQLAlterTableAddUniqueConstraintConvertor()); +convertors.push(new MySQLAlterTableDropUniqueConstraintConvertor()); + +convertors.push(new SingleStoreAlterTableAddUniqueConstraintConvertor()); +convertors.push(new SingleStoreAlterTableDropUniqueConstraintConvertor()); + +convertors.push(new CreatePgIndexConvertor()); +convertors.push(new CreateMySqlIndexConvertor()); +convertors.push(new CreateSingleStoreIndexConvertor()); +convertors.push(new CreateSqliteIndexConvertor()); + +convertors.push(new PgDropIndexConvertor()); +convertors.push(new SqliteDropIndexConvertor()); +convertors.push(new MySqlDropIndexConvertor()); +convertors.push(new SingleStoreDropIndexConvertor()); + +convertors.push(new AlterTypeAddValueConvertor()); + +convertors.push(new PgAlterTableAlterColumnSetPrimaryKeyConvertor()); +convertors.push(new PgAlterTableAlterColumnDropPrimaryKeyConvertor()); +convertors.push(new PgAlterTableAlterColumnSetNotNullConvertor()); +convertors.push(new PgAlterTableAlterColumnDropNotNullConvertor()); +convertors.push(new PgAlterTableAlterColumnSetDefaultConvertor()); +convertors.push(new PgAlterTableAlterColumnDropDefaultConvertor()); + +/// generated +convertors.push(new PgAlterTableAlterColumnSetExpressionConvertor()); +convertors.push(new PgAlterTableAlterColumnDropGeneratedConvertor()); +convertors.push(new PgAlterTableAlterColumnAlterrGeneratedConvertor()); + +convertors.push(new MySqlAlterTableAlterColumnAlterrGeneratedConvertor()); + +convertors.push(new SingleStoreAlterTableAlterColumnAlterrGeneratedConvertor()); + +convertors.push(new SqliteAlterTableAlterColumnDropGeneratedConvertor()); +convertors.push(new SqliteAlterTableAlterColumnAlterGeneratedConvertor()); +convertors.push(new SqliteAlterTableAlterColumnSetExpressionConvertor()); + +convertors.push(new MySqlModifyColumn()); +// convertors.push(new MySqlAlterTableAlterColumnSetDefaultConvertor()); +// convertors.push(new MySqlAlterTableAlterColumnDropDefaultConvertor()); + +convertors.push(new 
SingleStoreModifyColumn()); + +convertors.push(new PgCreateForeignKeyConvertor()); +convertors.push(new MySqlCreateForeignKeyConvertor()); + +convertors.push(new PgAlterForeignKeyConvertor()); + +convertors.push(new PgDeleteForeignKeyConvertor()); +convertors.push(new MySqlDeleteForeignKeyConvertor()); + +convertors.push(new PgCreateSchemaConvertor()); +convertors.push(new PgRenameSchemaConvertor()); +convertors.push(new PgDropSchemaConvertor()); +convertors.push(new PgAlterTableSetSchemaConvertor()); +convertors.push(new PgAlterTableSetNewSchemaConvertor()); +convertors.push(new PgAlterTableRemoveFromSchemaConvertor()); + +// Unhandled sqlite queries, so they will appear last +convertors.push(new SQLiteAlterTableAlterColumnSetTypeConvertor()); +convertors.push(new SqliteAlterForeignKeyConvertor()); +convertors.push(new SqliteDeleteForeignKeyConvertor()); +convertors.push(new SqliteCreateForeignKeyConvertor()); + +convertors.push(new SQLiteAlterTableAddUniqueConstraintConvertor()); +convertors.push(new SQLiteAlterTableDropUniqueConstraintConvertor()); + +convertors.push(new PgAlterTableAlterColumnDropGenerated()); +convertors.push(new PgAlterTableAlterColumnSetGenerated()); +convertors.push(new PgAlterTableAlterColumnAlterGenerated()); + +convertors.push(new SqliteAlterTableAlterColumnSetNotNullConvertor()); +convertors.push(new SqliteAlterTableAlterColumnDropNotNullConvertor()); +convertors.push(new SqliteAlterTableAlterColumnSetDefaultConvertor()); +convertors.push(new SqliteAlterTableAlterColumnDropDefaultConvertor()); + +convertors.push(new SqliteAlterTableAlterColumnSetAutoincrementConvertor()); +convertors.push(new SqliteAlterTableAlterColumnDropAutoincrementConvertor()); + +convertors.push(new SqliteAlterTableCreateCompositePrimaryKeyConvertor()); +convertors.push(new SqliteAlterTableDeleteCompositePrimaryKeyConvertor()); +convertors.push(new SqliteAlterTableAlterCompositePrimaryKeyConvertor()); + +convertors.push(new 
PgAlterTableCreateCompositePrimaryKeyConvertor()); +convertors.push(new PgAlterTableDeleteCompositePrimaryKeyConvertor()); +convertors.push(new PgAlterTableAlterCompositePrimaryKeyConvertor()); + +convertors.push(new MySqlAlterTableDeleteCompositePrimaryKeyConvertor()); +convertors.push(new MySqlAlterTableDropPk()); +convertors.push(new MySqlAlterTableCreateCompositePrimaryKeyConvertor()); +convertors.push(new MySqlAlterTableAddPk()); +convertors.push(new MySqlAlterTableAlterCompositePrimaryKeyConvertor()); + +convertors.push(new SingleStoreAlterTableDeleteCompositePrimaryKeyConvertor()); +convertors.push(new SingleStoreAlterTableDropPk()); +convertors.push(new SingleStoreAlterTableCreateCompositePrimaryKeyConvertor()); +convertors.push(new SingleStoreAlterTableAddPk()); +convertors.push(new SingleStoreAlterTableAlterCompositePrimaryKeyConvertor()); + +export const fromJson = (statements: JsonStatement[], dialect: Dialect) => { + const result = statements + .flatMap((statement) => { + const filtered = convertors.filter((it) => { + // console.log(statement, dialect) + return it.can(statement, dialect); + }); + + const convertor = filtered.length === 1 ? 
filtered[0] : undefined; + + if (!convertor) { + // console.log("no convertor:", statement.type, dialect); + return ''; + } + + return convertor.convert(statement); + }) + .filter((it) => it !== ''); + return result; +}; + +// blog.yo1.dog/updating-enum-values-in-postgresql-the-safe-and-easy-way/ +// test case for enum altering +https: ` +create table users ( + id int, + name character varying(128) +); + +create type venum as enum('one', 'two', 'three'); +alter table users add column typed venum; + +insert into users(id, name, typed) values (1, 'name1', 'one'); +insert into users(id, name, typed) values (2, 'name2', 'two'); +insert into users(id, name, typed) values (3, 'name3', 'three'); + +alter type venum rename to __venum; +create type venum as enum ('one', 'two', 'three', 'four', 'five'); + +ALTER TABLE users ALTER COLUMN typed TYPE venum USING typed::text::venum; + +insert into users(id, name, typed) values (4, 'name4', 'four'); +insert into users(id, name, typed) values (5, 'name5', 'five'); + +drop type __venum; +`; diff --git a/drizzle-kit/src/utils.ts b/drizzle-kit/src/utils.ts new file mode 100644 index 000000000..b14bad5b2 --- /dev/null +++ b/drizzle-kit/src/utils.ts @@ -0,0 +1,346 @@ +import type { RunResult } from 'better-sqlite3'; +import chalk from 'chalk'; +import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from 'fs'; +import { join } from 'path'; +import { parse } from 'url'; +import type { NamedWithSchema } from './cli/commands/migrate'; +import { info } from './cli/views'; +import { assertUnreachable, snapshotVersion } from './global'; +import type { Dialect } from './schemaValidator'; +import { backwardCompatibleMysqlSchema } from './serializer/mysqlSchema'; +import { backwardCompatiblePgSchema } from './serializer/pgSchema'; +import { backwardCompatibleSingleStoreSchema } from './serializer/singlestoreSchema'; +import { backwardCompatibleSqliteSchema } from './serializer/sqliteSchema'; +import type { ProxyParams } from 
'./serializer/studio'; + +export type Proxy = (params: ProxyParams) => Promise; + +export type SqliteProxy = { + proxy: (params: ProxyParams) => Promise; +}; + +export type DB = { + query: (sql: string, params?: any[]) => Promise; +}; + +export type SQLiteDB = { + query: (sql: string, params?: any[]) => Promise; + run(query: string): Promise; + batch?( + queries: { query: string; values?: any[] | undefined }[], + ): Promise; +}; + +export const copy = (it: T): T => { + return JSON.parse(JSON.stringify(it)); +}; + +export const objectValues = (obj: T): Array => { + return Object.values(obj); +}; + +export const assertV1OutFolder = (out: string) => { + if (!existsSync(out)) return; + + const oldMigrationFolders = readdirSync(out).filter( + (it) => it.length === 14 && /^\d+$/.test(it), + ); + + if (oldMigrationFolders.length > 0) { + console.log( + `Your migrations folder format is outdated, please run ${ + chalk.green.bold( + `drizzle-kit up`, + ) + }`, + ); + process.exit(1); + } +}; + +export type Journal = { + version: string; + dialect: Dialect; + entries: { + idx: number; + version: string; + when: number; + tag: string; + breakpoints: boolean; + }[]; +}; + +export const dryJournal = (dialect: Dialect): Journal => { + return { + version: snapshotVersion, + dialect, + entries: [], + }; +}; + +// export const preparePushFolder = (dialect: Dialect) => { +// const out = ".drizzle"; +// let snapshot: string = ""; +// if (!existsSync(join(out))) { +// mkdirSync(out); +// snapshot = JSON.stringify(dryJournal(dialect)); +// } else { +// snapshot = readdirSync(out)[0]; +// } + +// return { snapshot }; +// }; + +export const prepareOutFolder = (out: string, dialect: Dialect) => { + const meta = join(out, 'meta'); + const journalPath = join(meta, '_journal.json'); + + if (!existsSync(join(out, 'meta'))) { + mkdirSync(meta, { recursive: true }); + writeFileSync(journalPath, JSON.stringify(dryJournal(dialect))); + } + + const journal = 
JSON.parse(readFileSync(journalPath).toString()); + + const snapshots = readdirSync(meta) + .filter((it) => !it.startsWith('_')) + .map((it) => join(meta, it)); + + snapshots.sort(); + return { meta, snapshots, journal }; +}; + +const validatorForDialect = (dialect: Dialect) => { + switch (dialect) { + case 'postgresql': + return { validator: backwardCompatiblePgSchema, version: 7 }; + case 'sqlite': + return { validator: backwardCompatibleSqliteSchema, version: 6 }; + case 'mysql': + return { validator: backwardCompatibleMysqlSchema, version: 5 }; + case 'singlestore': + return { validator: backwardCompatibleSingleStoreSchema, version: 1 }; + } +}; + +export const validateWithReport = (snapshots: string[], dialect: Dialect) => { + // ✅ check if drizzle-kit can handle snapshot version + // ✅ check if snapshot is of the last version + // ✅ check if id of the snapshot is valid + // ✅ collect {} of prev id -> snapshotName[], if there's more than one - tell about collision + const { validator, version } = validatorForDialect(dialect); + + const result = snapshots.reduce( + (accum, it) => { + const raw = JSON.parse(readFileSync(`./${it}`).toString()); + + accum.rawMap[it] = raw; + + if (raw['version'] && Number(raw['version']) > version) { + console.log( + info( + `${it} snapshot is of unsupported version, please update drizzle-kit`, + ), + ); + process.exit(0); + } + + const result = validator.safeParse(raw); + if (!result.success) { + accum.malformed.push(it); + return accum; + } + + const snapshot = result.data; + if (snapshot.version !== String(version)) { + accum.nonLatest.push(it); + return accum; + } + + // only if latest version here + const idEntry = accum.idsMap[snapshot['prevId']] ?? 
{ + parent: it, + snapshots: [], + }; + idEntry.snapshots.push(it); + accum.idsMap[snapshot['prevId']] = idEntry; + + return accum; + }, + { + malformed: [], + nonLatest: [], + idToNameMap: {}, + idsMap: {}, + rawMap: {}, + } as { + malformed: string[]; + nonLatest: string[]; + idsMap: Record; + rawMap: Record; + }, + ); + + return result; +}; + +export const prepareMigrationFolder = ( + outFolder: string = 'drizzle', + dialect: Dialect, +) => { + const { snapshots, journal } = prepareOutFolder(outFolder, dialect); + const report = validateWithReport(snapshots, dialect); + if (report.nonLatest.length > 0) { + console.log( + report.nonLatest + .map((it) => { + return `${it}/snapshot.json is not of the latest version`; + }) + .concat(`Run ${chalk.green.bold(`drizzle-kit up`)}`) + .join('\n'), + ); + process.exit(0); + } + + if (report.malformed.length) { + const message = report.malformed + .map((it) => { + return `${it} data is malformed`; + }) + .join('\n'); + console.log(message); + } + + const collisionEntries = Object.entries(report.idsMap).filter( + (it) => it[1].snapshots.length > 1, + ); + + const message = collisionEntries + .map((it) => { + const data = it[1]; + return `[${ + data.snapshots.join( + ', ', + ) + }] are pointing to a parent snapshot: ${data.parent}/snapshot.json which is a collision.`; + }) + .join('\n') + .trim(); + if (message) { + console.log(chalk.red.bold('Error:'), message); + } + + const abort = report.malformed.length!! 
|| collisionEntries.length > 0; + + if (abort) { + process.exit(0); + } + + return { snapshots, journal }; +}; + +export const prepareMigrationMeta = ( + schemas: { from: string; to: string }[], + tables: { from: NamedWithSchema; to: NamedWithSchema }[], + columns: { + from: { table: string; schema: string; column: string }; + to: { table: string; schema: string; column: string }; + }[], +) => { + const _meta = { + schemas: {} as Record, + tables: {} as Record, + columns: {} as Record, + }; + + schemas.forEach((it) => { + const from = schemaRenameKey(it.from); + const to = schemaRenameKey(it.to); + _meta.schemas[from] = to; + }); + tables.forEach((it) => { + const from = tableRenameKey(it.from); + const to = tableRenameKey(it.to); + _meta.tables[from] = to; + }); + + columns.forEach((it) => { + const from = columnRenameKey(it.from.table, it.from.schema, it.from.column); + const to = columnRenameKey(it.to.table, it.to.schema, it.to.column); + _meta.columns[from] = to; + }); + + return _meta; +}; + +export const schemaRenameKey = (it: string) => { + return it; +}; + +export const tableRenameKey = (it: NamedWithSchema) => { + const out = it.schema ? `"${it.schema}"."${it.name}"` : `"${it.name}"`; + return out; +}; + +export const columnRenameKey = ( + table: string, + schema: string, + column: string, +) => { + const out = schema + ? 
`"${schema}"."${table}"."${column}"` + : `"${table}"."${column}"`; + return out; +}; + +export const kloudMeta = () => { + return { + pg: [5], + mysql: [] as number[], + sqlite: [] as number[], + }; +}; + +export const normaliseSQLiteUrl = ( + it: string, + type: 'libsql' | 'better-sqlite', +) => { + if (type === 'libsql') { + if (it.startsWith('file:')) { + return it; + } + try { + const url = parse(it); + if (url.protocol === null) { + return `file:${it}`; + } + return it; + } catch (e) { + return `file:${it}`; + } + } + + if (type === 'better-sqlite') { + if (it.startsWith('file:')) { + return it.substring(5); + } + + return it; + } + + assertUnreachable(type); +}; + +export const normalisePGliteUrl = ( + it: string, +) => { + if (it.startsWith('file:')) { + return it.substring(5); + } + + return it; +}; + +export function isPgArrayType(sqlType: string) { + return sqlType.match(/.*\[\d*\].*|.*\[\].*/g) !== null; +} diff --git a/drizzle-kit/src/utils/certs.ts b/drizzle-kit/src/utils/certs.ts new file mode 100644 index 000000000..b9a6d4de8 --- /dev/null +++ b/drizzle-kit/src/utils/certs.ts @@ -0,0 +1,37 @@ +import envPaths from 'env-paths'; +import { mkdirSync } from 'fs'; +import { access, readFile } from 'fs/promises'; +import { join } from 'path'; +import { $ } from 'zx'; + +const p = envPaths('drizzle-studio', { + suffix: '', +}); + +$.verbose = false; +$.cwd = p.data; +mkdirSync(p.data, { recursive: true }); + +export const certs = async () => { + const res = await $`mkcert --help`.nothrow(); + + // ~/.local/share/drizzle-studio + const keyPath = join(p.data, 'localhost-key.pem'); + const certPath = join(p.data, 'localhost.pem'); + + if (res.exitCode === 0) { + try { + await Promise.all([access(keyPath), access(certPath)]); + } catch (e) { + await $`mkcert localhost`.nothrow(); + } + const [key, cert] = await Promise.all([ + readFile(keyPath, { encoding: 'utf-8' }), + readFile(certPath, { encoding: 'utf-8' }), + ]); + return key && cert ? 
{ key, cert } : null; + } + return null; +}; + +certs(); diff --git a/drizzle-kit/src/utils/words.ts b/drizzle-kit/src/utils/words.ts new file mode 100644 index 000000000..b0c686659 --- /dev/null +++ b/drizzle-kit/src/utils/words.ts @@ -0,0 +1,1333 @@ +import type { Prefix } from '../cli/validations/common'; + +export const prepareMigrationMetadata = ( + idx: number, + prefixMode: Prefix, + name?: string, +) => { + const prefix = prefixMode === 'index' + ? idx.toFixed(0).padStart(4, '0') + : prefixMode === 'timestamp' || prefixMode === 'supabase' + ? new Date() + .toISOString() + .replace('T', '') + .replaceAll('-', '') + .replaceAll(':', '') + .slice(0, 14) + : prefixMode === 'unix' + ? Math.floor(Date.now() / 1000) + : ''; + + const suffix = name || `${adjectives.random()}_${heroes.random()}`; + const tag = `${prefix}_${suffix}`; + return { prefix, suffix, tag }; +}; + +export const adjectives = [ + 'abandoned', + 'aberrant', + 'abnormal', + 'absent', + 'absurd', + 'acoustic', + 'adorable', + 'amazing', + 'ambiguous', + 'ambitious', + 'amused', + 'amusing', + 'ancient', + 'aromatic', + 'aspiring', + 'awesome', + 'bent', + 'big', + 'bitter', + 'bizarre', + 'black', + 'blue', + 'blushing', + 'bored', + 'boring', + 'bouncy', + 'brainy', + 'brave', + 'breezy', + 'brief', + 'bright', + 'broad', + 'broken', + 'brown', + 'bumpy', + 'burly', + 'busy', + 'calm', + 'careful', + 'careless', + 'certain', + 'charming', + 'cheerful', + 'chemical', + 'chief', + 'chilly', + 'chubby', + 'chunky', + 'clammy', + 'classy', + 'clean', + 'clear', + 'clever', + 'cloudy', + 'closed', + 'clumsy', + 'cold', + 'colorful', + 'colossal', + 'common', + 'complete', + 'complex', + 'concerned', + 'condemned', + 'confused', + 'conscious', + 'cooing', + 'cool', + 'crazy', + 'cuddly', + 'cultured', + 'curious', + 'curly', + 'curved', + 'curvy', + 'cute', + 'cynical', + 'daffy', + 'daily', + 'damp', + 'dapper', + 'dark', + 'dashing', + 'dazzling', + 'dear', + 'deep', + 'demonic', + 'dizzy', + 'dry', 
+ 'dusty', + 'eager', + 'early', + 'easy', + 'elite', + 'eminent', + 'empty', + 'equal', + 'even', + 'exotic', + 'fair', + 'faithful', + 'familiar', + 'famous', + 'fancy', + 'fantastic', + 'far', + 'fast', + 'fat', + 'faulty', + 'fearless', + 'fine', + 'first', + 'fixed', + 'flaky', + 'flashy', + 'flat', + 'flawless', + 'flimsy', + 'flippant', + 'flowery', + 'fluffy', + 'foamy', + 'free', + 'freezing', + 'fresh', + 'friendly', + 'funny', + 'furry', + 'futuristic', + 'fuzzy', + 'giant', + 'gifted', + 'gigantic', + 'glamorous', + 'glorious', + 'glossy', + 'good', + 'goofy', + 'gorgeous', + 'graceful', + 'gray', + 'great', + 'greedy', + 'green', + 'grey', + 'groovy', + 'handy', + 'happy', + 'hard', + 'harsh', + 'heavy', + 'hesitant', + 'high', + 'hot', + 'huge', + 'icy', + 'illegal', + 'jazzy', + 'jittery', + 'keen', + 'kind', + 'known', + 'lame', + 'large', + 'last', + 'late', + 'lazy', + 'lean', + 'left', + 'legal', + 'lethal', + 'light', + 'little', + 'lively', + 'living', + 'lonely', + 'long', + 'loose', + 'loud', + 'lovely', + 'loving', + 'low', + 'lowly', + 'lucky', + 'lumpy', + 'lush', + 'luxuriant', + 'lying', + 'lyrical', + 'magenta', + 'magical', + 'majestic', + 'many', + 'massive', + 'married', + 'marvelous', + 'material', + 'mature', + 'mean', + 'medical', + 'melodic', + 'melted', + 'messy', + 'mighty', + 'military', + 'milky', + 'minor', + 'misty', + 'mixed', + 'moaning', + 'modern', + 'motionless', + 'mushy', + 'mute', + 'mysterious', + 'naive', + 'nappy', + 'narrow', + 'nasty', + 'natural', + 'neat', + 'nebulous', + 'needy', + 'nervous', + 'new', + 'next', + 'nice', + 'nifty', + 'noisy', + 'normal', + 'nostalgic', + 'nosy', + 'numerous', + 'odd', + 'old', + 'omniscient', + 'open', + 'opposite', + 'optimal', + 'orange', + 'ordinary', + 'organic', + 'outgoing', + 'outstanding', + 'oval', + 'overconfident', + 'overjoyed', + 'overrated', + 'pale', + 'panoramic', + 'parallel', + 'parched', + 'past', + 'peaceful', + 'perfect', + 'perpetual', + 'petite', + 
'pink', + 'plain', + 'polite', + 'powerful', + 'premium', + 'pretty', + 'previous', + 'productive', + 'public', + 'purple', + 'puzzling', + 'quick', + 'quiet', + 'rainy', + 'rapid', + 'rare', + 'real', + 'red', + 'redundant', + 'reflective', + 'regular', + 'remarkable', + 'rich', + 'right', + 'robust', + 'romantic', + 'round', + 'sad', + 'safe', + 'salty', + 'same', + 'secret', + 'serious', + 'shallow', + 'sharp', + 'shiny', + 'shocking', + 'short', + 'silent', + 'silky', + 'silly', + 'simple', + 'skinny', + 'sleepy', + 'slim', + 'slimy', + 'slippery', + 'sloppy', + 'slow', + 'small', + 'smart', + 'smiling', + 'smooth', + 'soft', + 'solid', + 'sour', + 'sparkling', + 'special', + 'spicy', + 'spooky', + 'spotty', + 'square', + 'stale', + 'steady', + 'steep', + 'sticky', + 'stiff', + 'stormy', + 'strange', + 'striped', + 'strong', + 'sturdy', + 'sudden', + 'superb', + 'supreme', + 'sweet', + 'swift', + 'talented', + 'tan', + 'tearful', + 'tense', + 'thankful', + 'thick', + 'thin', + 'third', + 'tidy', + 'tiny', + 'tired', + 'tiresome', + 'tough', + 'tranquil', + 'tricky', + 'true', + 'typical', + 'uneven', + 'unique', + 'unknown', + 'unusual', + 'useful', + 'vengeful', + 'violet', + 'volatile', + 'wakeful', + 'wandering', + 'warm', + 'watery', + 'wealthy', + 'wet', + 'white', + 'whole', + 'wide', + 'wild', + 'windy', + 'wise', + 'wonderful', + 'wooden', + 'woozy', + 'workable', + 'worried', + 'worthless', + 'yellow', + 'yielding', + 'young', + 'youthful', + 'yummy', + 'zippy', +]; + +export const heroes = [ + 'aaron_stack', + 'abomination', + 'absorbing_man', + 'adam_destine', + 'adam_warlock', + 'agent_brand', + 'agent_zero', + 'albert_cleary', + 'alex_power', + 'alex_wilder', + 'alice', + 'amazoness', + 'amphibian', + 'angel', + 'anita_blake', + 'annihilus', + 'anthem', + 'apocalypse', + 'aqueduct', + 'arachne', + 'archangel', + 'arclight', + 'ares', + 'argent', + 'avengers', + 'azazel', + 'banshee', + 'baron_strucker', + 'baron_zemo', + 'barracuda', + 'bastion', + 
'beast', + 'bedlam', + 'ben_grimm', + 'ben_parker', + 'ben_urich', + 'betty_brant', + 'betty_ross', + 'beyonder', + 'big_bertha', + 'bill_hollister', + 'bishop', + 'black_bird', + 'black_bolt', + 'black_cat', + 'black_crow', + 'black_knight', + 'black_panther', + 'black_queen', + 'black_tarantula', + 'black_tom', + 'black_widow', + 'blackheart', + 'blacklash', + 'blade', + 'blazing_skull', + 'blindfold', + 'blink', + 'blizzard', + 'blob', + 'blockbuster', + 'blonde_phantom', + 'bloodaxe', + 'bloodscream', + 'bloodstorm', + 'bloodstrike', + 'blue_blade', + 'blue_marvel', + 'blue_shield', + 'blur', + 'boom_boom', + 'boomer', + 'boomerang', + 'bromley', + 'brood', + 'brother_voodoo', + 'bruce_banner', + 'bucky', + 'bug', + 'bulldozer', + 'bullseye', + 'bushwacker', + 'butterfly', + 'cable', + 'callisto', + 'calypso', + 'cammi', + 'cannonball', + 'captain_america', + 'captain_britain', + 'captain_cross', + 'captain_flint', + 'captain_marvel', + 'captain_midlands', + 'captain_stacy', + 'captain_universe', + 'cardiac', + 'caretaker', + 'cargill', + 'carlie_cooper', + 'carmella_unuscione', + 'carnage', + 'cassandra_nova', + 'catseye', + 'celestials', + 'centennial', + 'cerebro', + 'cerise', + 'chamber', + 'chameleon', + 'champions', + 'changeling', + 'charles_xavier', + 'chat', + 'chimera', + 'christian_walker', + 'chronomancer', + 'clea', + 'clint_barton', + 'cloak', + 'cobalt_man', + 'colleen_wing', + 'colonel_america', + 'colossus', + 'corsair', + 'crusher_hogan', + 'crystal', + 'cyclops', + 'dagger', + 'daimon_hellstrom', + 'dakota_north', + 'daredevil', + 'dark_beast', + 'dark_phoenix', + 'darkhawk', + 'darkstar', + 'darwin', + 'dazzler', + 'deadpool', + 'deathbird', + 'deathstrike', + 'demogoblin', + 'devos', + 'dexter_bennett', + 'diamondback', + 'doctor_doom', + 'doctor_faustus', + 'doctor_octopus', + 'doctor_spectrum', + 'doctor_strange', + 'domino', + 'donald_blake', + 'doomsday', + 'doorman', + 'dorian_gray', + 'dormammu', + 'dracula', + 'dragon_lord', + 
'dragon_man', + 'drax', + 'dreadnoughts', + 'dreaming_celestial', + 'dust', + 'earthquake', + 'echo', + 'eddie_brock', + 'edwin_jarvis', + 'ego', + 'electro', + 'elektra', + 'emma_frost', + 'enchantress', + 'ender_wiggin', + 'energizer', + 'epoch', + 'eternals', + 'eternity', + 'excalibur', + 'exiles', + 'exodus', + 'expediter', + 'ezekiel', + 'ezekiel_stane', + 'fabian_cortez', + 'falcon', + 'fallen_one', + 'famine', + 'fantastic_four', + 'fat_cobra', + 'felicia_hardy', + 'fenris', + 'firebird', + 'firebrand', + 'firedrake', + 'firelord', + 'firestar', + 'fixer', + 'flatman', + 'forge', + 'forgotten_one', + 'frank_castle', + 'franklin_richards', + 'franklin_storm', + 'freak', + 'frightful_four', + 'frog_thor', + 'gabe_jones', + 'galactus', + 'gambit', + 'gamma_corps', + 'gamora', + 'gargoyle', + 'garia', + 'gateway', + 'gauntlet', + 'genesis', + 'george_stacy', + 'gertrude_yorkes', + 'ghost_rider', + 'giant_girl', + 'giant_man', + 'gideon', + 'gladiator', + 'glorian', + 'goblin_queen', + 'golden_guardian', + 'goliath', + 'gorgon', + 'gorilla_man', + 'grandmaster', + 'gravity', + 'green_goblin', + 'gressill', + 'grey_gargoyle', + 'greymalkin', + 'grim_reaper', + 'groot', + 'guardian', + 'guardsmen', + 'gunslinger', + 'gwen_stacy', + 'hairball', + 'hammerhead', + 'hannibal_king', + 'hardball', + 'harpoon', + 'harrier', + 'harry_osborn', + 'havok', + 'hawkeye', + 'hedge_knight', + 'hellcat', + 'hellfire_club', + 'hellion', + 'hemingway', + 'hercules', + 'hex', + 'hiroim', + 'hitman', + 'hobgoblin', + 'hulk', + 'human_cannonball', + 'human_fly', + 'human_robot', + 'human_torch', + 'husk', + 'hydra', + 'iceman', + 'ikaris', + 'imperial_guard', + 'impossible_man', + 'inertia', + 'infant_terrible', + 'inhumans', + 'ink', + 'invaders', + 'invisible_woman', + 'iron_fist', + 'iron_lad', + 'iron_man', + 'iron_monger', + 'iron_patriot', + 'ironclad', + 'jack_flag', + 'jack_murdock', + 'jack_power', + 'jackal', + 'jackpot', + 'james_howlett', + 'jamie_braddock', + 
'jane_foster', + 'jasper_sitwell', + 'jazinda', + 'jean_grey', + 'jetstream', + 'jigsaw', + 'jimmy_woo', + 'jocasta', + 'johnny_blaze', + 'johnny_storm', + 'joseph', + 'joshua_kane', + 'joystick', + 'jubilee', + 'juggernaut', + 'junta', + 'justice', + 'justin_hammer', + 'kabuki', + 'kang', + 'karen_page', + 'karma', + 'karnak', + 'kat_farrell', + 'kate_bishop', + 'katie_power', + 'ken_ellis', + 'khan', + 'kid_colt', + 'killer_shrike', + 'killmonger', + 'killraven', + 'king_bedlam', + 'king_cobra', + 'kingpin', + 'kinsey_walden', + 'kitty_pryde', + 'klaw', + 'komodo', + 'korath', + 'korg', + 'korvac', + 'kree', + 'krista_starr', + 'kronos', + 'kulan_gath', + 'kylun', + 'la_nuit', + 'lady_bullseye', + 'lady_deathstrike', + 'lady_mastermind', + 'lady_ursula', + 'lady_vermin', + 'lake', + 'landau', + 'layla_miller', + 'leader', + 'leech', + 'legion', + 'lenny_balinger', + 'leo', + 'leopardon', + 'leper_queen', + 'lester', + 'lethal_legion', + 'lifeguard', + 'lightspeed', + 'lila_cheney', + 'lilandra', + 'lilith', + 'lily_hollister', + 'lionheart', + 'living_lightning', + 'living_mummy', + 'living_tribunal', + 'liz_osborn', + 'lizard', + 'loa', + 'lockheed', + 'lockjaw', + 'logan', + 'loki', + 'loners', + 'longshot', + 'lord_hawal', + 'lord_tyger', + 'lorna_dane', + 'luckman', + 'lucky_pierre', + 'luke_cage', + 'luminals', + 'lyja', + 'ma_gnuci', + 'mac_gargan', + 'mach_iv', + 'machine_man', + 'mad_thinker', + 'madame_hydra', + 'madame_masque', + 'madame_web', + 'maddog', + 'madelyne_pryor', + 'madripoor', + 'madrox', + 'maelstrom', + 'maestro', + 'magdalene', + 'maggott', + 'magik', + 'maginty', + 'magma', + 'magneto', + 'magus', + 'major_mapleleaf', + 'makkari', + 'malcolm_colcord', + 'malice', + 'mandarin', + 'mandrill', + 'mandroid', + 'manta', + 'mantis', + 'marauders', + 'maria_hill', + 'mariko_yashida', + 'marrow', + 'marten_broadcloak', + 'martin_li', + 'marvel_apes', + 'marvel_boy', + 'marvel_zombies', + 'marvex', + 'masked_marvel', + 'masque', + 
'master_chief', + 'master_mold', + 'mastermind', + 'mathemanic', + 'matthew_murdock', + 'mattie_franklin', + 'mauler', + 'maverick', + 'maximus', + 'may_parker', + 'medusa', + 'meggan', + 'meltdown', + 'menace', + 'mentallo', + 'mentor', + 'mephisto', + 'mephistopheles', + 'mercury', + 'mesmero', + 'metal_master', + 'meteorite', + 'micromacro', + 'microbe', + 'microchip', + 'micromax', + 'midnight', + 'miek', + 'mikhail_rasputin', + 'millenium_guard', + 'mimic', + 'mindworm', + 'miracleman', + 'miss_america', + 'mister_fear', + 'mister_sinister', + 'misty_knight', + 'mockingbird', + 'moira_mactaggert', + 'mojo', + 'mole_man', + 'molecule_man', + 'molly_hayes', + 'molten_man', + 'mongoose', + 'mongu', + 'monster_badoon', + 'moon_knight', + 'moondragon', + 'moonstone', + 'morbius', + 'mordo', + 'morg', + 'morgan_stark', + 'morlocks', + 'morlun', + 'morph', + 'mother_askani', + 'mulholland_black', + 'multiple_man', + 'mysterio', + 'mystique', + 'namor', + 'namora', + 'namorita', + 'naoko', + 'natasha_romanoff', + 'nebula', + 'nehzno', + 'nekra', + 'nemesis', + 'network', + 'newton_destine', + 'next_avengers', + 'nextwave', + 'nick_fury', + 'nico_minoru', + 'nicolaos', + 'night_nurse', + 'night_thrasher', + 'nightcrawler', + 'nighthawk', + 'nightmare', + 'nightshade', + 'nitro', + 'nocturne', + 'nomad', + 'norman_osborn', + 'norrin_radd', + 'northstar', + 'nova', + 'nuke', + 'obadiah_stane', + 'odin', + 'ogun', + 'old_lace', + 'omega_flight', + 'omega_red', + 'omega_sentinel', + 'onslaught', + 'oracle', + 'orphan', + 'otto_octavius', + 'outlaw_kid', + 'overlord', + 'owl', + 'ozymandias', + 'paibok', + 'paladin', + 'pandemic', + 'paper_doll', + 'patch', + 'patriot', + 'payback', + 'penance', + 'pepper_potts', + 'pestilence', + 'pet_avengers', + 'pete_wisdom', + 'peter_parker', + 'peter_quill', + 'phalanx', + 'phantom_reporter', + 'phil_sheldon', + 'photon', + 'piledriver', + 'pixie', + 'plazm', + 'polaris', + 'post', + 'power_man', + 'power_pack', + 'praxagora', + 
'preak', + 'pretty_boy', + 'pride', + 'prima', + 'princess_powerful', + 'prism', + 'prodigy', + 'proemial_gods', + 'professor_monster', + 'proteus', + 'proudstar', + 'prowler', + 'psylocke', + 'psynapse', + 'puck', + 'puff_adder', + 'puma', + 'punisher', + 'puppet_master', + 'purifiers', + 'purple_man', + 'pyro', + 'quasar', + 'quasimodo', + 'queen_noir', + 'quentin_quire', + 'quicksilver', + 'rachel_grey', + 'radioactive_man', + 'rafael_vega', + 'rage', + 'raider', + 'randall', + 'randall_flagg', + 'random', + 'rattler', + 'ravenous', + 'rawhide_kid', + 'raza', + 'reaper', + 'reavers', + 'red_ghost', + 'red_hulk', + 'red_shift', + 'red_skull', + 'red_wolf', + 'redwing', + 'reptil', + 'retro_girl', + 'revanche', + 'rhino', + 'rhodey', + 'richard_fisk', + 'rick_jones', + 'ricochet', + 'rictor', + 'riptide', + 'risque', + 'robbie_robertson', + 'robin_chapel', + 'rocket_raccoon', + 'rocket_racer', + 'rockslide', + 'rogue', + 'roland_deschain', + 'romulus', + 'ronan', + 'roughhouse', + 'roulette', + 'roxanne_simpson', + 'rumiko_fujikawa', + 'runaways', + 'sabra', + 'sabretooth', + 'sage', + 'sally_floyd', + 'salo', + 'sandman', + 'santa_claus', + 'saracen', + 'sasquatch', + 'satana', + 'sauron', + 'scalphunter', + 'scarecrow', + 'scarlet_spider', + 'scarlet_witch', + 'scorpion', + 'scourge', + 'scrambler', + 'scream', + 'screwball', + 'sebastian_shaw', + 'secret_warriors', + 'selene', + 'senator_kelly', + 'sentinel', + 'sentinels', + 'sentry', + 'ser_duncan', + 'serpent_society', + 'sersi', + 'shadow_king', + 'shadowcat', + 'shaman', + 'shape', + 'shard', + 'sharon_carter', + 'sharon_ventura', + 'shatterstar', + 'shen', + 'sheva_callister', + 'shinko_yamashiro', + 'shinobi_shaw', + 'shiva', + 'shiver_man', + 'shocker', + 'shockwave', + 'shooting_star', + 'shotgun', + 'shriek', + 'silhouette', + 'silk_fever', + 'silver_centurion', + 'silver_fox', + 'silver_sable', + 'silver_samurai', + 'silver_surfer', + 'silverclaw', + 'silvermane', + 'sinister_six', + 'sir_ram', + 
'siren', + 'sister_grimm', + 'skaar', + 'skin', + 'skreet', + 'skrulls', + 'skullbuster', + 'slapstick', + 'slayback', + 'sleeper', + 'sleepwalker', + 'slipstream', + 'slyde', + 'smasher', + 'smiling_tiger', + 'snowbird', + 'solo', + 'songbird', + 'spacker_dave', + 'spectrum', + 'speed', + 'speed_demon', + 'speedball', + 'spencer_smythe', + 'sphinx', + 'spiral', + 'spirit', + 'spitfire', + 'spot', + 'sprite', + 'spyke', + 'squadron_sinister', + 'squadron_supreme', + 'squirrel_girl', + 'star_brand', + 'starbolt', + 'stardust', + 'starfox', + 'starhawk', + 'starjammers', + 'stark_industries', + 'stature', + 'steel_serpent', + 'stellaris', + 'stepford_cuckoos', + 'stephen_strange', + 'steve_rogers', + 'stick', + 'stingray', + 'stone_men', + 'storm', + 'stranger', + 'strong_guy', + 'stryfe', + 'sue_storm', + 'sugar_man', + 'sumo', + 'sunfire', + 'sunset_bain', + 'sunspot', + 'supernaut', + 'supreme_intelligence', + 'surge', + 'susan_delgado', + 'swarm', + 'sway', + 'switch', + 'swordsman', + 'synch', + 'tag', + 'talisman', + 'talkback', + 'talon', + 'talos', + 'tana_nile', + 'tarantula', + 'tarot', + 'taskmaster', + 'tattoo', + 'ted_forrester', + 'tempest', + 'tenebrous', + 'terrax', + 'terror', + 'texas_twister', + 'thaddeus_ross', + 'thanos', + 'the_anarchist', + 'the_call', + 'the_captain', + 'the_enforcers', + 'the_executioner', + 'the_fallen', + 'the_fury', + 'the_hand', + 'the_hood', + 'the_hunter', + 'the_initiative', + 'the_leader', + 'the_liberteens', + 'the_order', + 'the_phantom', + 'the_professor', + 'the_renegades', + 'the_santerians', + 'the_spike', + 'the_stranger', + 'the_twelve', + 'the_watchers', + 'thena', + 'thing', + 'thor', + 'thor_girl', + 'thunderball', + 'thunderbird', + 'thunderbolt', + 'thunderbolt_ross', + 'thunderbolts', + 'thundra', + 'tiger_shark', + 'tigra', + 'timeslip', + 'tinkerer', + 'titania', + 'titanium_man', + 'toad', + 'toad_men', + 'tomas', + 'tombstone', + 'tomorrow_man', + 'tony_stark', + 'toro', + 'toxin', + 'trauma', + 
'triathlon', + 'trish_tilby', + 'triton', + 'true_believers', + 'turbo', + 'tusk', + 'tyger_tiger', + 'typhoid_mary', + 'tyrannus', + 'ulik', + 'ultimates', + 'ultimatum', + 'ultimo', + 'ultragirl', + 'ultron', + 'umar', + 'unicorn', + 'union_jack', + 'unus', + 'valeria_richards', + 'valkyrie', + 'vampiro', + 'vance_astro', + 'vanisher', + 'vapor', + 'vargas', + 'vector', + 'veda', + 'vengeance', + 'venom', + 'venus', + 'vermin', + 'vertigo', + 'victor_mancha', + 'vin_gonzales', + 'vindicator', + 'violations', + 'viper', + 'virginia_dare', + 'vision', + 'vivisector', + 'vulcan', + 'vulture', + 'wallflower', + 'wallop', + 'wallow', + 'war_machine', + 'warbird', + 'warbound', + 'warhawk', + 'warlock', + 'warpath', + 'warstar', + 'wasp', + 'weapon_omega', + 'wendell_rand', + 'wendell_vaughn', + 'wendigo', + 'whiplash', + 'whirlwind', + 'whistler', + 'white_queen', + 'white_tiger', + 'whizzer', + 'wiccan', + 'wild_child', + 'wild_pack', + 'wildside', + 'william_stryker', + 'wilson_fisk', + 'wind_dancer', + 'winter_soldier', + 'wither', + 'wolf_cub', + 'wolfpack', + 'wolfsbane', + 'wolverine', + 'wonder_man', + 'wong', + 'wraith', + 'wrecker', + 'wrecking_crew', + 'xavin', + 'xorn', + 'yellow_claw', + 'yellowjacket', + 'young_avengers', + 'zaladane', + 'zaran', + 'zarda', + 'zarek', + 'zeigeist', + 'zemo', + 'zodiak', + 'zombie', + 'zuras', + 'zzzax', +]; diff --git a/drizzle-kit/tests/cli-generate.test.ts b/drizzle-kit/tests/cli-generate.test.ts new file mode 100644 index 000000000..3e5c0fc22 --- /dev/null +++ b/drizzle-kit/tests/cli-generate.test.ts @@ -0,0 +1,222 @@ +import { test as brotest } from '@drizzle-team/brocli'; +import { assert, expect, test } from 'vitest'; +import { generate } from '../src/cli/schema'; + +// good: +// #1 drizzle-kit generate --dialect=postgresql --schema=schema.ts +// #2 drizzle-kit generate --dialect=postgresql --schema=schema.ts --out=out +// #3 drizzle-kit generate +// #4 drizzle-kit generate --custom +// #5 drizzle-kit generate 
--name=custom +// #6 drizzle-kit generate --prefix=timestamp +// #7 drizzle-kit generate --prefix=timestamp --name=custom --custom +// #8 drizzle-kit generate --config=drizzle1.config.ts +// #9 drizzle-kit generate --dialect=postgresql --schema=schema.ts --out=out --prefix=timestamp --name=custom --custom + +// errors: +// #1 drizzle-kit generate --schema=src/schema.ts +// #2 drizzle-kit generate --dialect=postgresql +// #3 drizzle-kit generate --dialect=postgresql2 +// #4 drizzle-kit generate --driver=expo +// #5 drizzle-kit generate --dialect=postgresql --out=out +// #6 drizzle-kit generate --config=drizzle.config.ts --out=out +// #7 drizzle-kit generate --config=drizzle.config.ts --schema=schema.ts +// #8 drizzle-kit generate --config=drizzle.config.ts --dialect=postgresql + +test('generate #1', async (t) => { + const res = await brotest( + generate, + '--dialect=postgresql --schema=schema.ts', + ); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + name: undefined, + custom: false, + prefix: 'index', + breakpoints: true, + schema: 'schema.ts', + out: 'drizzle', + bundle: false, + }); +}); + +test('generate #2', async (t) => { + const res = await brotest( + generate, + '--dialect=postgresql --schema=schema.ts --out=out', + ); + + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + name: undefined, + custom: false, + prefix: 'index', + breakpoints: true, + schema: 'schema.ts', + out: 'out', + bundle: false, + }); +}); + +test('generate #3', async (t) => { + const res = await brotest(generate, ''); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + name: undefined, + custom: false, + prefix: 'index', + breakpoints: true, + schema: './schema.ts', + out: 'drizzle', + bundle: false, + }); +}); + +// config | pass through custom 
+test('generate #4', async (t) => { + const res = await brotest(generate, '--custom'); + + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + name: undefined, + custom: true, + prefix: 'index', + breakpoints: true, + schema: './schema.ts', + out: 'drizzle', + bundle: false, + }); +}); + +// config | pass through name +test('generate #5', async (t) => { + const res = await brotest(generate, '--name=custom'); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + name: 'custom', + custom: false, + prefix: 'index', + breakpoints: true, + schema: './schema.ts', + out: 'drizzle', + bundle: false, + }); +}); + +// config | pass through prefix +test('generate #6', async (t) => { + const res = await brotest(generate, '--prefix=timestamp'); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + name: undefined, + custom: false, + prefix: 'timestamp', + breakpoints: true, + schema: './schema.ts', + out: 'drizzle', + bundle: false, + }); +}); + +// config | pass through name, prefix and custom +test('generate #7', async (t) => { + const res = await brotest( + generate, + '--prefix=timestamp --name=custom --custom', + ); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + name: 'custom', + custom: true, + prefix: 'timestamp', + breakpoints: true, + schema: './schema.ts', + out: 'drizzle', + bundle: false, + }); +}); + +// custom config path +test('generate #8', async (t) => { + const res = await brotest(generate, '--config=expo.config.ts'); + assert.equal(res.type, 'handler'); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'sqlite', + name: undefined, + custom: false, + prefix: 'index', + breakpoints: true, + schema: 
'./schema.ts', + out: 'drizzle', + bundle: true, // expo driver + }); +}); + +// cli | pass through name, prefix and custom +test('generate #9', async (t) => { + const res = await brotest( + generate, + '--dialect=postgresql --schema=schema.ts --out=out --prefix=timestamp --name=custom --custom', + ); + + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + name: 'custom', + custom: true, + prefix: 'timestamp', + breakpoints: true, + schema: 'schema.ts', + out: 'out', + bundle: false, + }); +}); + +// --- errors --- +test('err #1', async (t) => { + const res = await brotest(generate, '--schema=src/schema.ts'); + assert.equal(res.type, 'error'); +}); + +test('err #2', async (t) => { + const res = await brotest(generate, '--dialect=postgresql'); + assert.equal(res.type, 'error'); +}); + +test('err #3', async (t) => { + const res = await brotest(generate, '--dialect=postgresql2'); + assert.equal(res.type, 'error'); +}); + +test('err #4', async (t) => { + const res = await brotest(generate, '--driver=expo'); + assert.equal(res.type, 'error'); +}); + +test('err #5', async (t) => { + const res = await brotest(generate, '--dialect=postgresql --out=out'); + assert.equal(res.type, 'error'); +}); + +test('err #6', async (t) => { + const res = await brotest(generate, '--config=drizzle.config.ts --out=out'); + assert.equal(res.type, 'error'); +}); + +test('err #7', async (t) => { + const res = await brotest(generate, '--config=drizzle.config.ts --schema=schema.ts'); + assert.equal(res.type, 'error'); +}); + +test('err #8', async (t) => { + const res = await brotest(generate, '--config=drizzle.config.ts --dialect=postgresql'); + assert.equal(res.type, 'error'); +}); diff --git a/drizzle-kit/tests/cli-migrate.test.ts b/drizzle-kit/tests/cli-migrate.test.ts new file mode 100644 index 000000000..a4ffec2f0 --- /dev/null +++ b/drizzle-kit/tests/cli-migrate.test.ts @@ -0,0 +1,104 @@ +import { test as brotest } 
from '@drizzle-team/brocli'; +import { assert, expect, test } from 'vitest'; +import { migrate } from '../src/cli/schema'; + +// good: +// #1 drizzle-kit generate +// #2 drizzle-kit generate --config=turso.config.ts +// #3 drizzle-kit generate --config=d1http.config.ts +// #4 drizzle-kit generate --config=postgres.config.ts ## spread connection params +// #5 drizzle-kit generate --config=drizzle2.config.ts ## custom schema and table for migrations journal + +// errors: +// #1 drizzle-kit generate --config=expo.config.ts +// TODO: missing required params in config? + +test('migrate #1', async (t) => { + const res = await brotest(migrate, ''); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + out: 'drizzle', + credentials: { + url: 'postgresql://postgres:postgres@127.0.0.1:5432/db', + }, + schema: undefined, // drizzle migrations table schema + table: undefined, // drizzle migrations table name + }); +}); + +test('migrate #2', async (t) => { + const res = await brotest(migrate, '--config=turso.config.ts'); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'sqlite', + out: 'drizzle', + credentials: { + authToken: 'token', + driver: 'turso', + url: 'turso.dev', + }, + schema: undefined, // drizzle migrations table schema + table: undefined, // drizzle migrations table name + }); +}); + +test('migrate #3', async (t) => { + const res = await brotest(migrate, '--config=d1http.config.ts'); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'sqlite', + out: 'drizzle', + credentials: { + driver: 'd1-http', + accountId: 'accid', + databaseId: 'dbid', + token: 'token', + }, + schema: undefined, // drizzle migrations table schema + table: undefined, // drizzle migrations table name + }); +}); + +test('migrate #4', async (t) => { + const res = await brotest(migrate, 
'--config=postgres.config.ts'); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + out: 'drizzle', + credentials: { + database: 'db', + host: '127.0.0.1', + password: 'postgres', + port: 5432, + user: 'postgresql', + }, + schema: undefined, // drizzle migrations table schema + table: undefined, // drizzle migrations table name + }); +}); + +// catched a bug +test('migrate #5', async (t) => { + const res = await brotest(migrate, '--config=postgres2.config.ts'); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + out: 'drizzle', + credentials: { + database: 'db', + host: '127.0.0.1', + password: 'postgres', + port: 5432, + user: 'postgresql', + }, + schema: 'custom', // drizzle migrations table schema + table: 'custom', // drizzle migrations table name + }); +}); + +// --- errors --- +test('err #1', async (t) => { + const res = await brotest(migrate, '--config=expo.config.ts'); + assert.equal(res.type, 'error'); +}); diff --git a/drizzle-kit/tests/cli-push.test.ts b/drizzle-kit/tests/cli-push.test.ts new file mode 100644 index 000000000..1a4bde66d --- /dev/null +++ b/drizzle-kit/tests/cli-push.test.ts @@ -0,0 +1,119 @@ +import { test as brotest } from '@drizzle-team/brocli'; +import { assert, expect, test } from 'vitest'; +import { push } from '../src/cli/schema'; + +// good: +// #1 drizzle-kit push +// #2 drizzle-kit push --config=turso.config.ts +// #3 drizzle-kit push --config=d1http.config.ts +// #4 drizzle-kit push --config=postgres.config.ts ## spread connection params +// #5 drizzle-kit push --config=drizzle2.config.ts ## custom schema and table for migrations journal + +// errors: +// #1 drizzle-kit push --config=expo.config.ts +// TODO: missing required params in config? 
+ +test('push #1', async (t) => { + const res = await brotest(push, ''); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + credentials: { + url: 'postgresql://postgres:postgres@127.0.0.1:5432/db', + }, + force: false, + schemaPath: './schema.ts', + schemasFilter: ['public'], + tablesFilter: [], + strict: false, + verbose: false, + }); +}); + +test('push #2', async (t) => { + const res = await brotest(push, '--config=turso.config.ts'); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'sqlite', + credentials: { + authToken: 'token', + driver: 'turso', + url: 'turso.dev', + }, + force: false, + schemaPath: './schema.ts', + schemasFilter: ['public'], + tablesFilter: [], + strict: false, + verbose: false, + }); +}); + +test('push #3', async (t) => { + const res = await brotest(push, '--config=d1http.config.ts'); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'sqlite', + credentials: { + driver: 'd1-http', + accountId: 'accid', + databaseId: 'dbid', + token: 'token', + }, + force: false, + schemaPath: './schema.ts', + schemasFilter: ['public'], + tablesFilter: [], + strict: false, + verbose: false, + }); +}); + +test('push #4', async (t) => { + const res = await brotest(push, '--config=postgres.config.ts'); + if (res.type !== 'handler') assert.fail(res.type, 'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + credentials: { + database: 'db', + host: '127.0.0.1', + password: 'postgres', + port: 5432, + user: 'postgresql', + }, + force: false, + schemaPath: './schema.ts', + schemasFilter: ['public'], + tablesFilter: [], + strict: false, + verbose: false, + }); +}); + +// catched a bug +test('push #5', async (t) => { + const res = await brotest(push, '--config=postgres2.config.ts'); + if (res.type !== 'handler') assert.fail(res.type, 
'handler'); + expect(res.options).toStrictEqual({ + dialect: 'postgresql', + credentials: { + database: 'db', + host: '127.0.0.1', + password: 'postgres', + port: 5432, + user: 'postgresql', + }, + schemaPath: './schema.ts', + schemasFilter: ['public'], + tablesFilter: [], + strict: false, + force: false, + verbose: false, + }); +}); + +// --- errors --- +test('err #1', async (t) => { + const res = await brotest(push, '--config=expo.config.ts'); + assert.equal(res.type, 'error'); +}); diff --git a/drizzle-kit/tests/cli/d1http.config.ts b/drizzle-kit/tests/cli/d1http.config.ts new file mode 100644 index 000000000..cb01bee82 --- /dev/null +++ b/drizzle-kit/tests/cli/d1http.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from '../../src'; + +export default defineConfig({ + schema: './schema.ts', + dialect: 'sqlite', + driver: 'd1-http', + dbCredentials: { + accountId: 'accid', + databaseId: 'dbid', + token: 'token', + }, +}); diff --git a/drizzle-kit/tests/cli/drizzle.config.ts b/drizzle-kit/tests/cli/drizzle.config.ts new file mode 100644 index 000000000..bab5a456f --- /dev/null +++ b/drizzle-kit/tests/cli/drizzle.config.ts @@ -0,0 +1,9 @@ +import { defineConfig } from '../../src'; + +export default defineConfig({ + schema: './schema.ts', + dialect: 'postgresql', + dbCredentials: { + url: 'postgresql://postgres:postgres@127.0.0.1:5432/db', + }, +}); diff --git a/drizzle-kit/tests/cli/expo.config.ts b/drizzle-kit/tests/cli/expo.config.ts new file mode 100644 index 000000000..035dd67a9 --- /dev/null +++ b/drizzle-kit/tests/cli/expo.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from '../../src'; + +export default defineConfig({ + schema: './schema.ts', + dialect: 'sqlite', + driver: 'expo', +}); diff --git a/drizzle-kit/tests/cli/postgres.config.ts b/drizzle-kit/tests/cli/postgres.config.ts new file mode 100644 index 000000000..6e6023f1c --- /dev/null +++ b/drizzle-kit/tests/cli/postgres.config.ts @@ -0,0 +1,13 @@ +import { defineConfig } from '../../src'; + 
+export default defineConfig({ + schema: './schema.ts', + dialect: 'postgresql', + dbCredentials: { + host: '127.0.0.1', + port: 5432, + user: 'postgresql', + password: 'postgres', + database: 'db', + }, +}); diff --git a/drizzle-kit/tests/cli/postgres2.config.ts b/drizzle-kit/tests/cli/postgres2.config.ts new file mode 100644 index 000000000..e798643bf --- /dev/null +++ b/drizzle-kit/tests/cli/postgres2.config.ts @@ -0,0 +1,17 @@ +import { defineConfig } from '../../src'; + +export default defineConfig({ + schema: './schema.ts', + dialect: 'postgresql', + dbCredentials: { + host: '127.0.0.1', + port: 5432, + user: 'postgresql', + password: 'postgres', + database: 'db', + }, + migrations: { + schema: 'custom', + table: 'custom', + }, +}); diff --git a/drizzle-kit/tests/cli/schema.ts b/drizzle-kit/tests/cli/schema.ts new file mode 100644 index 000000000..2a62e168c --- /dev/null +++ b/drizzle-kit/tests/cli/schema.ts @@ -0,0 +1 @@ +// mock diff --git a/drizzle-kit/tests/cli/turso.config.ts b/drizzle-kit/tests/cli/turso.config.ts new file mode 100644 index 000000000..089e4d216 --- /dev/null +++ b/drizzle-kit/tests/cli/turso.config.ts @@ -0,0 +1,11 @@ +import { defineConfig } from '../../src'; + +export default defineConfig({ + schema: './schema.ts', + dialect: 'sqlite', + driver: 'turso', + dbCredentials: { + url: 'turso.dev', + authToken: 'token', + }, +}); diff --git a/drizzle-kit/tests/common.ts b/drizzle-kit/tests/common.ts new file mode 100644 index 000000000..631614218 --- /dev/null +++ b/drizzle-kit/tests/common.ts @@ -0,0 +1,15 @@ +import { test } from 'vitest'; + +export interface DialectSuite { + /** + * 1 statement | create column: + * + * id int primary key autoincrement + */ + columns1(): Promise; +} + +export const run = (suite: DialectSuite) => { + test('add columns #1', suite.columns1); +}; +// test("add columns #1", suite.columns1) diff --git a/drizzle-kit/tests/indexes/common.ts b/drizzle-kit/tests/indexes/common.ts new file mode 100644 index 
000000000..5bdc24446 --- /dev/null +++ b/drizzle-kit/tests/indexes/common.ts @@ -0,0 +1,21 @@ +import { afterAll, beforeAll, test } from 'vitest'; + +export interface DialectSuite { + simpleIndex(context?: any): Promise; + vectorIndex(context?: any): Promise; + indexesToBeTriggered(context?: any): Promise; +} + +export const run = ( + suite: DialectSuite, + beforeAllFn?: (context: any) => Promise, + afterAllFn?: (context: any) => Promise, +) => { + let context: any = {}; + beforeAll(beforeAllFn ? () => beforeAllFn(context) : () => {}); + test('index #1: simple index', () => suite.simpleIndex(context)); + test('index #2: vector index', () => suite.vectorIndex(context)); + test('index #3: fields that should be triggered on generate and not triggered on push', () => + suite.indexesToBeTriggered(context)); + afterAll(afterAllFn ? () => afterAllFn(context) : () => {}); +}; diff --git a/drizzle-kit/tests/indexes/pg.test.ts b/drizzle-kit/tests/indexes/pg.test.ts new file mode 100644 index 000000000..9958a2356 --- /dev/null +++ b/drizzle-kit/tests/indexes/pg.test.ts @@ -0,0 +1,245 @@ +import { sql } from 'drizzle-orm'; +import { index, pgTable, serial, text, vector } from 'drizzle-orm/pg-core'; +import { JsonCreateIndexStatement } from 'src/jsonStatements'; +import { PgSquasher } from 'src/serializer/pgSchema'; +import { diffTestSchemas } from 'tests/schemaDiffer'; +import { expect } from 'vitest'; +import { DialectSuite, run } from './common'; + +const pgSuite: DialectSuite = { + async vectorIndex() { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: vector('name', { dimensions: 3 }), + }), + }; + + const schema2 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + embedding: vector('name', { dimensions: 3 }), + }, + (t) => ({ + indx2: index('vector_embedding_idx') + .using('hnsw', t.embedding.op('vector_ip_ops')) + .with({ m: 16, ef_construction: 64 }), + }), + ), + }; + + const { statements, sqlStatements } = 
await diffTestSchemas( + schema1, + schema2, + [], + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + schema: '', + tableName: 'users', + type: 'create_index_pg', + data: { + columns: [ + { + asc: true, + expression: 'name', + isExpression: false, + nulls: 'last', + opclass: 'vector_ip_ops', + }, + ], + concurrently: false, + isUnique: false, + method: 'hnsw', + name: 'vector_embedding_idx', + where: undefined, + with: { + ef_construction: 64, + m: 16, + }, + }, + }); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `CREATE INDEX IF NOT EXISTS "vector_embedding_idx" ON "users" USING hnsw ("name" vector_ip_ops) WITH (m=16,ef_construction=64);`, + ); + }, + + async indexesToBeTriggered() { + const schema1 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + }, + (t) => ({ + indx: index('indx').on(t.name.desc()).concurrently(), + indx1: index('indx1') + .on(t.name.desc()) + .where(sql`true`), + indx2: index('indx2') + .on(t.name.op('text_ops')) + .where(sql`true`), + indx3: index('indx3') + .on(sql`lower(name)`) + .where(sql`true`), + }), + ), + }; + + const schema2 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + }, + (t) => ({ + indx: index('indx').on(t.name.desc()), + indx1: index('indx1') + .on(t.name.desc()) + .where(sql`false`), + indx2: index('indx2') + .on(t.name.op('test')) + .where(sql`true`), + indx3: index('indx3') + .on(sql`lower(${t.id})`) + .where(sql`true`), + indx4: index('indx4') + .on(sql`lower(id)`) + .where(sql`true`), + }), + ), + }; + + const { statements, sqlStatements } = await diffTestSchemas( + schema1, + schema2, + [], + ); + + expect(sqlStatements).toStrictEqual([ + 'DROP INDEX IF EXISTS "indx";', + 'DROP INDEX IF EXISTS "indx1";', + // 'DROP INDEX IF EXISTS "indx2";', + 'DROP INDEX IF EXISTS "indx3";', + 'CREATE INDEX IF NOT EXISTS "indx4" ON "users" USING btree (lower(id)) WHERE true;', + 
'CREATE INDEX IF NOT EXISTS "indx" ON "users" USING btree ("name" DESC NULLS LAST);', + 'CREATE INDEX IF NOT EXISTS "indx1" ON "users" USING btree ("name" DESC NULLS LAST) WHERE false;', + // 'CREATE INDEX IF NOT EXISTS "indx2" ON "users" USING btree ("name" test) WHERE true;', + 'CREATE INDEX IF NOT EXISTS "indx3" ON "users" USING btree (lower("id")) WHERE true;', + ]); + }, + + async simpleIndex() { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + }, + (t) => ({ + indx: index() + .on(t.name.desc(), t.id.asc().nullsLast()) + .with({ fillfactor: 70 }) + .where(sql`select 1`), + indx1: index('indx1') + .using('hash', t.name.desc(), sql`${t.name}`) + .with({ fillfactor: 70 }), + }), + ), + }; + + const { statements, sqlStatements } = await diffTestSchemas( + schema1, + schema2, + [], + ); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + schema: '', + tableName: 'users', + type: 'create_index_pg', + data: { + columns: [ + { + asc: false, + expression: 'name', + isExpression: false, + nulls: 'last', + opclass: '', + }, + { + asc: true, + expression: 'id', + isExpression: false, + nulls: 'last', + opclass: '', + }, + ], + concurrently: false, + isUnique: false, + method: 'btree', + name: 'users_name_id_index', + where: 'select 1', + with: { + fillfactor: 70, + }, + }, + // data: 'users_name_id_index;name,false,last,undefined,,id,true,last,undefined;false;false;btree;select 1;{"fillfactor":70}', + }); + expect(statements[1]).toStrictEqual({ + schema: '', + tableName: 'users', + type: 'create_index_pg', + data: { + columns: [ + { + asc: false, + expression: 'name', + isExpression: false, + nulls: 'last', + opclass: '', + }, + { + asc: true, + expression: '"name"', + isExpression: true, + nulls: 'last', + opclass: '', + }, + ], + concurrently: false, + isUnique: 
false, + method: 'hash', + name: 'indx1', + where: undefined, + with: { + fillfactor: 70, + }, + }, + }); + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe( + `CREATE INDEX IF NOT EXISTS "users_name_id_index" ON "users" USING btree ("name" DESC NULLS LAST,"id") WITH (fillfactor=70) WHERE select 1;`, + ); + expect(sqlStatements[1]).toBe( + `CREATE INDEX IF NOT EXISTS "indx1" ON "users" USING hash ("name" DESC NULLS LAST,"name") WITH (fillfactor=70);`, + ); + }, +}; + +run(pgSuite); diff --git a/drizzle-kit/tests/introspect/mysql.test.ts b/drizzle-kit/tests/introspect/mysql.test.ts new file mode 100644 index 000000000..e35b34f40 --- /dev/null +++ b/drizzle-kit/tests/introspect/mysql.test.ts @@ -0,0 +1,167 @@ +import Docker from 'dockerode'; +import { SQL, sql } from 'drizzle-orm'; +import { char, int, mysqlTable, text, varchar } from 'drizzle-orm/mysql-core'; +import * as fs from 'fs'; +import getPort from 'get-port'; +import { Connection, createConnection } from 'mysql2/promise'; +import { introspectMySQLToFile } from 'tests/schemaDiffer'; +import { v4 as uuid } from 'uuid'; +import { afterAll, beforeAll, expect, test } from 'vitest'; + +let client: Connection; +let mysqlContainer: Docker.Container; + +async function createDockerDB(): Promise { + const docker = new Docker(); + const port = await getPort({ port: 3306 }); + const image = 'mysql:8'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + docker.modem.followProgress(pullStream, (err) => err ? 
reject(err) : resolve(err)) + ); + + mysqlContainer = await docker.createContainer({ + Image: image, + Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], + name: `drizzle-integration-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await mysqlContainer.start(); + + return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; +} + +beforeAll(async () => { + const connectionString = process.env.MYSQL_CONNECTION_STRING ?? await createDockerDB(); + + const sleep = 1000; + let timeLeft = 20000; + let connected = false; + let lastError: unknown | undefined; + do { + try { + client = await createConnection(connectionString); + await client.connect(); + connected = true; + break; + } catch (e) { + lastError = e; + await new Promise((resolve) => setTimeout(resolve, sleep)); + timeLeft -= sleep; + } + } while (timeLeft > 0); + if (!connected) { + console.error('Cannot connect to MySQL'); + await client?.end().catch(console.error); + await mysqlContainer?.stop().catch(console.error); + throw lastError; + } +}); + +afterAll(async () => { + await client?.end().catch(console.error); + await mysqlContainer?.stop().catch(console.error); +}); + +if (!fs.existsSync('tests/introspect/mysql')) { + fs.mkdirSync('tests/introspect/mysql'); +} + +test('generated always column: link to another column', async () => { + const schema = { + users: mysqlTable('users', { + id: int('id'), + email: text('email'), + generatedEmail: text('generatedEmail').generatedAlwaysAs( + (): SQL => sql`\`email\``, + ), + }), + }; + + const { statements, sqlStatements } = await introspectMySQLToFile( + client, + schema, + 'generated-link-column', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); + + await client.query(`drop table users;`); +}); + +test('generated always column virtual: link to another column', async () => { + const schema = { + users: mysqlTable('users', { + id: 
int('id'), + email: text('email'), + generatedEmail: text('generatedEmail').generatedAlwaysAs( + (): SQL => sql`\`email\``, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await introspectMySQLToFile( + client, + schema, + 'generated-link-column-virtual', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); + + await client.query(`drop table users;`); +}); + +test('Default value of character type column: char', async () => { + const schema = { + users: mysqlTable('users', { + id: int('id'), + sortKey: char('sortKey', { length: 255 }).default('0'), + }), + }; + + const { statements, sqlStatements } = await introspectMySQLToFile( + client, + schema, + 'default-value-char-column', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); + + await client.query(`drop table users;`); +}); + +test('Default value of character type column: varchar', async () => { + const schema = { + users: mysqlTable('users', { + id: int('id'), + sortKey: varchar('sortKey', { length: 255 }).default('0'), + }), + }; + + const { statements, sqlStatements } = await introspectMySQLToFile( + client, + schema, + 'default-value-varchar-column', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); + + await client.query(`drop table users;`); +}); diff --git a/drizzle-kit/tests/introspect/pg.test.ts b/drizzle-kit/tests/introspect/pg.test.ts new file mode 100644 index 000000000..e65c0f904 --- /dev/null +++ b/drizzle-kit/tests/introspect/pg.test.ts @@ -0,0 +1,407 @@ +import { PGlite } from '@electric-sql/pglite'; +import { SQL, sql } from 'drizzle-orm'; +import { + bigint, + bigserial, + boolean, + char, + cidr, + date, + doublePrecision, + inet, + integer, + interval, + json, + jsonb, + macaddr, + macaddr8, + numeric, + pgEnum, + pgSchema, + pgTable, + real, + serial, + smallint, + smallserial, + text, + time, + timestamp, + uuid, + varchar, 
+} from 'drizzle-orm/pg-core'; +import { introspectPgToFile } from 'tests/schemaDiffer'; +import { expect, test } from 'vitest'; + +test('basic introspect test', async () => { + const client = new PGlite(); + + const schema = { + users: pgTable('users', { + id: integer('id').notNull(), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'basic-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('basic identity always test', async () => { + const client = new PGlite(); + + const schema = { + users: pgTable('users', { + id: integer('id').generatedAlwaysAsIdentity(), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'basic-identity-always-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('basic identity by default test', async () => { + const client = new PGlite(); + + const schema = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'basic-identity-default-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('identity always test: few params', async () => { + const client = new PGlite(); + + const schema = { + users: pgTable('users', { + id: integer('id').generatedAlwaysAsIdentity({ + startWith: 100, + name: 'custom_name', + }), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'identity-always-few-params-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('identity by default test: few params', async () => { + const client = new PGlite(); + + const schema = { + 
users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + maxValue: 10000, + name: 'custom_name', + }), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'identity-default-few-params-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('identity always test: all params', async () => { + const client = new PGlite(); + + const schema = { + users: pgTable('users', { + id: integer('id').generatedAlwaysAsIdentity({ + startWith: 10, + increment: 4, + minValue: 10, + maxValue: 10000, + cache: 100, + cycle: true, + }), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'identity-always-all-params-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('identity by default test: all params', async () => { + const client = new PGlite(); + + const schema = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + startWith: 10, + increment: 4, + minValue: 10, + maxValue: 10000, + cache: 100, + cycle: true, + }), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'identity-default-all-params-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('generated column: link to another column', async () => { + const client = new PGlite(); + + const schema = { + users: pgTable('users', { + id: integer('id').generatedAlwaysAsIdentity(), + email: text('email'), + generatedEmail: text('generatedEmail').generatedAlwaysAs( + (): SQL => sql`email`, + ), + }), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'generated-link-column', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); 
+}); + +test('instrospect all column types', async () => { + const client = new PGlite(); + + const myEnum = pgEnum('my_enum', ['a', 'b', 'c']); + const schema = { + enum_: myEnum, + // NOTE: Types from extensions aren't tested due to PGlite not supporting at the moment + columns: pgTable('columns', { + enum: myEnum('my_enum').default('a'), + smallint: smallint('smallint').default(10), + integer: integer('integer').default(10), + numeric: numeric('numeric', { precision: 3, scale: 1 }).default('99.9'), + bigint: bigint('bigint', { mode: 'number' }).default(100), + boolean: boolean('boolean').default(true), + text: text('test').default('abc'), + varchar: varchar('varchar', { length: 25 }).default('abc'), + char: char('char', { length: 3 }).default('abc'), + serial: serial('serial'), + bigserial: bigserial('bigserial', { mode: 'number' }), + smallserial: smallserial('smallserial'), + doublePrecision: doublePrecision('doublePrecision').default(100), + real: real('real').default(100), + json: json('json').$type<{ attr: string }>().default({ attr: 'value' }), + jsonb: jsonb('jsonb').$type<{ attr: string }>().default({ attr: 'value' }), + time1: time('time1').default('00:00:00'), + time2: time('time2').defaultNow(), + timestamp1: timestamp('timestamp1', { withTimezone: true, precision: 6 }).default(new Date()), + timestamp2: timestamp('timestamp2', { withTimezone: true, precision: 6 }).defaultNow(), + date1: date('date1').default('2024-01-01'), + date2: date('date2').defaultNow(), + uuid1: uuid('uuid1').default('a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'), + uuid2: uuid('uuid2').defaultRandom(), + inet: inet('inet').default('127.0.0.1'), + cidr: cidr('cidr').default('127.0.0.1/32'), + macaddr: macaddr('macaddr').default('00:00:00:00:00:00'), + macaddr8: macaddr8('macaddr8').default('00:00:00:ff:fe:00:00:00'), + interval: interval('interval').default('1 day 01:00:00'), + }), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 
'introspect-all-columns-types', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('instrospect all column array types', async () => { + const client = new PGlite(); + + const myEnum = pgEnum('my_enum', ['a', 'b', 'c']); + const schema = { + enum_: myEnum, + // NOTE: Types from extensions aren't tested due to PGlite not supporting at the moment + columns: pgTable('columns', { + enum: myEnum('my_enum').array().default(['a', 'b']), + smallint: smallint('smallint').array().default([10, 20]), + integer: integer('integer').array().default([10, 20]), + numeric: numeric('numeric', { precision: 3, scale: 1 }).array().default(['99.9', '88.8']), + bigint: bigint('bigint', { mode: 'number' }).array().default([100, 200]), + boolean: boolean('boolean').array().default([true, false]), + text: text('test').array().default(['abc', 'def']), + varchar: varchar('varchar', { length: 25 }).array().default(['abc', 'def']), + char: char('char', { length: 3 }).array().default(['abc', 'def']), + doublePrecision: doublePrecision('doublePrecision').array().default([100, 200]), + real: real('real').array().default([100, 200]), + json: json('json').$type<{ attr: string }>().array().default([{ attr: 'value1' }, { attr: 'value2' }]), + jsonb: jsonb('jsonb').$type<{ attr: string }>().array().default([{ attr: 'value1' }, { attr: 'value2' }]), + time: time('time').array().default(['00:00:00', '01:00:00']), + timestamp: timestamp('timestamp', { withTimezone: true, precision: 6 }) + .array() + .default([new Date(), new Date()]), + date: date('date').array().default(['2024-01-01', '2024-01-02']), + uuid: uuid('uuid').array().default([ + 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11', + 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12', + ]), + inet: inet('inet').array().default(['127.0.0.1', '127.0.0.2']), + cidr: cidr('cidr').array().default(['127.0.0.1/32', '127.0.0.2/32']), + macaddr: macaddr('macaddr').array().default(['00:00:00:00:00:00', '00:00:00:00:00:01']), + 
macaddr8: macaddr8('macaddr8').array().default(['00:00:00:ff:fe:00:00:00', '00:00:00:ff:fe:00:00:01']), + interval: interval('interval').array().default(['1 day 01:00:00', '1 day 02:00:00']), + }), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'introspect-all-columns-array-types', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect columns with name with non-alphanumeric characters', async () => { + const client = new PGlite(); + const schema = { + users: pgTable('users', { + 'not:allowed': integer('not:allowed'), + 'nuh--uh': integer('nuh-uh'), + '1_nope': integer('1_nope'), + valid: integer('valid'), + }), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'introspect-column-with-name-with-non-alphanumeric-characters', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect enum from different schema', async () => { + const client = new PGlite(); + + const schema2 = pgSchema('schema2'); + const myEnumInSchema2 = schema2.enum('my_enum', ['a', 'b', 'c']); + const schema = { + schema2, + myEnumInSchema2, + users: pgTable('users', { + col: myEnumInSchema2('col'), + }), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'introspect-enum-from-different-schema', + ['public', 'schema2'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect enum with same names across different schema', async () => { + const client = new PGlite(); + + const schema2 = pgSchema('schema2'); + const myEnumInSchema2 = schema2.enum('my_enum', ['a', 'b', 'c']); + const myEnum = pgEnum('my_enum', ['a', 'b', 'c']); + const schema = { + schema2, + myEnumInSchema2, + myEnum, + users: pgTable('users', { + col1: myEnumInSchema2('col1'), + col2: myEnum('col2'), + }), + }; + + const { statements, 
sqlStatements } = await introspectPgToFile( + client, + schema, + 'introspect-enum-with-same-names-across-different-schema', + ['public', 'schema2'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect enum with similar name to native type', async () => { + const client = new PGlite(); + + const timeLeft = pgEnum('time_left', ['short', 'medium', 'long']); + const schema = { + timeLeft, + auction: pgTable('auction', { + col: timeLeft('col1'), + }), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'introspect-enum-with-similar-name-to-native-type', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/introspect/sqlite.test.ts b/drizzle-kit/tests/introspect/sqlite.test.ts new file mode 100644 index 000000000..18473e87b --- /dev/null +++ b/drizzle-kit/tests/introspect/sqlite.test.ts @@ -0,0 +1,57 @@ +import Database from 'better-sqlite3'; +import { SQL, sql } from 'drizzle-orm'; +import { int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import * as fs from 'fs'; +import { introspectSQLiteToFile } from 'tests/schemaDiffer'; +import { expect, test } from 'vitest'; + +if (!fs.existsSync('tests/introspect/sqlite')) { + fs.mkdirSync('tests/introspect/sqlite'); +} + +test('generated always column: link to another column', async () => { + const sqlite = new Database(':memory:'); + + const schema = { + users: sqliteTable('users', { + id: int('id'), + email: text('email'), + generatedEmail: text('generatedEmail').generatedAlwaysAs( + (): SQL => sql`\`email\``, + ), + }), + }; + + const { statements, sqlStatements } = await introspectSQLiteToFile( + sqlite, + schema, + 'generated-link-column', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('generated always column virtual: link to another column', async () => { + const sqlite = new Database(':memory:'); 
+ + const schema = { + users: sqliteTable('users', { + id: int('id'), + email: text('email'), + generatedEmail: text('generatedEmail').generatedAlwaysAs( + (): SQL => sql`\`email\``, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await introspectSQLiteToFile( + sqlite, + schema, + 'generated-link-column-virtual', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/mysql-generated.test.ts b/drizzle-kit/tests/mysql-generated.test.ts new file mode 100644 index 000000000..3531582d0 --- /dev/null +++ b/drizzle-kit/tests/mysql-generated.test.ts @@ -0,0 +1,1290 @@ +import { SQL, sql } from 'drizzle-orm'; +import { int, mysqlTable, text } from 'drizzle-orm/mysql-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasMysql } from './schemaDiffer'; + +test('generated as callback: add column with generated constraint', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + autoincrement: false, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as callback: add generated constraint to an exisiting column as stored', async () => { + const from = { + 
users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { + mode: 'stored', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'stored', + }, + columnAutoIncrement: false, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` MODIFY COLUMN `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED NOT NULL;", + ]); +}); + +test('generated as callback: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: 
'', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL NOT NULL;", + ]); +}); + +test('generated as callback: drop generated constraint as stored', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name} || 'to delete'`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'stored', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', + ]); +}); + +test('generated as callback: drop generated constraint as virtual', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name} || 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + 
generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'virtual', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +test('generated as callback: change generated constraint type from virtual to stored', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` 
drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as callback: change generated constraint type from stored to virtual', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + ), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +test('generated as callback: change generated constraint', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + ), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + 
expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +// --- + +test('generated as sql: add column with generated constraint', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + autoincrement: false, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as sql: add generated constraint to an exisiting column as stored', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') 
+ .notNull() + .generatedAlwaysAs(sql`\`users\`.\`name\` || 'to add'`, { + mode: 'stored', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'stored', + }, + columnAutoIncrement: false, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` MODIFY COLUMN `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED NOT NULL;", + ]); +}); + +test('generated as sql: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(sql`\`users\`.\`name\` || 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL NOT NULL;", + ]); +}); + 
+test('generated as sql: drop generated constraint as stored', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'to delete'`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'stored', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', + ]); +}); + +test('generated as sql: drop generated constraint as virtual', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + 
columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'virtual', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +test('generated as sql: change generated constraint type from virtual to stored', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\``, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as sql: change generated constraint type from stored to virtual', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + 
generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\``, + ), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +test('generated as sql: change generated constraint', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\``, + ), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + 
expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +// --- + +test('generated as string: add column with generated constraint', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + autoincrement: false, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as string: add generated constraint to an exisiting column as stored', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(`\`users\`.\`name\` || 'to add'`, { + mode: 'stored', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'stored', + }, + columnAutoIncrement: false, + columnName: 
'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` MODIFY COLUMN `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED NOT NULL;", + ]); +}); + +test('generated as string: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(`\`users\`.\`name\` || 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL NOT NULL;", + ]); +}); + +test('generated as string: drop generated constraint as stored', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'to delete'`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: 
text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'stored', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', + ]); +}); + +test('generated as string: drop generated constraint as virtual', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'virtual', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + 
expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +test('generated as string: change generated constraint type from virtual to stored', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``, { + mode: 'virtual', + }), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as string: change generated constraint type from stored to virtual', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + 
expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +test('generated as string: change generated constraint', async () => { + const from = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``), + }), + }; + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); diff --git a/drizzle-kit/tests/mysql-schemas.test.ts b/drizzle-kit/tests/mysql-schemas.test.ts new file mode 100644 index 000000000..826585d86 --- /dev/null +++ b/drizzle-kit/tests/mysql-schemas.test.ts @@ -0,0 +1,155 @@ +import 
{ mysqlSchema, mysqlTable } from 'drizzle-orm/mysql-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasMysql } from './schemaDiffer'; + +// We don't manage databases(schemas) in MySQL with Drizzle Kit +test('add schema #1', async () => { + const to = { + devSchema: mysqlSchema('dev'), + }; + + const { statements } = await diffTestSchemasMysql({}, to, []); + + expect(statements.length).toBe(0); +}); + +test('add schema #2', async () => { + const from = { + devSchema: mysqlSchema('dev'), + }; + const to = { + devSchema: mysqlSchema('dev'), + devSchema2: mysqlSchema('dev2'), + }; + + const { statements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(0); +}); + +test('delete schema #1', async () => { + const from = { + devSchema: mysqlSchema('dev'), + }; + + const { statements } = await diffTestSchemasMysql(from, {}, []); + + expect(statements.length).toBe(0); +}); + +test('delete schema #2', async () => { + const from = { + devSchema: mysqlSchema('dev'), + devSchema2: mysqlSchema('dev2'), + }; + const to = { + devSchema: mysqlSchema('dev'), + }; + + const { statements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(0); +}); + +test('rename schema #1', async () => { + const from = { + devSchema: mysqlSchema('dev'), + }; + const to = { + devSchema2: mysqlSchema('dev2'), + }; + + const { statements } = await diffTestSchemasMysql(from, to, ['dev->dev2']); + + expect(statements.length).toBe(0); +}); + +test('rename schema #2', async () => { + const from = { + devSchema: mysqlSchema('dev'), + devSchema1: mysqlSchema('dev1'), + }; + const to = { + devSchema: mysqlSchema('dev'), + devSchema2: mysqlSchema('dev2'), + }; + + const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); + +test('add table to schema #1', async () => { + const dev = mysqlSchema('dev'); + const from = {}; + const to = { + dev, + users: dev.table('users', 
{}), + }; + + const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); + +test('add table to schema #2', async () => { + const dev = mysqlSchema('dev'); + const from = { dev }; + const to = { + dev, + users: dev.table('users', {}), + }; + + const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); + +test('add table to schema #3', async () => { + const dev = mysqlSchema('dev'); + const from = { dev }; + const to = { + dev, + usersInDev: dev.table('users', {}), + users: mysqlTable('users', {}), + }; + + const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + uniqueConstraints: [], + internals: { + tables: {}, + indexes: {}, + }, + compositePkName: '', + compositePKs: [], + }); +}); + +test('remove table from schema #1', async () => { + const dev = mysqlSchema('dev'); + const from = { dev, users: dev.table('users', {}) }; + const to = { + dev, + }; + + const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); + +test('remove table from schema #2', async () => { + const dev = mysqlSchema('dev'); + const from = { dev, users: dev.table('users', {}) }; + const to = {}; + + const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/mysql.test.ts b/drizzle-kit/tests/mysql.test.ts new file mode 100644 index 000000000..e7b0b32a5 --- /dev/null +++ b/drizzle-kit/tests/mysql.test.ts @@ -0,0 +1,557 @@ +import { sql } from 'drizzle-orm'; +import { index, json, mysqlSchema, mysqlTable, primaryKey, serial, text, uniqueIndex } from 'drizzle-orm/mysql-core'; +import { expect, test } from 'vitest'; +import { 
diffTestSchemasMysql } from './schemaDiffer'; + +test('add table #1', async () => { + const to = { + users: mysqlTable('users', {}), + }; + + const { statements } = await diffTestSchemasMysql({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + compositePKs: [], + internals: { + tables: {}, + indexes: {}, + }, + uniqueConstraints: [], + compositePkName: '', + }); +}); + +test('add table #2', async () => { + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + }), + }; + + const { statements } = await diffTestSchemasMysql({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [ + { + name: 'id', + notNull: true, + primaryKey: false, + type: 'serial', + autoincrement: true, + }, + ], + compositePKs: ['users_id;id'], + compositePkName: 'users_id', + uniqueConstraints: [], + internals: { + tables: {}, + indexes: {}, + }, + }); +}); + +test('add table #3', async () => { + const to = { + users: mysqlTable( + 'users', + { + id: serial('id'), + }, + (t) => { + return { + pk: primaryKey({ + name: 'users_pk', + columns: [t.id], + }), + }; + }, + ), + }; + + const { statements } = await diffTestSchemasMysql({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [ + { + name: 'id', + notNull: true, + primaryKey: false, + type: 'serial', + autoincrement: true, + }, + ], + compositePKs: ['users_pk;id'], + uniqueConstraints: [], + compositePkName: 'users_pk', + internals: { + tables: {}, + indexes: {}, + }, + }); +}); + +test('add table #4', async () => { + const to = { + users: mysqlTable('users', {}), + posts: mysqlTable('posts', {}), + }; + + const { statements } = await diffTestSchemasMysql({}, to, []); 
+ + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + internals: { + tables: {}, + indexes: {}, + }, + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); + expect(statements[1]).toStrictEqual({ + type: 'create_table', + tableName: 'posts', + schema: undefined, + columns: [], + compositePKs: [], + internals: { + tables: {}, + indexes: {}, + }, + uniqueConstraints: [], + compositePkName: '', + }); +}); + +test('add table #5', async () => { + const schema = mysqlSchema('folder'); + const from = { + schema, + }; + + const to = { + schema, + users: schema.table('users', {}), + }; + + const { statements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(0); +}); + +test('add table #6', async () => { + const from = { + users1: mysqlTable('users1', {}), + }; + + const to = { + users2: mysqlTable('users2', {}), + }; + + const { statements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users2', + schema: undefined, + columns: [], + internals: { + tables: {}, + indexes: {}, + }, + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); + expect(statements[1]).toStrictEqual({ + type: 'drop_table', + tableName: 'users1', + schema: undefined, + }); +}); + +test('add table #7', async () => { + const from = { + users1: mysqlTable('users1', {}), + }; + + const to = { + users: mysqlTable('users', {}), + users2: mysqlTable('users2', {}), + }; + + const { statements } = await diffTestSchemasMysql(from, to, [ + 'public.users1->public.users2', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + compositePKs: [], + uniqueConstraints: [], + internals: { + tables: {}, + indexes: {}, + }, 
+ compositePkName: '', + }); + expect(statements[1]).toStrictEqual({ + type: 'rename_table', + tableNameFrom: 'users1', + tableNameTo: 'users2', + fromSchema: undefined, + toSchema: undefined, + }); +}); + +test('add schema + table #1', async () => { + const schema = mysqlSchema('folder'); + + const to = { + schema, + users: schema.table('users', {}), + }; + + const { statements } = await diffTestSchemasMysql({}, to, []); + + expect(statements.length).toBe(0); +}); + +test('change schema with tables #1', async () => { + const schema = mysqlSchema('folder'); + const schema2 = mysqlSchema('folder2'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema2, + users: schema2.table('users', {}), + }; + + const { statements } = await diffTestSchemasMysql(from, to, [ + 'folder->folder2', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #1', async () => { + const schema = mysqlSchema('folder'); + const from = { + schema, + users: mysqlTable('users', {}), + }; + const to = { + schema, + users: schema.table('users', {}), + }; + + const { statements } = await diffTestSchemasMysql(from, to, [ + 'public.users->folder.users', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'drop_table', + tableName: 'users', + schema: undefined, + }); +}); + +test('change table schema #2', async () => { + const schema = mysqlSchema('folder'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema, + users: mysqlTable('users', {}), + }; + + const { statements } = await diffTestSchemasMysql(from, to, [ + 'folder.users->public.users', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + uniqueConstraints: [], + compositePkName: '', + compositePKs: [], + internals: { + tables: {}, + indexes: {}, + }, + }); +}); + +test('change table schema 
#3', async () => { + const schema1 = mysqlSchema('folder1'); + const schema2 = mysqlSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users', {}), + }; + + const { statements } = await diffTestSchemasMysql(from, to, [ + 'folder1.users->folder2.users', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #4', async () => { + const schema1 = mysqlSchema('folder1'); + const schema2 = mysqlSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const { statements } = await diffTestSchemasMysql(from, to, [ + 'folder1.users->folder2.users', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #5', async () => { + const schema1 = mysqlSchema('folder1'); + const schema2 = mysqlSchema('folder2'); + const from = { + schema1, // remove schema + users: schema1.table('users', {}), + }; + const to = { + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const { statements } = await diffTestSchemasMysql(from, to, [ + 'folder1.users->folder2.users', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #5', async () => { + const schema1 = mysqlSchema('folder1'); + const schema2 = mysqlSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users2', {}), // rename and move table + }; + + const { statements } = await diffTestSchemasMysql(from, to, [ + 'folder1.users->folder2.users2', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #6', async () => { + const schema1 = mysqlSchema('folder1'); + const schema2 = mysqlSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', 
{}), + }; + const to = { + schema2, // rename schema + users: schema2.table('users2', {}), // rename table + }; + + const { statements } = await diffTestSchemasMysql(from, to, [ + 'folder1->folder2', + 'folder2.users->folder2.users2', + ]); + + expect(statements.length).toBe(0); +}); + +test('add table #10', async () => { + const to = { + users: mysqlTable('table', { + json: json('json').default({}), + }), + }; + + const { sqlStatements } = await diffTestSchemasMysql({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + "CREATE TABLE `table` (\n\t`json` json DEFAULT ('{}')\n);\n", + ); +}); + +test('add table #11', async () => { + const to = { + users: mysqlTable('table', { + json: json('json').default([]), + }), + }; + + const { sqlStatements } = await diffTestSchemasMysql({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + "CREATE TABLE `table` (\n\t`json` json DEFAULT ('[]')\n);\n", + ); +}); + +test('add table #12', async () => { + const to = { + users: mysqlTable('table', { + json: json('json').default([1, 2, 3]), + }), + }; + + const { sqlStatements } = await diffTestSchemasMysql({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + "CREATE TABLE `table` (\n\t`json` json DEFAULT ('[1,2,3]')\n);\n", + ); +}); + +test('add table #13', async () => { + const to = { + users: mysqlTable('table', { + json: json('json').default({ key: 'value' }), + }), + }; + + const { sqlStatements } = await diffTestSchemasMysql({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + 'CREATE TABLE `table` (\n\t`json` json DEFAULT (\'{"key":"value"}\')\n);\n', + ); +}); + +test('add table #14', async () => { + const to = { + users: mysqlTable('table', { + json: json('json').default({ + key: 'value', + arr: [1, 2, 3], + }), + }), + }; + + const { sqlStatements } = await diffTestSchemasMysql({}, to, []); + expect(sqlStatements.length).toBe(1); + 
expect(sqlStatements[0]).toBe( + 'CREATE TABLE `table` (\n\t`json` json DEFAULT (\'{"key":"value","arr":[1,2,3]}\')\n);\n', + ); +}); + +test('drop index', async () => { + const from = { + users: mysqlTable( + 'table', + { + name: text('name'), + }, + (t) => { + return { + idx: index('name_idx').on(t.name), + }; + }, + ), + }; + + const to = { + users: mysqlTable('table', { + name: text('name'), + }), + }; + + const { sqlStatements } = await diffTestSchemasMysql(from, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe('DROP INDEX `name_idx` ON `table`;'); +}); + +test('add table with indexes', async () => { + const from = {}; + + const to = { + users: mysqlTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + email: text('email'), + }, + (t) => ({ + uniqueExpr: uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`), + indexExpr: index('indexExpr').on(sql`(lower(${t.email}))`), + indexExprMultiple: index('indexExprMultiple').on( + sql`(lower(${t.email}))`, + sql`(lower(${t.email}))`, + ), + + uniqueCol: uniqueIndex('uniqueCol').on(t.email), + indexCol: index('indexCol').on(t.email), + indexColMultiple: index('indexColMultiple').on(t.email, t.email), + + indexColExpr: index('indexColExpr').on( + sql`(lower(${t.email}))`, + t.email, + ), + }), + ), + }; + + const { sqlStatements } = await diffTestSchemasMysql(from, to, []); + expect(sqlStatements.length).toBe(6); + expect(sqlStatements).toStrictEqual([ + `CREATE TABLE \`users\` (\n\t\`id\` serial AUTO_INCREMENT NOT NULL,\n\t\`name\` text,\n\t\`email\` text,\n\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`),\n\tCONSTRAINT \`uniqueExpr\` UNIQUE((lower(\`email\`))),\n\tCONSTRAINT \`uniqueCol\` UNIQUE(\`email\`) +); +`, + 'CREATE INDEX `indexExpr` ON `users` ((lower(`email`)));', + 'CREATE INDEX `indexExprMultiple` ON `users` ((lower(`email`)),(lower(`email`)));', + 'CREATE INDEX `indexCol` ON `users` (`email`);', + 'CREATE INDEX `indexColMultiple` ON `users` 
(`email`,`email`);', + 'CREATE INDEX `indexColExpr` ON `users` ((lower(`email`)),`email`);', + ]); +}); diff --git a/drizzle-kit/tests/pg-array.test.ts b/drizzle-kit/tests/pg-array.test.ts new file mode 100644 index 000000000..e6c06d535 --- /dev/null +++ b/drizzle-kit/tests/pg-array.test.ts @@ -0,0 +1,368 @@ +import { + bigint, + boolean, + date, + integer, + json, + pgEnum, + pgTable, + serial, + text, + timestamp, + uuid, +} from 'drizzle-orm/pg-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemas } from './schemaDiffer'; + +test('array #1: empty array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').array().default([]), + }), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{}'" }, + }); +}); + +test('array #2: integer array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').array().default([1, 2, 3]), + }), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{1,2,3}'" }, + }); +}); + +test('array #3: bigint array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: 
bigint('values', { mode: 'bigint' }).array().default([BigInt(1), BigInt(2), BigInt(3)]), + }), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { name: 'values', type: 'bigint[]', primaryKey: false, notNull: false, default: "'{1,2,3}'" }, + }); +}); + +test('array #4: boolean array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: boolean('values').array().default([true, false, true]), + }), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { + name: 'values', + type: 'boolean[]', + primaryKey: false, + notNull: false, + default: "'{true,false,true}'", + }, + }); +}); + +test('array #5: multi-dimensional array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').array().array().default([[1, 2], [3, 4]]), + }), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { + name: 'values', + type: 'integer[][]', + primaryKey: false, + notNull: false, + default: "'{{1,2},{3,4}}'", + }, + }); +}); + +test('array #6: date array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: 
date('values').array().default(['2024-08-06', '2024-08-07']), + }), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { + name: 'values', + type: 'date[]', + primaryKey: false, + notNull: false, + default: '\'{"2024-08-06","2024-08-07"}\'', + }, + }); +}); + +test('array #7: timestamp array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: timestamp('values').array().default([new Date('2024-08-06'), new Date('2024-08-07')]), + }), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { + name: 'values', + type: 'timestamp[]', + primaryKey: false, + notNull: false, + default: '\'{"2024-08-06 00:00:00.000","2024-08-07 00:00:00.000"}\'', + }, + }); +}); + +test('array #8: json array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: json('values').array().default([{ a: 1 }, { b: 2 }]), + }), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { + name: 'values', + type: 'json[]', + primaryKey: false, + notNull: false, + default: '\'{"{\\"a\\":1}","{\\"b\\":2}"}\'', + }, + }); +}); + +test('array #9: text array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: 
serial('id').primaryKey(), + values: text('values').array().default(['abc', 'def']), + }), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { + name: 'values', + type: 'text[]', + primaryKey: false, + notNull: false, + default: '\'{"abc","def"}\'', + }, + }); +}); + +test('array #10: uuid array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: uuid('values').array().default([ + 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11', + 'b0eebc99-9c0b-4ef8-bb6d-cbb9bd380a11', + ]), + }), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { + name: 'values', + type: 'uuid[]', + primaryKey: false, + notNull: false, + default: '\'{"a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11","b0eebc99-9c0b-4ef8-bb6d-cbb9bd380a11"}\'', + }, + }); +}); + +test('array #11: enum array default', async (t) => { + const testEnum = pgEnum('test_enum', ['a', 'b', 'c']); + + const from = { + enum: testEnum, + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + enum: testEnum, + test: pgTable('test', { + id: serial('id').primaryKey(), + values: testEnum('values').array().default(['a', 'b', 'c']), + }), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { + name: 'values', + type: 'test_enum[]', + primaryKey: false, + notNull: false, + default: '\'{"a","b","c"}\'', + }, + }); +}); + +test('array #12: enum empty array 
default', async (t) => { + const testEnum = pgEnum('test_enum', ['a', 'b', 'c']); + + const from = { + enum: testEnum, + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + enum: testEnum, + test: pgTable('test', { + id: serial('id').primaryKey(), + values: testEnum('values').array().default([]), + }), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { + name: 'values', + type: 'test_enum[]', + primaryKey: false, + notNull: false, + default: "'{}'", + }, + }); +}); diff --git a/drizzle-kit/tests/pg-columns.test.ts b/drizzle-kit/tests/pg-columns.test.ts new file mode 100644 index 000000000..cffeed3ed --- /dev/null +++ b/drizzle-kit/tests/pg-columns.test.ts @@ -0,0 +1,458 @@ +import { integer, pgTable, primaryKey, serial, text, uuid } from 'drizzle-orm/pg-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemas } from './schemaDiffer'; + +test('add columns #1', async (t) => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }), + }; + + const { statements } = await diffTestSchemas(schema1, schema2, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'users', + schema: '', + column: { name: 'name', type: 'text', primaryKey: false, notNull: false }, + }); +}); + +test('add columns #2', async (t) => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + email: text('email'), + }), + }; + + const { statements } = await diffTestSchemas(schema1, schema2, []); + + 
expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'users', + schema: '', + column: { name: 'name', type: 'text', primaryKey: false, notNull: false }, + }); + expect(statements[1]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'users', + schema: '', + column: { name: 'email', type: 'text', primaryKey: false, notNull: false }, + }); +}); + +test('alter column change name #1', async (t) => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name1'), + }), + }; + + const { statements } = await diffTestSchemas(schema1, schema2, [ + 'public.users.name->public.users.name1', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_rename_column', + tableName: 'users', + schema: '', + oldColumnName: 'name', + newColumnName: 'name1', + }); +}); + +test('alter column change name #2', async (t) => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name1'), + email: text('email'), + }), + }; + + const { statements } = await diffTestSchemas(schema1, schema2, [ + 'public.users.name->public.users.name1', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_rename_column', + tableName: 'users', + schema: '', + oldColumnName: 'name', + newColumnName: 'name1', + }); + expect(statements[1]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'users', + schema: '', + column: { + name: 'email', + notNull: false, + primaryKey: false, + type: 'text', + }, + }); +}); + +test('alter table add composite pk', async (t) => { + const schema1 = { + table: pgTable('table', { + id1: 
integer('id1'), + id2: integer('id2'), + }), + }; + + const schema2 = { + table: pgTable( + 'table', + { + id1: integer('id1'), + id2: integer('id2'), + }, + (t) => { + return { + pk: primaryKey({ columns: [t.id1, t.id2] }), + }; + }, + ), + }; + + const { statements, sqlStatements } = await diffTestSchemas( + schema1, + schema2, + [], + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_composite_pk', + tableName: 'table', + data: 'id1,id2;table_id1_id2_pk', + schema: '', + constraintName: 'table_id1_id2_pk', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + 'ALTER TABLE "table" ADD CONSTRAINT "table_id1_id2_pk" PRIMARY KEY("id1","id2");', + ); +}); + +test('rename table rename column #1', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id'), + }), + }; + + const schema2 = { + users: pgTable('users1', { + id: integer('id1'), + }), + }; + + const { statements } = await diffTestSchemas(schema1, schema2, [ + 'public.users->public.users1', + 'public.users1.id->public.users1.id1', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'rename_table', + tableNameFrom: 'users', + tableNameTo: 'users1', + fromSchema: '', + toSchema: '', + }); + expect(statements[1]).toStrictEqual({ + type: 'alter_table_rename_column', + oldColumnName: 'id', + newColumnName: 'id1', + schema: '', + tableName: 'users1', + }); +}); + +test('with composite pks #1', async (t) => { + const schema1 = { + users: pgTable( + 'users', + { + id1: integer('id1'), + id2: integer('id2'), + }, + (t) => { + return { + pk: primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' }), + }; + }, + ), + }; + + const schema2 = { + users: pgTable( + 'users', + { + id1: integer('id1'), + id2: integer('id2'), + text: text('text'), + }, + (t) => { + return { + pk: primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' }), + }; + }, + ), + }; + + const { statements } 
= await diffTestSchemas(schema1, schema2, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'users', + schema: '', + column: { + name: 'text', + notNull: false, + primaryKey: false, + type: 'text', + }, + }); +}); + +test('with composite pks #2', async (t) => { + const schema1 = { + users: pgTable('users', { + id1: integer('id1'), + id2: integer('id2'), + }), + }; + + const schema2 = { + users: pgTable( + 'users', + { + id1: integer('id1'), + id2: integer('id2'), + }, + (t) => { + return { + pk: primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' }), + }; + }, + ), + }; + + const { statements } = await diffTestSchemas(schema1, schema2, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_composite_pk', + tableName: 'users', + schema: '', + constraintName: 'compositePK', + data: 'id1,id2;compositePK', + }); +}); + +test('with composite pks #3', async (t) => { + const schema1 = { + users: pgTable( + 'users', + { + id1: integer('id1'), + id2: integer('id2'), + }, + (t) => { + return { + pk: primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' }), + }; + }, + ), + }; + + const schema2 = { + users: pgTable( + 'users', + { + id1: integer('id1'), + id3: integer('id3'), + }, + (t) => { + return { + pk: primaryKey({ columns: [t.id1, t.id3], name: 'compositePK' }), + }; + }, + ), + }; + + // TODO: remove redundand drop/create create constraint + const { statements } = await diffTestSchemas(schema1, schema2, [ + 'public.users.id2->public.users.id3', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_rename_column', + tableName: 'users', + schema: '', + newColumnName: 'id3', + oldColumnName: 'id2', + }); + expect(statements[1]).toStrictEqual({ + type: 'alter_composite_pk', + tableName: 'users', + schema: '', + new: 'id1,id3;compositePK', + old: 'id1,id2;compositePK', + 
newConstraintName: 'compositePK', + oldConstraintName: 'compositePK', + }); +}); + +test('add multiple constraints #1', async (t) => { + const t1 = pgTable('t1', { + id: uuid('id').primaryKey().defaultRandom(), + }); + + const t2 = pgTable('t2', { + id: uuid('id').primaryKey().defaultRandom(), + }); + + const t3 = pgTable('t3', { + id: uuid('id').primaryKey().defaultRandom(), + }); + + const schema1 = { + t1, + t2, + t3, + ref1: pgTable('ref1', { + id1: uuid('id1').references(() => t1.id), + id2: uuid('id2').references(() => t2.id), + id3: uuid('id3').references(() => t3.id), + }), + }; + + const schema2 = { + t1, + t2, + t3, + ref1: pgTable('ref1', { + id1: uuid('id1').references(() => t1.id, { onDelete: 'cascade' }), + id2: uuid('id2').references(() => t2.id, { onDelete: 'set null' }), + id3: uuid('id3').references(() => t3.id, { onDelete: 'cascade' }), + }), + }; + + // TODO: remove redundand drop/create create constraint + const { statements } = await diffTestSchemas(schema1, schema2, []); + + expect(statements.length).toBe(6); +}); + +test('add multiple constraints #2', async (t) => { + const t1 = pgTable('t1', { + id1: uuid('id1').primaryKey().defaultRandom(), + id2: uuid('id2').primaryKey().defaultRandom(), + id3: uuid('id3').primaryKey().defaultRandom(), + }); + + const schema1 = { + t1, + ref1: pgTable('ref1', { + id1: uuid('id1').references(() => t1.id1), + id2: uuid('id2').references(() => t1.id2), + id3: uuid('id3').references(() => t1.id3), + }), + }; + + const schema2 = { + t1, + ref1: pgTable('ref1', { + id1: uuid('id1').references(() => t1.id1, { onDelete: 'cascade' }), + id2: uuid('id2').references(() => t1.id2, { onDelete: 'set null' }), + id3: uuid('id3').references(() => t1.id3, { onDelete: 'cascade' }), + }), + }; + + // TODO: remove redundand drop/create create constraint + const { statements } = await diffTestSchemas(schema1, schema2, []); + + expect(statements.length).toBe(6); +}); + +test('add multiple constraints #3', async (t) => { + 
const t1 = pgTable('t1', { + id1: uuid('id1').primaryKey().defaultRandom(), + id2: uuid('id2').primaryKey().defaultRandom(), + id3: uuid('id3').primaryKey().defaultRandom(), + }); + + const schema1 = { + t1, + ref1: pgTable('ref1', { + id: uuid('id').references(() => t1.id1), + }), + ref2: pgTable('ref2', { + id: uuid('id').references(() => t1.id2), + }), + ref3: pgTable('ref3', { + id: uuid('id').references(() => t1.id3), + }), + }; + + const schema2 = { + t1, + ref1: pgTable('ref1', { + id: uuid('id').references(() => t1.id1, { onDelete: 'cascade' }), + }), + ref2: pgTable('ref2', { + id: uuid('id').references(() => t1.id2, { onDelete: 'set null' }), + }), + ref3: pgTable('ref3', { + id: uuid('id').references(() => t1.id3, { onDelete: 'cascade' }), + }), + }; + + // TODO: remove redundant drop/create constraint + const { statements } = await diffTestSchemas(schema1, schema2, []); + + expect(statements.length).toBe(6); +}); diff --git a/drizzle-kit/tests/pg-enums.test.ts b/drizzle-kit/tests/pg-enums.test.ts new file mode 100644 index 000000000..cd8877a43 --- /dev/null +++ b/drizzle-kit/tests/pg-enums.test.ts @@ -0,0 +1,460 @@ +import { pgEnum, pgSchema, pgTable } from 'drizzle-orm/pg-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemas } from './schemaDiffer'; + +test('enums #1', async () => { + const to = { + enum: pgEnum('enum', ['value']), + }; + + const { statements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + name: 'enum', + schema: 'public', + type: 'create_type_enum', + values: ['value'], + }); +}); + +test('enums #2', async () => { + const folder = pgSchema('folder'); + const to = { + enum: folder.enum('enum', ['value']), + }; + + const { statements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + name: 'enum', + schema: 'folder', + type: 'create_type_enum', + values: ['value'], + }); 
+}); + +test('enums #3', async () => { + const from = { + enum: pgEnum('enum', ['value']), + }; + + const { statements } = await diffTestSchemas(from, {}, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'drop_type_enum', + name: 'enum', + schema: 'public', + }); +}); + +test('enums #4', async () => { + const folder = pgSchema('folder'); + + const from = { + enum: folder.enum('enum', ['value']), + }; + + const { statements } = await diffTestSchemas(from, {}, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'drop_type_enum', + name: 'enum', + schema: 'folder', + }); +}); + +test('enums #5', async () => { + const folder1 = pgSchema('folder1'); + const folder2 = pgSchema('folder2'); + + const from = { + folder1, + enum: folder1.enum('enum', ['value']), + }; + + const to = { + folder2, + enum: folder2.enum('enum', ['value']), + }; + + const { statements } = await diffTestSchemas(from, to, ['folder1->folder2']); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'rename_schema', + from: 'folder1', + to: 'folder2', + }); +}); + +test('enums #6', async () => { + const folder1 = pgSchema('folder1'); + const folder2 = pgSchema('folder2'); + + const from = { + folder1, + folder2, + enum: folder1.enum('enum', ['value']), + }; + + const to = { + folder1, + folder2, + enum: folder2.enum('enum', ['value']), + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'folder1.enum->folder2.enum', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'move_type_enum', + name: 'enum', + schemaFrom: 'folder1', + schemaTo: 'folder2', + }); +}); + +test('enums #7', async () => { + const from = { + enum: pgEnum('enum', ['value1']), + }; + + const to = { + enum: pgEnum('enum', ['value1', 'value2']), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + 
expect(statements[0]).toStrictEqual({ + type: 'alter_type_add_value', + name: 'enum', + schema: 'public', + value: 'value2', + before: '', + }); +}); + +test('enums #8', async () => { + const from = { + enum: pgEnum('enum', ['value1']), + }; + + const to = { + enum: pgEnum('enum', ['value1', 'value2', 'value3']), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'alter_type_add_value', + name: 'enum', + schema: 'public', + value: 'value2', + before: '', + }); + + expect(statements[1]).toStrictEqual({ + type: 'alter_type_add_value', + name: 'enum', + schema: 'public', + value: 'value3', + before: '', + }); +}); + +test('enums #9', async () => { + const from = { + enum: pgEnum('enum', ['value1', 'value3']), + }; + + const to = { + enum: pgEnum('enum', ['value1', 'value2', 'value3']), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_type_add_value', + name: 'enum', + schema: 'public', + value: 'value2', + before: 'value3', + }); +}); + +test('enums #10', async () => { + const schema = pgSchema('folder'); + const from = { + enum: schema.enum('enum', ['value1']), + }; + + const to = { + enum: schema.enum('enum', ['value1', 'value2']), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_type_add_value', + name: 'enum', + schema: 'folder', + value: 'value2', + before: '', + }); +}); + +test('enums #11', async () => { + const schema1 = pgSchema('folder1'); + const from = { + enum: schema1.enum('enum', ['value1']), + }; + + const to = { + enum: pgEnum('enum', ['value1']), + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'folder1.enum->public.enum', + ]); + + expect(statements.length).toBe(1); + 
expect(statements[0]).toStrictEqual({ + type: 'move_type_enum', + name: 'enum', + schemaFrom: 'folder1', + schemaTo: 'public', + }); +}); + +test('enums #12', async () => { + const schema1 = pgSchema('folder1'); + const from = { + enum: pgEnum('enum', ['value1']), + }; + + const to = { + enum: schema1.enum('enum', ['value1']), + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'public.enum->folder1.enum', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'move_type_enum', + name: 'enum', + schemaFrom: 'public', + schemaTo: 'folder1', + }); +}); + +test('enums #13', async () => { + const from = { + enum: pgEnum('enum1', ['value1']), + }; + + const to = { + enum: pgEnum('enum2', ['value1']), + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'public.enum1->public.enum2', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'rename_type_enum', + nameFrom: 'enum1', + nameTo: 'enum2', + schema: 'public', + }); +}); + +test('enums #14', async () => { + const folder1 = pgSchema('folder1'); + const folder2 = pgSchema('folder2'); + const from = { + enum: folder1.enum('enum1', ['value1']), + }; + + const to = { + enum: folder2.enum('enum2', ['value1']), + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'folder1.enum1->folder2.enum2', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'move_type_enum', + name: 'enum1', + schemaFrom: 'folder1', + schemaTo: 'folder2', + }); + expect(statements[1]).toStrictEqual({ + type: 'rename_type_enum', + nameFrom: 'enum1', + nameTo: 'enum2', + schema: 'folder2', + }); +}); + +test('enums #15', async () => { + const folder1 = pgSchema('folder1'); + const folder2 = pgSchema('folder2'); + const from = { + enum: folder1.enum('enum1', ['value1', 'value4']), + }; + + const to = { + enum: folder2.enum('enum2', ['value1', 'value2', 'value3', 'value4']), + }; + + const { 
statements } = await diffTestSchemas(from, to, [ + 'folder1.enum1->folder2.enum2', + ]); + + expect(statements.length).toBe(4); + expect(statements[0]).toStrictEqual({ + type: 'move_type_enum', + name: 'enum1', + schemaFrom: 'folder1', + schemaTo: 'folder2', + }); + expect(statements[1]).toStrictEqual({ + type: 'rename_type_enum', + nameFrom: 'enum1', + nameTo: 'enum2', + schema: 'folder2', + }); + expect(statements[2]).toStrictEqual({ + type: 'alter_type_add_value', + name: 'enum2', + schema: 'folder2', + value: 'value2', + before: 'value4', + }); + expect(statements[3]).toStrictEqual({ + type: 'alter_type_add_value', + name: 'enum2', + schema: 'folder2', + value: 'value3', + before: 'value4', + }); +}); + +test('enums #16', async () => { + const enum1 = pgEnum('enum1', ['value1']); + const enum2 = pgEnum('enum2', ['value1']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column'), + }), + }; + + const to = { + enum2, + table: pgTable('table', { + column: enum2('column'), + }), + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'public.enum1->public.enum2', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'rename_type_enum', + nameFrom: 'enum1', + nameTo: 'enum2', + schema: 'public', + }); +}); + +test('enums #17', async () => { + const schema = pgSchema('schema'); + const enum1 = pgEnum('enum1', ['value1']); + const enum2 = schema.enum('enum1', ['value1']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column'), + }), + }; + + const to = { + enum2, + table: pgTable('table', { + column: enum2('column'), + }), + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'public.enum1->schema.enum1', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'move_type_enum', + name: 'enum1', + schemaFrom: 'public', + schemaTo: 'schema', + }); +}); + +test('enums #18', async () => { + const schema1 = 
pgSchema('schema1'); + const schema2 = pgSchema('schema2'); + + const enum1 = schema1.enum('enum1', ['value1']); + const enum2 = schema2.enum('enum2', ['value1']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column'), + }), + }; + + const to = { + enum2, + table: pgTable('table', { + column: enum2('column'), + }), + }; + + // change name and schema of the enum, no table changes + const { statements } = await diffTestSchemas(from, to, [ + 'schema1.enum1->schema2.enum2', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'move_type_enum', + name: 'enum1', + schemaFrom: 'schema1', + schemaTo: 'schema2', + }); + expect(statements[1]).toStrictEqual({ + type: 'rename_type_enum', + nameFrom: 'enum1', + nameTo: 'enum2', + schema: 'schema2', + }); +}); diff --git a/drizzle-kit/tests/pg-generated.test.ts b/drizzle-kit/tests/pg-generated.test.ts new file mode 100644 index 000000000..e9f294891 --- /dev/null +++ b/drizzle-kit/tests/pg-generated.test.ts @@ -0,0 +1,529 @@ +// test cases + +import { SQL, sql } from 'drizzle-orm'; +import { integer, pgTable, text } from 'drizzle-orm/pg-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemas } from './schemaDiffer'; + +test('generated as callback: add column with generated constraint', async () => { + const from = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + }), + }; + const to = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: '"users"."name" || \'hello\'', + type: 'stored', + }, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + 
tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS (\"users\".\"name\" || 'hello') STORED;`, + ]); +}); + +test('generated as callback: add generated constraint to an exisiting column', async () => { + const from = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: undefined, + columnDefault: undefined, + columnGenerated: { as: '"users"."name" || \'to add\'', type: 'stored' }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" drop column "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED NOT NULL;', + ]); +}); + +test('generated as callback: drop generated constraint', async () => { + const from = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name} || 'to delete'`, + ), + }), + }; + const to = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + 
expect(statements).toStrictEqual([ + { + columnAutoIncrement: undefined, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE \"users\" ALTER COLUMN \"gen_name\" DROP EXPRESSION;`, + ]); +}); + +test('generated as callback: change generated constraint', async () => { + const from = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + ), + }), + }; + const to = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: undefined, + columnDefault: undefined, + columnGenerated: { as: '"users"."name" || \'hello\'', type: 'stored' }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" drop column "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', + ]); +}); + +// --- + +test('generated as sql: add column with generated constraint', async () => { + const from = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + }), + }; + const to = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), 
+ generatedName: text('gen_name').generatedAlwaysAs( + sql`\"users\".\"name\" || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: '"users"."name" || \'hello\'', + type: 'stored', + }, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS (\"users\".\"name\" || 'hello') STORED;`, + ]); +}); + +test('generated as sql: add generated constraint to an exisiting column', async () => { + const from = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(sql`\"users\".\"name\" || 'to add'`), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: undefined, + columnDefault: undefined, + columnGenerated: { as: '"users"."name" || \'to add\'', type: 'stored' }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" drop column "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED NOT NULL;', + ]); +}); + +test('generated as sql: drop generated constraint', async () => { + const from = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), 
+ generatedName: text('gen_name').generatedAlwaysAs( + sql`\"users\".\"name\" || 'to delete'`, + ), + }), + }; + const to = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: undefined, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE \"users\" ALTER COLUMN \"gen_name\" DROP EXPRESSION;`, + ]); +}); + +test('generated as sql: change generated constraint', async () => { + const from = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\"users\".\"name\"`, + ), + }), + }; + const to = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\"users\".\"name\" || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: undefined, + columnDefault: undefined, + columnGenerated: { as: '"users"."name" || \'hello\'', type: 'stored' }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" drop column "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', + ]); 
+}); + +// --- + +test('generated as string: add column with generated constraint', async () => { + const from = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + }), + }; + const to = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\"users\".\"name\" || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: '"users"."name" || \'hello\'', + type: 'stored', + }, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS (\"users\".\"name\" || 'hello') STORED;`, + ]); +}); + +test('generated as string: add generated constraint to an exisiting column', async () => { + const from = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(`\"users\".\"name\" || 'to add'`), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: undefined, + columnDefault: undefined, + columnGenerated: { as: '"users"."name" || \'to add\'', type: 'stored' }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" drop 
column "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED NOT NULL;', + ]); +}); + +test('generated as string: drop generated constraint', async () => { + const from = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\"users\".\"name\" || 'to delete'`, + ), + }), + }; + const to = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: undefined, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE \"users\" ALTER COLUMN \"gen_name\" DROP EXPRESSION;`, + ]); +}); + +test('generated as string: change generated constraint', async () => { + const from = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + ), + }), + }; + const to = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\"users\".\"name\" || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: undefined, + columnDefault: undefined, + columnGenerated: { as: '"users"."name" || \'hello\'', type: 'stored' }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + 
columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" drop column "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', + ]); +}); diff --git a/drizzle-kit/tests/pg-identity.test.ts b/drizzle-kit/tests/pg-identity.test.ts new file mode 100644 index 000000000..906d812d4 --- /dev/null +++ b/drizzle-kit/tests/pg-identity.test.ts @@ -0,0 +1,438 @@ +import { integer, pgSequence, pgTable } from 'drizzle-orm/pg-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemas } from './schemaDiffer'; + +// same table - no diff +// 2. identity always/by default - no params + +// 3. identity always/by default - with a few params + +// 4. identity always/by default - with all params + + +// diff table with create statement +// 2. identity always/by default - no params + +// 3. identity always/by default - with a few params + +// 4. identity always/by default - with all params + + +// diff for drop statement +// 2. identity always/by default - no params, with params + + +// diff for alters +// 2. identity always/by default - no params -> add param + +// 3. identity always/by default - with a few params - remove/add/change params + +// 4. 
identity always/by default - with all params - remove/add/change params + + +test('create table: identity always/by default - no params', async () => { + const from = {}; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columns: [ + { + identity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', + name: 'id', + notNull: true, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + compositePkName: '', + schema: '', + tableName: 'users', + type: 'create_table', + uniqueConstraints: [], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE IF NOT EXISTS "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1)\n);\n', + ]); +}); + +test('create table: identity always/by default - few params', async () => { + const from = {}; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_seq', + increment: 4, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columns: [ + { + identity: 'custom_seq;byDefault;1;2147483647;4;1;1;false', + name: 'id', + notNull: true, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + compositePkName: '', + schema: '', + tableName: 'users', + type: 'create_table', + uniqueConstraints: [], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE IF NOT EXISTS "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_seq" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1)\n);\n', + ]); +}); + +test('create table: identity always/by default - all params', async () => { + const from = {}; + + const to = { + users: 
pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_seq', + increment: 4, + minValue: 3, + maxValue: 1000, + cache: 200, + cycle: false, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columns: [ + { + identity: 'custom_seq;byDefault;3;1000;4;3;200;false', + name: 'id', + notNull: true, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + compositePkName: '', + schema: '', + tableName: 'users', + type: 'create_table', + uniqueConstraints: [], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE IF NOT EXISTS "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_seq" INCREMENT BY 4 MINVALUE 3 MAXVALUE 1000 START WITH 3 CACHE 200)\n);\n', + ]); +}); + +test('no diff: identity always/by default - no params', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('no diff: identity always/by default - few params', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_seq', + increment: 4, + }), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_seq', + increment: 4, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('no diff: identity always/by default - all params', async () => { + const from = { + users: pgTable('users', { + id: 
integer('id').generatedByDefaultAsIdentity({ + name: 'custom_seq', + increment: 4, + minValue: 3, + maxValue: 1000, + cache: 200, + cycle: false, + }), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_seq', + increment: 4, + minValue: 3, + maxValue: 1000, + cache: 200, + cycle: false, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('drop identity from a column - no params', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, + ]); +}); + +test('drop identity from a column - few params', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + startWith: 100, + increment: 3, + }), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, + ]); +}); + +test('drop identity from a column - all params', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + startWith: 100, + increment: 3, + cache: 100, 
+ cycle: true, + }), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, + ]); +}); + +test('alter identity from a column - no params', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + identity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', + oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_change_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', + ]); +}); + +test('alter identity from a column - few params', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + startWith: 100, + cache: 10, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + identity: 'users_id_seq;byDefault;1;2147483647;1;100;10;false', + oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_change_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ 
+ 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', + ]); +}); + +test('alter identity from a column - by default to always', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id').generatedAlwaysAsIdentity({ + startWith: 100, + cache: 10, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + identity: 'users_id_seq;always;1;2147483647;1;100;10;false', + oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_change_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED ALWAYS;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', + ]); +}); + +test('alter identity from a column - always to by default', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedAlwaysAsIdentity(), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + startWith: 100, + cache: 10, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + identity: 'users_id_seq;byDefault;1;2147483647;1;100;10;false', + oldIdentity: 'users_id_seq;always;1;2147483647;1;1;1;false', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_change_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED BY DEFAULT;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', + ]); +}); diff --git a/drizzle-kit/tests/pg-schemas.test.ts 
b/drizzle-kit/tests/pg-schemas.test.ts new file mode 100644 index 000000000..d8c724e27 --- /dev/null +++ b/drizzle-kit/tests/pg-schemas.test.ts @@ -0,0 +1,105 @@ +import { pgSchema } from 'drizzle-orm/pg-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemas } from './schemaDiffer'; + +test('add schema #1', async () => { + const to = { + devSchema: pgSchema('dev'), + }; + + const { statements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_schema', + name: 'dev', + }); +}); + +test('add schema #2', async () => { + const from = { + devSchema: pgSchema('dev'), + }; + const to = { + devSchema: pgSchema('dev'), + devSchema2: pgSchema('dev2'), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_schema', + name: 'dev2', + }); +}); + +test('delete schema #1', async () => { + const from = { + devSchema: pgSchema('dev'), + }; + + const { statements } = await diffTestSchemas(from, {}, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'drop_schema', + name: 'dev', + }); +}); + +test('delete schema #2', async () => { + const from = { + devSchema: pgSchema('dev'), + devSchema2: pgSchema('dev2'), + }; + const to = { + devSchema: pgSchema('dev'), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'drop_schema', + name: 'dev2', + }); +}); + +test('rename schema #1', async () => { + const from = { + devSchema: pgSchema('dev'), + }; + const to = { + devSchema2: pgSchema('dev2'), + }; + + const { statements } = await diffTestSchemas(from, to, ['dev->dev2']); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'rename_schema', + from: 'dev', + to: 'dev2', + }); +}); + +test('rename schema #2', 
async () => { + const from = { + devSchema: pgSchema('dev'), + devSchema1: pgSchema('dev1'), + }; + const to = { + devSchema: pgSchema('dev'), + devSchema2: pgSchema('dev2'), + }; + + const { statements } = await diffTestSchemas(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'rename_schema', + from: 'dev1', + to: 'dev2', + }); +}); diff --git a/drizzle-kit/tests/pg-sequences.test.ts b/drizzle-kit/tests/pg-sequences.test.ts new file mode 100644 index 000000000..05ca5b1bd --- /dev/null +++ b/drizzle-kit/tests/pg-sequences.test.ts @@ -0,0 +1,298 @@ +import { pgSchema, pgSequence } from 'drizzle-orm/pg-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemas } from './schemaDiffer'; + +test('create sequence', async () => { + const from = {}; + const to = { + seq: pgSequence('name', { startWith: 100 }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + name: 'name', + schema: 'public', + type: 'create_sequence', + values: { + cache: '1', + cycle: false, + increment: '1', + maxValue: '9223372036854775807', + minValue: '1', + startWith: '100', + }, + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE SEQUENCE "public"."name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 100 CACHE 1;', + ]); +}); + +test('create sequence: all fields', async () => { + const from = {}; + const to = { + seq: pgSequence('name', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + type: 'create_sequence', + name: 'name', + schema: 'public', + values: { + startWith: '100', + maxValue: '10000', + minValue: '100', + cycle: true, + cache: '10', + increment: '2', + }, + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE 
SEQUENCE "public"."name" INCREMENT BY 2 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10 CYCLE;', + ]); +}); + +test('create sequence: custom schema', async () => { + const customSchema = pgSchema('custom'); + const from = {}; + const to = { + seq: customSchema.sequence('name', { startWith: 100 }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + name: 'name', + schema: 'custom', + type: 'create_sequence', + values: { + cache: '1', + cycle: false, + increment: '1', + maxValue: '9223372036854775807', + minValue: '1', + startWith: '100', + }, + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE SEQUENCE "custom"."name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 100 CACHE 1;', + ]); +}); + +test('create sequence: custom schema + all fields', async () => { + const customSchema = pgSchema('custom'); + const from = {}; + const to = { + seq: customSchema.sequence('name', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + type: 'create_sequence', + name: 'name', + schema: 'custom', + values: { + startWith: '100', + maxValue: '10000', + minValue: '100', + cycle: true, + cache: '10', + increment: '2', + }, + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE SEQUENCE "custom"."name" INCREMENT BY 2 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10 CYCLE;', + ]); +}); + +test('drop sequence', async () => { + const from = { seq: pgSequence('name', { startWith: 100 }) }; + const to = {}; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + type: 'drop_sequence', + name: 'name', + schema: 'public', + }, + ]); + expect(sqlStatements).toStrictEqual(['DROP SEQUENCE "public"."name";']); +}); + +test('drop 
sequence: custom schema', async () => { + const customSchema = pgSchema('custom'); + const from = { seq: customSchema.sequence('name', { startWith: 100 }) }; + const to = {}; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + type: 'drop_sequence', + name: 'name', + schema: 'custom', + }, + ]); + expect(sqlStatements).toStrictEqual(['DROP SEQUENCE "custom"."name";']); +}); + +// rename sequence + +test('rename sequence', async () => { + const from = { seq: pgSequence('name', { startWith: 100 }) }; + const to = { seq: pgSequence('name_new', { startWith: 100 }) }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, [ + 'public.name->public.name_new', + ]); + + expect(statements).toStrictEqual([ + { + type: 'rename_sequence', + nameFrom: 'name', + nameTo: 'name_new', + schema: 'public', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER SEQUENCE "public"."name" RENAME TO "name_new";', + ]); +}); + +test('rename sequence in custom schema', async () => { + const customSchema = pgSchema('custom'); + + const from = { seq: customSchema.sequence('name', { startWith: 100 }) }; + const to = { seq: customSchema.sequence('name_new', { startWith: 100 }) }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, [ + 'custom.name->custom.name_new', + ]); + + expect(statements).toStrictEqual([ + { + type: 'rename_sequence', + nameFrom: 'name', + nameTo: 'name_new', + schema: 'custom', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER SEQUENCE "custom"."name" RENAME TO "name_new";', + ]); +}); + +test('move sequence between schemas #1', async () => { + const customSchema = pgSchema('custom'); + const from = { seq: pgSequence('name', { startWith: 100 }) }; + const to = { seq: customSchema.sequence('name', { startWith: 100 }) }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, [ + 'public.name->custom.name', + ]); + + 
expect(statements).toStrictEqual([ + { + type: 'move_sequence', + name: 'name', + schemaFrom: 'public', + schemaTo: 'custom', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER SEQUENCE "public"."name" SET SCHEMA "custom";', + ]); +}); + +test('move sequence between schemas #2', async () => { + const customSchema = pgSchema('custom'); + const from = { seq: customSchema.sequence('name', { startWith: 100 }) }; + const to = { seq: pgSequence('name', { startWith: 100 }) }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, [ + 'custom.name->public.name', + ]); + + expect(statements).toStrictEqual([ + { + type: 'move_sequence', + name: 'name', + schemaFrom: 'custom', + schemaTo: 'public', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER SEQUENCE "custom"."name" SET SCHEMA "public";', + ]); +}); + +// Add squasher for sequences to make alters work + +// Run all tests + +// Finish introspect for sequences + +// Check push for sequences + + +// add tests for generated to postgresql + +// add tests for generated to mysql + +// add tests for generated to sqlite + + +// add tests for identity to postgresql + +// check introspect generated(all dialects) + +// check push generated(all dialect) + + +// add introspect ts file logic for all the features +// manually test everything +// beta release + +test('alter sequence', async () => { + const from = { seq: pgSequence('name', { startWith: 100 }) }; + const to = { seq: pgSequence('name', { startWith: 105 }) }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements).toStrictEqual([ + { + name: 'name', + schema: 'public', + type: 'alter_sequence', + values: { + cache: '1', + cycle: false, + increment: '1', + maxValue: '9223372036854775807', + minValue: '1', + startWith: '105', + }, + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER SEQUENCE "public"."name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 105 CACHE 1;', 
+ ]); +}); diff --git a/drizzle-kit/tests/pg-tables.test.ts b/drizzle-kit/tests/pg-tables.test.ts new file mode 100644 index 000000000..4171af333 --- /dev/null +++ b/drizzle-kit/tests/pg-tables.test.ts @@ -0,0 +1,641 @@ +import { sql } from 'drizzle-orm'; +import { + AnyPgColumn, + geometry, + index, + integer, + pgEnum, + pgSchema, + pgSequence, + pgTable, + pgTableCreator, + primaryKey, + serial, + text, + vector, +} from 'drizzle-orm/pg-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemas } from './schemaDiffer'; + +test('add table #1', async () => { + const to = { + users: pgTable('users', {}), + }; + + const { statements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: '', + columns: [], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); +}); + +test('add table #2', async () => { + const to = { + users: pgTable('users', { + id: serial('id').primaryKey(), + }), + }; + + const { statements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: '', + columns: [ + { + name: 'id', + notNull: true, + primaryKey: true, + type: 'serial', + }, + ], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); +}); + +test('add table #3', async () => { + const to = { + users: pgTable( + 'users', + { + id: serial('id'), + }, + (t) => { + return { + pk: primaryKey({ + name: 'users_pk', + columns: [t.id], + }), + }; + }, + ), + }; + + const { statements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: '', + columns: [ + { + name: 'id', + notNull: true, + primaryKey: false, + type: 'serial', + }, + ], + compositePKs: ['id;users_pk'], + uniqueConstraints: [], 
+ compositePkName: 'users_pk', + }); +}); + +test('add table #4', async () => { + const to = { + users: pgTable('users', {}), + posts: pgTable('posts', {}), + }; + + const { statements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: '', + columns: [], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); + expect(statements[1]).toStrictEqual({ + type: 'create_table', + tableName: 'posts', + schema: '', + columns: [], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); +}); + +test('add table #5', async () => { + const schema = pgSchema('folder'); + const from = { + schema, + }; + + const to = { + schema, + users: schema.table('users', {}), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: 'folder', + columns: [], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); +}); + +test('add table #6', async () => { + const from = { + users1: pgTable('users1', {}), + }; + + const to = { + users2: pgTable('users2', {}), + }; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users2', + schema: '', + columns: [], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); + expect(statements[1]).toStrictEqual({ + type: 'drop_table', + tableName: 'users1', + schema: '', + }); +}); + +test('add table #7', async () => { + const from = { + users1: pgTable('users1', {}), + }; + + const to = { + users: pgTable('users', {}), + users2: pgTable('users2', {}), + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'public.users1->public.users2', + ]); + + expect(statements.length).toBe(2); + 
expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: '', + columns: [], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); + expect(statements[1]).toStrictEqual({ + type: 'rename_table', + tableNameFrom: 'users1', + tableNameTo: 'users2', + fromSchema: '', + toSchema: '', + }); +}); + +test('add table #8: geometry types', async () => { + const from = {}; + + const to = { + users: pgTable('users', { + geom: geometry('geom', { type: 'point' }).notNull(), + geom1: geometry('geom1').notNull(), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + + expect(sqlStatements).toStrictEqual([ + `CREATE TABLE IF NOT EXISTS "users" (\n\t"geom" geometry(point) NOT NULL,\n\t"geom1" geometry(point) NOT NULL\n);\n`, + ]); +}); + +test('multiproject schema add table #1', async () => { + const table = pgTableCreator((name) => `prefix_${name}`); + + const to = { + users: table('users', { + id: serial('id').primaryKey(), + }), + }; + + const { statements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'prefix_users', + schema: '', + columns: [ + { + name: 'id', + notNull: true, + primaryKey: true, + type: 'serial', + }, + ], + compositePKs: [], + compositePkName: '', + uniqueConstraints: [], + }); +}); + +test('multiproject schema drop table #1', async () => { + const table = pgTableCreator((name) => `prefix_${name}`); + + const from = { + users: table('users', { + id: serial('id').primaryKey(), + }), + }; + const to = {}; + + const { statements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + schema: '', + tableName: 'prefix_users', + type: 'drop_table', + }); +}); + +test('multiproject schema alter table name #1', async () => { + const table = pgTableCreator((name) => 
`prefix_${name}`); + + const from = { + users: table('users', { + id: serial('id').primaryKey(), + }), + }; + const to = { + users1: table('users1', { + id: serial('id').primaryKey(), + }), + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'public.prefix_users->public.prefix_users1', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'rename_table', + fromSchema: '', + toSchema: '', + tableNameFrom: 'prefix_users', + tableNameTo: 'prefix_users1', + }); +}); + +test('add table #8: column with pgvector', async () => { + const from = {}; + + const to = { + users2: pgTable('users2', { + id: serial('id').primaryKey(), + name: vector('name', { dimensions: 3 }), + }), + }; + + const { sqlStatements } = await diffTestSchemas(from, to, []); + + expect(sqlStatements[0]).toBe( + `CREATE TABLE IF NOT EXISTS "users2" (\n\t"id" serial PRIMARY KEY NOT NULL,\n\t"name" vector(3)\n); +`, + ); +}); + +test('add schema + table #1', async () => { + const schema = pgSchema('folder'); + + const to = { + schema, + users: schema.table('users', {}), + }; + + const { statements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_schema', + name: 'folder', + }); + + expect(statements[1]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: 'folder', + columns: [], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); +}); + +test('change schema with tables #1', async () => { + const schema = pgSchema('folder'); + const schema2 = pgSchema('folder2'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema2, + users: schema2.table('users', {}), + }; + + const { statements } = await diffTestSchemas(from, to, ['folder->folder2']); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'rename_schema', + from: 'folder', + to: 'folder2', + }); +}); + 
+test('change table schema #1', async () => { + const schema = pgSchema('folder'); + const from = { + schema, + users: pgTable('users', {}), + }; + const to = { + schema, + users: schema.table('users', {}), + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'public.users->folder.users', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_set_schema', + tableName: 'users', + schemaFrom: 'public', + schemaTo: 'folder', + }); +}); + +test('change table schema #2', async () => { + const schema = pgSchema('folder'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema, + users: pgTable('users', {}), + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'folder.users->public.users', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_set_schema', + tableName: 'users', + schemaFrom: 'folder', + schemaTo: 'public', + }); +}); + +test('change table schema #3', async () => { + const schema1 = pgSchema('folder1'); + const schema2 = pgSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users', {}), + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'folder1.users->folder2.users', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_set_schema', + tableName: 'users', + schemaFrom: 'folder1', + schemaTo: 'folder2', + }); +}); + +test('change table schema #4', async () => { + const schema1 = pgSchema('folder1'); + const schema2 = pgSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'folder1.users->folder2.users', + ]); + + 
expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_schema', + name: 'folder2', + }); + expect(statements[1]).toStrictEqual({ + type: 'alter_table_set_schema', + tableName: 'users', + schemaFrom: 'folder1', + schemaTo: 'folder2', + }); +}); + +test('change table schema #5', async () => { + const schema1 = pgSchema('folder1'); + const schema2 = pgSchema('folder2'); + const from = { + schema1, // remove schema + users: schema1.table('users', {}), + }; + const to = { + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'folder1.users->folder2.users', + ]); + + expect(statements.length).toBe(3); + expect(statements[0]).toStrictEqual({ + type: 'create_schema', + name: 'folder2', + }); + expect(statements[1]).toStrictEqual({ + type: 'alter_table_set_schema', + tableName: 'users', + schemaFrom: 'folder1', + schemaTo: 'folder2', + }); + expect(statements[2]).toStrictEqual({ + type: 'drop_schema', + name: 'folder1', + }); +}); + +test('change table schema #5', async () => { + const schema1 = pgSchema('folder1'); + const schema2 = pgSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users2', {}), // rename and move table + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'folder1.users->folder2.users2', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_set_schema', + tableName: 'users', + schemaFrom: 'folder1', + schemaTo: 'folder2', + }); + expect(statements[1]).toStrictEqual({ + type: 'rename_table', + tableNameFrom: 'users', + tableNameTo: 'users2', + fromSchema: 'folder2', + toSchema: 'folder2', + }); +}); + +test('change table schema #6', async () => { + const schema1 = pgSchema('folder1'); + const schema2 = pgSchema('folder2'); + const from = { + schema1, + 
users: schema1.table('users', {}), + }; + const to = { + schema2, // rename schema + users: schema2.table('users2', {}), // rename table + }; + + const { statements } = await diffTestSchemas(from, to, [ + 'folder1->folder2', + 'folder2.users->folder2.users2', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'rename_schema', + from: 'folder1', + to: 'folder2', + }); + expect(statements[1]).toStrictEqual({ + type: 'rename_table', + tableNameFrom: 'users', + tableNameTo: 'users2', + fromSchema: 'folder2', + toSchema: 'folder2', + }); +}); + +test('drop table + rename schema #1', async () => { + const schema1 = pgSchema('folder1'); + const schema2 = pgSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema2, // rename schema + // drop table + }; + + const { statements } = await diffTestSchemas(from, to, ['folder1->folder2']); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'rename_schema', + from: 'folder1', + to: 'folder2', + }); + expect(statements[1]).toStrictEqual({ + type: 'drop_table', + tableName: 'users', + schema: 'folder2', + }); +}); + +test('create table with tsvector', async () => { + const from = {}; + const to = { + users: pgTable( + 'posts', + { + id: serial('id').primaryKey(), + title: text('title').notNull(), + description: text('description').notNull(), + }, + (table) => ({ + titleSearchIndex: index('title_search_index').using( + 'gin', + sql`to_tsvector('english', ${table.title})`, + ), + }), + ), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE IF NOT EXISTS "posts" (\n\t"id" serial PRIMARY KEY NOT NULL,\n\t"title" text NOT NULL,\n\t"description" text NOT NULL\n);\n', + `CREATE INDEX IF NOT EXISTS "title_search_index" ON "posts" USING gin (to_tsvector('english', "title"));`, + ]); +}); diff --git 
a/drizzle-kit/tests/push/common.ts b/drizzle-kit/tests/push/common.ts new file mode 100644 index 000000000..0c679ca6f --- /dev/null +++ b/drizzle-kit/tests/push/common.ts @@ -0,0 +1,50 @@ +import { afterAll, beforeAll, test } from 'vitest'; + +export interface DialectSuite { + allTypes(context?: any): Promise; + addBasicIndexes(context?: any): Promise; + changeIndexFields(context?: any): Promise; + dropIndex(context?: any): Promise; + indexesToBeNotTriggered(context?: any): Promise; + indexesTestCase1(context?: any): Promise; + addNotNull(context?: any): Promise; + addNotNullWithDataNoRollback(context?: any): Promise; + addBasicSequences(context?: any): Promise; + addGeneratedColumn(context?: any): Promise; + addGeneratedToColumn(context?: any): Promise; + dropGeneratedConstraint(context?: any): Promise; + alterGeneratedConstraint(context?: any): Promise; + createTableWithGeneratedConstraint(context?: any): Promise; + case1(): Promise; +} + +export const run = ( + suite: DialectSuite, + beforeAllFn?: (context: any) => Promise, + afterAllFn?: (context: any) => Promise, +) => { + let context: any = {}; + beforeAll(beforeAllFn ? () => beforeAllFn(context) : () => {}); + + test('No diffs for all database types', () => suite.allTypes(context)); + test('Adding basic indexes', () => suite.addBasicIndexes(context)); + test('Dropping basic index', () => suite.dropIndex(context)); + test('Altering indexes', () => suite.changeIndexFields(context)); + test('Indexes properties that should not trigger push changes', () => suite.indexesToBeNotTriggered(context)); + test('Indexes test case #1', () => suite.indexesTestCase1(context)); + test('Drop column', () => suite.case1()); + + test('Add not null to a column', () => suite.addNotNull()); + test('Add not null to a column with null data. 
Should rollback', () => suite.addNotNullWithDataNoRollback()); + + test('Add basic sequences', () => suite.addBasicSequences()); + + test('Add generated column', () => suite.addGeneratedColumn(context)); + test('Add generated constraint to an existing column', () => suite.addGeneratedToColumn(context)); + test('Drop generated constraint from a column', () => suite.dropGeneratedConstraint(context)); + // should ignore on push + test('Alter generated constraint', () => suite.alterGeneratedConstraint(context)); + test('Create table with generated column', () => suite.createTableWithGeneratedConstraint(context)); + + afterAll(afterAllFn ? () => afterAllFn(context) : () => {}); +}; diff --git a/drizzle-kit/tests/push/mysql.test.ts b/drizzle-kit/tests/push/mysql.test.ts new file mode 100644 index 000000000..5cad140be --- /dev/null +++ b/drizzle-kit/tests/push/mysql.test.ts @@ -0,0 +1,699 @@ +import Docker from 'dockerode'; +import { SQL, sql } from 'drizzle-orm'; +import { + bigint, + binary, + char, + date, + datetime, + decimal, + double, + float, + int, + json, + mediumint, + mysqlEnum, + mysqlTable, + serial, + smallint, + text, + time, + timestamp, + tinyint, + varbinary, + varchar, + year, +} from 'drizzle-orm/mysql-core'; +import getPort from 'get-port'; +import { Connection, createConnection } from 'mysql2/promise'; +import { diffTestSchemasMysql, diffTestSchemasPushMysql } from 'tests/schemaDiffer'; +import { v4 as uuid } from 'uuid'; +import { expect } from 'vitest'; +import { DialectSuite, run } from './common'; + +async function createDockerDB(context: any): Promise { + const docker = new Docker(); + const port = await getPort({ port: 3306 }); + const image = 'mysql:8'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + docker.modem.followProgress(pullStream, (err) => err ? 
reject(err) : resolve(err)) + ); + + context.mysqlContainer = await docker.createContainer({ + Image: image, + Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], + name: `drizzle-integration-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await context.mysqlContainer.start(); + + return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; +} + +const mysqlSuite: DialectSuite = { + allTypes: async function(context: any): Promise { + const schema1 = { + allBigInts: mysqlTable('all_big_ints', { + simple: bigint('simple', { mode: 'number' }), + columnNotNull: bigint('column_not_null', { mode: 'number' }).notNull(), + columnDefault: bigint('column_default', { mode: 'number' }).default(12), + columnDefaultSql: bigint('column_default_sql', { + mode: 'number', + }).default(12), + }), + allBools: mysqlTable('all_bools', { + simple: tinyint('simple'), + columnNotNull: tinyint('column_not_null').notNull(), + columnDefault: tinyint('column_default').default(1), + }), + allChars: mysqlTable('all_chars', { + simple: char('simple', { length: 1 }), + columnNotNull: char('column_not_null', { length: 45 }).notNull(), + // columnDefault: char("column_default", { length: 1 }).default("h"), + columnDefaultSql: char('column_default_sql', { length: 1 }).default( + 'h', + ), + }), + allDateTimes: mysqlTable('all_date_times', { + simple: datetime('simple', { mode: 'string', fsp: 1 }), + columnNotNull: datetime('column_not_null', { + mode: 'string', + }).notNull(), + columnDefault: datetime('column_default', { mode: 'string' }).default( + '2023-03-01 14:05:29', + ), + }), + allDates: mysqlTable('all_dates', { + simple: date('simple', { mode: 'string' }), + column_not_null: date('column_not_null', { mode: 'string' }).notNull(), + column_default: date('column_default', { mode: 'string' }).default( + '2023-03-01', + ), + }), + allDecimals: mysqlTable('all_decimals', { + simple: decimal('simple', { 
precision: 1, scale: 0 }), + columnNotNull: decimal('column_not_null', { + precision: 45, + scale: 3, + }).notNull(), + columnDefault: decimal('column_default', { + precision: 10, + scale: 0, + }).default('100'), + columnDefaultSql: decimal('column_default_sql', { + precision: 10, + scale: 0, + }).default('101'), + }), + + allDoubles: mysqlTable('all_doubles', { + simple: double('simple'), + columnNotNull: double('column_not_null').notNull(), + columnDefault: double('column_default').default(100), + columnDefaultSql: double('column_default_sql').default(101), + }), + + allEnums: mysqlTable('all_enums', { + simple: mysqlEnum('simple', ['hi', 'hello']), + }), + + allEnums1: mysqlTable('all_enums1', { + simple: mysqlEnum('simple', ['hi', 'hello']).default('hi'), + }), + + allFloats: mysqlTable('all_floats', { + columnNotNull: float('column_not_null').notNull(), + columnDefault: float('column_default').default(100), + columnDefaultSql: float('column_default_sql').default(101), + }), + + allInts: mysqlTable('all_ints', { + simple: int('simple'), + columnNotNull: int('column_not_null').notNull(), + columnDefault: int('column_default').default(100), + columnDefaultSql: int('column_default_sql').default(101), + }), + + allIntsRef: mysqlTable('all_ints_ref', { + simple: int('simple'), + columnNotNull: int('column_not_null').notNull(), + columnDefault: int('column_default').default(100), + columnDefaultSql: int('column_default_sql').default(101), + }), + + allJsons: mysqlTable('all_jsons', { + columnDefaultObject: json('column_default_object') + .default({ hello: 'world world' }) + .notNull(), + columnDefaultArray: json('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + foo: 'bar', + fe: 23, + }), + column: json('column'), + }), + + allMInts: mysqlTable('all_m_ints', { + simple: mediumint('simple'), + columnNotNull: mediumint('column_not_null').notNull(), + columnDefault: mediumint('column_default').default(100), + columnDefaultSql: 
mediumint('column_default_sql').default(101), + }), + + allReals: mysqlTable('all_reals', { + simple: double('simple', { precision: 5, scale: 2 }), + columnNotNull: double('column_not_null').notNull(), + columnDefault: double('column_default').default(100), + columnDefaultSql: double('column_default_sql').default(101), + }), + + allSInts: mysqlTable('all_s_ints', { + simple: smallint('simple'), + columnNotNull: smallint('column_not_null').notNull(), + columnDefault: smallint('column_default').default(100), + columnDefaultSql: smallint('column_default_sql').default(101), + }), + + allSmallSerials: mysqlTable('all_small_serials', { + columnAll: serial('column_all').primaryKey().notNull(), + }), + + allTInts: mysqlTable('all_t_ints', { + simple: tinyint('simple'), + columnNotNull: tinyint('column_not_null').notNull(), + columnDefault: tinyint('column_default').default(10), + columnDefaultSql: tinyint('column_default_sql').default(11), + }), + + allTexts: mysqlTable('all_texts', { + simple: text('simple'), + columnNotNull: text('column_not_null').notNull(), + columnDefault: text('column_default').default('hello'), + columnDefaultSql: text('column_default_sql').default('hello'), + }), + + allTimes: mysqlTable('all_times', { + simple: time('simple', { fsp: 1 }), + columnNotNull: time('column_not_null').notNull(), + columnDefault: time('column_default').default('22:12:12'), + }), + + allTimestamps: mysqlTable('all_timestamps', { + columnDateNow: timestamp('column_date_now', { + fsp: 1, + mode: 'string', + }).default(sql`(now())`), + columnAll: timestamp('column_all', { mode: 'string' }) + .default('2023-03-01 14:05:29') + .notNull(), + column: timestamp('column', { mode: 'string' }).default( + '2023-02-28 16:18:31', + ), + }), + + allVarChars: mysqlTable('all_var_chars', { + simple: varchar('simple', { length: 100 }), + columnNotNull: varchar('column_not_null', { length: 45 }).notNull(), + columnDefault: varchar('column_default', { length: 100 }).default( + 'hello', + ), 
+ columnDefaultSql: varchar('column_default_sql', { + length: 100, + }).default('hello'), + }), + + allVarbinaries: mysqlTable('all_varbinaries', { + simple: varbinary('simple', { length: 100 }), + columnNotNull: varbinary('column_not_null', { length: 100 }).notNull(), + columnDefault: varbinary('column_default', { length: 12 }).default( + sql`(uuid_to_bin(uuid()))`, + ), + }), + + allYears: mysqlTable('all_years', { + simple: year('simple'), + columnNotNull: year('column_not_null').notNull(), + columnDefault: year('column_default').default(2022), + }), + + binafry: mysqlTable('binary', { + simple: binary('simple', { length: 1 }), + columnNotNull: binary('column_not_null', { length: 1 }).notNull(), + columnDefault: binary('column_default', { length: 12 }).default( + sql`(uuid_to_bin(uuid()))`, + ), + }), + }; + + const { statements } = await diffTestSchemasPushMysql( + context.client as Connection, + schema1, + schema1, + [], + 'drizzle', + false, + ); + expect(statements.length).toBe(2); + expect(statements).toEqual([ + { + type: 'delete_unique_constraint', + tableName: 'all_small_serials', + data: 'column_all;column_all', + schema: '', + }, + { + type: 'delete_unique_constraint', + tableName: 'all_small_serials', + data: 'column_all;column_all', + schema: '', + }, + ]); + + const { sqlStatements: dropStatements } = await diffTestSchemasMysql( + schema1, + {}, + [], + false, + ); + + for (const st of dropStatements) { + await context.client.query(st); + } + }, + addBasicIndexes: function(context?: any): Promise { + return {} as any; + }, + changeIndexFields: function(context?: any): Promise { + return {} as any; + }, + dropIndex: function(context?: any): Promise { + return {} as any; + }, + indexesToBeNotTriggered: function(context?: any): Promise { + return {} as any; + }, + indexesTestCase1: function(context?: any): Promise { + return {} as any; + }, + async case1() { + // TODO: implement if needed + expect(true).toBe(true); + }, + addNotNull: function(context?: 
any): Promise { + return {} as any; + }, + addNotNullWithDataNoRollback: function(context?: any): Promise { + return {} as any; + }, + addBasicSequences: function(context?: any): Promise { + return {} as any; + }, + addGeneratedColumn: async function(context: any): Promise { + const schema1 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const schema2 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name} || 'hello'`, + { mode: 'stored' }, + ), + generatedName1: text('gen_name1').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name} || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushMysql( + context.client as Connection, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements).toStrictEqual([ + { + column: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + { + column: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + name: 'gen_name1', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + "ALTER TABLE `users` ADD `gen_name1` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); + + for (const st of sqlStatements) { + await context.client.query(st); + } + + const { sqlStatements: dropStatements } = await diffTestSchemasMysql( + schema2, + {}, + [], + false, + ); + + for (const st of 
dropStatements) { + await context.client.query(st); + } + }, + addGeneratedToColumn: async function(context: any): Promise { + const schema1 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name'), + generatedName1: text('gen_name1'), + }), + }; + const schema2 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name} || 'hello'`, + { mode: 'stored' }, + ), + generatedName1: text('gen_name1').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name} || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushMysql( + context.client as Connection, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name1', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` MODIFY COLUMN `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + 'ALTER TABLE `users` DROP COLUMN `gen_name1`;', + "ALTER TABLE `users` ADD `gen_name1` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); + + for (const st of sqlStatements) { + await context.client.query(st); 
+ } + + const { sqlStatements: dropStatements } = await diffTestSchemasMysql( + schema2, + {}, + [], + false, + ); + + for (const st of dropStatements) { + await context.client.query(st); + } + }, + dropGeneratedConstraint: async function(context: any): Promise { + const schema1 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name}`, + { mode: 'stored' }, + ), + generatedName1: text('gen_name1').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name}`, + { mode: 'virtual' }, + ), + }), + }; + const schema2 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name'), + generatedName1: text('gen_name1'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushMysql( + context.client as Connection, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + oldColumn: { + autoincrement: false, + default: undefined, + generated: { + as: '`name`', + type: 'stored', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name1', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + oldColumn: { + autoincrement: false, + default: undefined, + generated: { + as: '`name`', + type: 'virtual', + }, + name: 'gen_name1', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + schema: '', + 
tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', + 'ALTER TABLE `users` DROP COLUMN `gen_name1`;', + 'ALTER TABLE `users` ADD `gen_name1` text;', + ]); + + for (const st of sqlStatements) { + await context.client.query(st); + } + + const { sqlStatements: dropStatements } = await diffTestSchemasMysql( + schema2, + {}, + [], + false, + ); + + for (const st of dropStatements) { + await context.client.query(st); + } + }, + alterGeneratedConstraint: async function(context: any): Promise { + const schema1 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name}`, + { mode: 'stored' }, + ), + generatedName1: text('gen_name1').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name}`, + { mode: 'virtual' }, + ), + }), + }; + const schema2 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name} || 'hello'`, + { mode: 'stored' }, + ), + generatedName1: text('gen_name1').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name} || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushMysql( + context.client as Connection, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); + + const { sqlStatements: dropStatements } = await diffTestSchemasMysql( + schema2, + {}, + [], + false, + ); + + for (const st of dropStatements) { + await context.client.query(st); + } + }, + createTableWithGeneratedConstraint: function(context?: any): Promise { + return {} as any; + }, +}; + +run( + mysqlSuite, + async (context: any) => { + const connectionString = 
process.env.MYSQL_CONNECTION_STRING ?? await createDockerDB(context); + + const sleep = 1000; + let timeLeft = 20000; + let connected = false; + let lastError: unknown | undefined; + do { + try { + context.client = await createConnection(connectionString); + await context.client.connect(); + connected = true; + break; + } catch (e) { + lastError = e; + await new Promise((resolve) => setTimeout(resolve, sleep)); + timeLeft -= sleep; + } + } while (timeLeft > 0); + if (!connected) { + console.error('Cannot connect to MySQL'); + await context.client?.end().catch(console.error); + await context.mysqlContainer?.stop().catch(console.error); + throw lastError; + } + }, + async (context: any) => { + await context.client?.end().catch(console.error); + await context.mysqlContainer?.stop().catch(console.error); + }, +); diff --git a/drizzle-kit/tests/push/pg.test.ts b/drizzle-kit/tests/push/pg.test.ts new file mode 100644 index 000000000..cb1a97122 --- /dev/null +++ b/drizzle-kit/tests/push/pg.test.ts @@ -0,0 +1,2238 @@ +import { PGlite } from '@electric-sql/pglite'; +import { + bigint, + bigserial, + boolean, + char, + date, + doublePrecision, + index, + integer, + interval, + json, + jsonb, + numeric, + pgEnum, + pgSchema, + pgSequence, + pgTable, + real, + serial, + smallint, + text, + time, + timestamp, + uniqueIndex, + uuid, + varchar, + vector, +} from 'drizzle-orm/pg-core'; +import { drizzle } from 'drizzle-orm/pglite'; +import { SQL, sql } from 'drizzle-orm/sql'; +import { pgSuggestions } from 'src/cli/commands/pgPushUtils'; +import { diffTestSchemasPush } from 'tests/schemaDiffer'; +import { afterEach, expect, test } from 'vitest'; +import { DialectSuite, run } from './common'; + +const pgSuite: DialectSuite = { + async allTypes() { + const client = new PGlite(); + + const customSchema = pgSchema('schemass'); + + const transactionStatusEnum = customSchema.enum( + 'TransactionStatusEnum', + ['PENDING', 'FAILED', 'SUCCESS'], + ); + + const enumname = pgEnum('enumname', 
['three', 'two', 'one']); + + const schema1 = { + test: pgEnum('test', ['ds']), + testHello: pgEnum('test_hello', ['ds']), + enumname: pgEnum('enumname', ['three', 'two', 'one']), + + customSchema: customSchema, + transactionStatusEnum: customSchema.enum('TransactionStatusEnum', [ + 'PENDING', + 'FAILED', + 'SUCCESS', + ]), + + allSmallSerials: pgTable('schema_test', { + columnAll: uuid('column_all').defaultRandom(), + column: transactionStatusEnum('column').notNull(), + }), + + allSmallInts: customSchema.table( + 'schema_test2', + { + columnAll: smallint('column_all').default(124).notNull(), + column: smallint('columns').array(), + column1: smallint('column1').array().array(), + column2: smallint('column2').array().array(), + column3: smallint('column3').array(), + }, + (t) => ({ + cd: uniqueIndex('testdfds').on(t.column), + }), + ), + + allEnums: customSchema.table( + 'all_enums', + { + columnAll: enumname('column_all').default('three').notNull(), + column: enumname('columns'), + }, + (t) => ({ + d: index('ds').on(t.column), + }), + ), + + allTimestamps: customSchema.table('all_timestamps', { + columnDateNow: timestamp('column_date_now', { + precision: 1, + withTimezone: true, + mode: 'string', + }).defaultNow(), + columnAll: timestamp('column_all', { mode: 'string' }).default( + '2023-03-01 12:47:29.792', + ), + column: timestamp('column', { mode: 'string' }).default( + sql`'2023-02-28 16:18:31.18'`, + ), + column2: timestamp('column2', { mode: 'string', precision: 3 }).default( + sql`'2023-02-28 16:18:31.18'`, + ), + }), + + allUuids: customSchema.table('all_uuids', { + columnAll: uuid('column_all').defaultRandom().notNull(), + column: uuid('column'), + }), + + allDates: customSchema.table('all_dates', { + column_date_now: date('column_date_now').defaultNow(), + column_all: date('column_all', { mode: 'date' }) + .default(new Date()) + .notNull(), + column: date('column'), + }), + + allReals: customSchema.table('all_reals', { + columnAll: 
real('column_all').default(32).notNull(), + column: real('column'), + columnPrimary: real('column_primary').primaryKey().notNull(), + }), + + allBigints: pgTable('all_bigints', { + columnAll: bigint('column_all', { mode: 'number' }) + .default(124) + .notNull(), + column: bigint('column', { mode: 'number' }), + }), + + allBigserials: customSchema.table('all_bigserials', { + columnAll: bigserial('column_all', { mode: 'bigint' }).notNull(), + column: bigserial('column', { mode: 'bigint' }).notNull(), + }), + + allIntervals: customSchema.table('all_intervals', { + columnAllConstrains: interval('column_all_constrains', { + fields: 'month', + }) + .default('1 mon') + .notNull(), + columnMinToSec: interval('column_min_to_sec', { + fields: 'minute to second', + }), + columnWithoutFields: interval('column_without_fields') + .default('00:00:01') + .notNull(), + column: interval('column'), + column5: interval('column5', { + fields: 'minute to second', + precision: 3, + }), + column6: interval('column6'), + }), + + allSerials: customSchema.table('all_serials', { + columnAll: serial('column_all').notNull(), + column: serial('column').notNull(), + }), + + allTexts: customSchema.table( + 'all_texts', + { + columnAll: text('column_all').default('text').notNull(), + column: text('columns').primaryKey(), + }, + (t) => ({ + cd: index('test').on(t.column), + }), + ), + + allBools: customSchema.table('all_bools', { + columnAll: boolean('column_all').default(true).notNull(), + column: boolean('column'), + }), + + allVarchars: customSchema.table('all_varchars', { + columnAll: varchar('column_all').default('text').notNull(), + column: varchar('column', { length: 200 }), + }), + + allTimes: customSchema.table('all_times', { + columnDateNow: time('column_date_now').defaultNow(), + columnAll: time('column_all').default('22:12:12').notNull(), + column: time('column'), + }), + + allChars: customSchema.table('all_chars', { + columnAll: char('column_all', { length: 1 
}).default('text').notNull(), + column: char('column', { length: 1 }), + }), + + allDoublePrecision: customSchema.table('all_double_precision', { + columnAll: doublePrecision('column_all').default(33.2).notNull(), + column: doublePrecision('column'), + }), + + allJsonb: customSchema.table('all_jsonb', { + columnDefaultObject: jsonb('column_default_object') + .default({ hello: 'world world' }) + .notNull(), + columnDefaultArray: jsonb('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + }), + column: jsonb('column'), + }), + + allJson: customSchema.table('all_json', { + columnDefaultObject: json('column_default_object') + .default({ hello: 'world world' }) + .notNull(), + columnDefaultArray: json('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + foo: 'bar', + fe: 23, + }), + column: json('column'), + }), + + allIntegers: customSchema.table('all_integers', { + columnAll: integer('column_all').primaryKey(), + column: integer('column'), + columnPrimary: integer('column_primary'), + }), + + allNumerics: customSchema.table('all_numerics', { + columnAll: numeric('column_all', { precision: 1, scale: 1 }) + .default('32') + .notNull(), + column: numeric('column'), + columnPrimary: numeric('column_primary').primaryKey().notNull(), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema1, + [], + false, + ['public', 'schemass'], + ); + expect(statements.length).toBe(0); + }, + + async addBasicIndexes() { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + }, + (t) => ({ + indx: index() + .on(t.name.desc(), t.id.asc().nullsLast()) + .with({ fillfactor: 70 }) + .where(sql`select 1`), + indx1: index('indx1') + .using('hash', t.name.desc(), sql`${t.name}`) + .with({ 
fillfactor: 70 }), + }), + ), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + schema: '', + tableName: 'users', + type: 'create_index_pg', + data: { + columns: [ + { + asc: false, + expression: 'name', + isExpression: false, + nulls: 'last', + opclass: undefined, + }, + { + asc: true, + expression: 'id', + isExpression: false, + nulls: 'last', + opclass: undefined, + }, + ], + concurrently: false, + isUnique: false, + method: 'btree', + name: 'users_name_id_index', + where: 'select 1', + with: { + fillfactor: 70, + }, + }, + }); + expect(statements[1]).toStrictEqual({ + schema: '', + tableName: 'users', + type: 'create_index_pg', + data: { + columns: [ + { + asc: false, + expression: 'name', + isExpression: false, + nulls: 'last', + opclass: undefined, + }, + { + asc: true, + expression: '"name"', + isExpression: true, + nulls: 'last', + }, + ], + concurrently: false, + isUnique: false, + method: 'hash', + name: 'indx1', + where: undefined, + with: { + fillfactor: 70, + }, + }, + }); + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe( + `CREATE INDEX IF NOT EXISTS "users_name_id_index" ON "users" USING btree ("name" DESC NULLS LAST,"id") WITH (fillfactor=70) WHERE select 1;`, + ); + expect(sqlStatements[1]).toBe( + `CREATE INDEX IF NOT EXISTS "indx1" ON "users" USING hash ("name" DESC NULLS LAST,"name") WITH (fillfactor=70);`, + ); + }, + + async addGeneratedColumn() { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + }), + }; + const schema2 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name}`, + ), + }), + }; + + const { statements, 
sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: '"users"."name"', + type: 'stored', + }, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name") STORED;', + ]); + + // for (const st of sqlStatements) { + // await client.query(st); + // } + }, + + async addGeneratedToColumn() { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name'), + }), + }; + const schema2 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name}`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: undefined, + columnDefault: undefined, + columnGenerated: { + as: '"users"."name"', + type: 'stored', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" drop column "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name") STORED;', + ]); + + // for (const st of sqlStatements) { + // await client.query(st); + // } + }, + + async dropGeneratedConstraint() { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: 
integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${schema1.users.name}`, + ), + }), + }; + const schema2 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: undefined, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" ALTER COLUMN "gen_name" DROP EXPRESSION;', + ]); + }, + + async alterGeneratedConstraint() { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${schema1.users.name}`, + ), + }), + }; + const schema2 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name} || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); + }, + + async createTableWithGeneratedConstraint() { + const client = new PGlite(); + + const schema1 = {}; + const schema2 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => 
sql`${schema2.users.name} || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columns: [ + { + name: 'id', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + name: 'id2', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + name: 'name', + notNull: false, + primaryKey: false, + type: 'text', + }, + { + generated: { + as: '"users"."name" || \'hello\'', + type: 'stored', + }, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + ], + compositePKs: [], + compositePkName: '', + schema: '', + tableName: 'users', + type: 'create_table', + uniqueConstraints: [], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE IF NOT EXISTS "users" (\n\t"id" integer,\n\t"id2" integer,\n\t"name" text,\n\t"gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED\n);\n', + ]); + }, + + async addBasicSequences() { + const client = new PGlite(); + + const schema1 = { + seq: pgSequence('my_seq', { startWith: 100 }), + }; + + const schema2 = { + seq: pgSequence('my_seq', { startWith: 100 }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + expect(statements.length).toBe(0); + }, + + async changeIndexFields() { + const client = new PGlite(); + + const schema1 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + }, + (t) => ({ + removeColumn: index('removeColumn').on(t.name, t.id), + addColumn: index('addColumn') + .on(t.name.desc()) + .with({ fillfactor: 70 }), + removeExpression: index('removeExpression') + .on(t.name.desc(), sql`name`) + .concurrently(), + addExpression: index('addExpression').on(t.id.desc()), + changeExpression: index('changeExpression').on( + t.id.desc(), + sql`name`, + ), + changeName: 
index('changeName') + .on(t.name.desc(), t.id.asc().nullsLast()) + .with({ fillfactor: 70 }), + changeWith: index('changeWith').on(t.name).with({ fillfactor: 70 }), + changeUsing: index('changeUsing').on(t.name), + }), + ), + }; + + const schema2 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + }, + (t) => ({ + removeColumn: index('removeColumn').on(t.name), + addColumn: index('addColumn') + .on(t.name.desc(), t.id.nullsLast()) + .with({ fillfactor: 70 }), + removeExpression: index('removeExpression') + .on(t.name.desc()) + .concurrently(), + addExpression: index('addExpression').on(t.id.desc()), + changeExpression: index('changeExpression').on( + t.id.desc(), + sql`name desc`, + ), + changeName: index('newName') + .on(t.name.desc(), sql`name`) + .with({ fillfactor: 70 }), + changeWith: index('changeWith').on(t.name).with({ fillfactor: 90 }), + changeUsing: index('changeUsing').using('hash', t.name), + }), + ), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(sqlStatements).toStrictEqual([ + 'DROP INDEX IF EXISTS "changeName";', + 'DROP INDEX IF EXISTS "addColumn";', + 'DROP INDEX IF EXISTS "changeExpression";', + 'DROP INDEX IF EXISTS "changeUsing";', + 'DROP INDEX IF EXISTS "changeWith";', + 'DROP INDEX IF EXISTS "removeColumn";', + 'DROP INDEX IF EXISTS "removeExpression";', + 'CREATE INDEX IF NOT EXISTS "newName" ON "users" USING btree ("name" DESC NULLS LAST,name) WITH (fillfactor=70);', + 'CREATE INDEX IF NOT EXISTS "addColumn" ON "users" USING btree ("name" DESC NULLS LAST,"id") WITH (fillfactor=70);', + 'CREATE INDEX IF NOT EXISTS "changeExpression" ON "users" USING btree ("id" DESC NULLS LAST,name desc);', + 'CREATE INDEX IF NOT EXISTS "changeUsing" ON "users" USING hash ("name");', + 'CREATE INDEX IF NOT EXISTS "changeWith" ON "users" USING btree ("name") WITH (fillfactor=90);', + 'CREATE INDEX IF NOT EXISTS 
"removeColumn" ON "users" USING btree ("name");', + 'CREATE INDEX CONCURRENTLY IF NOT EXISTS "removeExpression" ON "users" USING btree ("name" DESC NULLS LAST);', + ]); + }, + + async dropIndex() { + const client = new PGlite(); + + const schema1 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + }, + (t) => ({ + indx: index() + .on(t.name.desc(), t.id.asc().nullsLast()) + .with({ fillfactor: 70 }), + }), + ), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + schema: '', + tableName: 'users', + type: 'drop_index', + data: 'users_name_id_index;name--false--last,,id--true--last;false;btree;{"fillfactor":"70"}', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `DROP INDEX IF EXISTS "users_name_id_index";`, + ); + }, + + async indexesToBeNotTriggered() { + const client = new PGlite(); + + const schema1 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + }, + (t) => ({ + indx: index('indx').on(t.name.desc()).concurrently(), + indx1: index('indx1') + .on(t.name.desc()) + .where(sql`true`), + indx2: index('indx2') + .on(t.name.op('text_ops')) + .where(sql`true`), + indx3: index('indx3') + .on(sql`lower(name)`) + .where(sql`true`), + }), + ), + }; + + const schema2 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + }, + (t) => ({ + indx: index('indx').on(t.name.desc()), + indx1: index('indx1') + .on(t.name.desc()) + .where(sql`false`), + indx2: index('indx2') + .on(t.name.op('test')) + .where(sql`true`), + indx3: index('indx3') + .on(sql`lower(id)`) + .where(sql`true`), + }), + ), + }; + + const { statements, sqlStatements } = await 
diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements.length).toBe(0); + }, + + async indexesTestCase1() { + const client = new PGlite(); + + const schema1 = { + users: pgTable( + 'users', + { + id: uuid('id').defaultRandom().primaryKey(), + name: text('name').notNull(), + description: text('description'), + imageUrl: text('image_url'), + inStock: boolean('in_stock').default(true), + }, + (t) => ({ + indx: index().on(t.id.desc().nullsFirst()), + indx1: index('indx1').on(t.id, t.imageUrl), + indx2: index('indx4').on(t.id), + }), + ), + }; + + const schema2 = { + users: pgTable( + 'users', + { + id: uuid('id').defaultRandom().primaryKey(), + name: text('name').notNull(), + description: text('description'), + imageUrl: text('image_url'), + inStock: boolean('in_stock').default(true), + }, + (t) => ({ + indx: index().on(t.id.desc().nullsFirst()), + indx1: index('indx1').on(t.id, t.imageUrl), + indx2: index('indx4').on(t.id), + }), + ), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements.length).toBe(0); + }, + + async addNotNull() { + const client = new PGlite(); + + const schema1 = { + users: pgTable( + 'User', + { + id: text('id').primaryKey().notNull(), + name: text('name'), + username: text('username'), + gh_username: text('gh_username'), + email: text('email'), + emailVerified: timestamp('emailVerified', { + precision: 3, + mode: 'date', + }), + image: text('image'), + createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) + .default(sql`CURRENT_TIMESTAMP`) + .notNull(), + updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) + .notNull() + .$onUpdate(() => new Date()), + }, + (table) => { + return { + emailKey: uniqueIndex('User_email_key').on(table.email), + }; + }, + ), + }; + + const schema2 = { + users: pgTable( + 'User', + { + id: text('id').primaryKey().notNull(), + name: 
text('name'), + username: text('username'), + gh_username: text('gh_username'), + email: text('email').notNull(), + emailVerified: timestamp('emailVerified', { + precision: 3, + mode: 'date', + }), + image: text('image'), + createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) + .default(sql`CURRENT_TIMESTAMP`) + .notNull(), + updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) + .notNull() + .$onUpdate(() => new Date()), + }, + (table) => { + return { + emailKey: uniqueIndex('User_email_key').on(table.email), + }; + }, + ), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + const query = async (sql: string, params?: any[]) => { + const result = await client.query(sql, params ?? []); + return result.rows as any[]; + }; + + const { statementsToExecute } = await pgSuggestions({ query }, statements); + + expect(statementsToExecute).toStrictEqual([ + 'ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;', + ]); + }, + + async addNotNullWithDataNoRollback() { + const client = new PGlite(); + const db = drizzle(client); + + const schema1 = { + users: pgTable( + 'User', + { + id: text('id').primaryKey().notNull(), + name: text('name'), + username: text('username'), + gh_username: text('gh_username'), + email: text('email'), + emailVerified: timestamp('emailVerified', { + precision: 3, + mode: 'date', + }), + image: text('image'), + createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) + .default(sql`CURRENT_TIMESTAMP`) + .notNull(), + updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) + .notNull() + .$onUpdate(() => new Date()), + }, + (table) => { + return { + emailKey: uniqueIndex('User_email_key').on(table.email), + }; + }, + ), + }; + + const schema2 = { + users: pgTable( + 'User', + { + id: text('id').primaryKey().notNull(), + name: text('name'), + username: text('username'), + gh_username: text('gh_username'), + email: 
text('email').notNull(), + emailVerified: timestamp('emailVerified', { + precision: 3, + mode: 'date', + }), + image: text('image'), + createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) + .default(sql`CURRENT_TIMESTAMP`) + .notNull(), + updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) + .notNull() + .$onUpdate(() => new Date()), + }, + (table) => { + return { + emailKey: uniqueIndex('User_email_key').on(table.email), + }; + }, + ), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + const query = async (sql: string, params?: any[]) => { + const result = await client.query(sql, params ?? []); + return result.rows as any[]; + }; + + await db.insert(schema1.users).values({ id: 'str', email: 'email@gmail' }); + + const { statementsToExecute, shouldAskForApprove } = await pgSuggestions( + { query }, + statements, + ); + + expect(statementsToExecute).toStrictEqual([ + 'ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;', + ]); + + expect(shouldAskForApprove).toBeFalsy(); + }, + + // async addVectorIndexes() { + // const client = new PGlite(); + + // const schema1 = { + // users: pgTable("users", { + // id: serial("id").primaryKey(), + // name: vector("name", { dimensions: 3 }), + // }), + // }; + + // const schema2 = { + // users: pgTable( + // "users", + // { + // id: serial("id").primaryKey(), + // embedding: vector("name", { dimensions: 3 }), + // }, + // (t) => ({ + // indx2: index("vector_embedding_idx") + // .using("hnsw", t.embedding.op("vector_ip_ops")) + // .with({ m: 16, ef_construction: 64 }), + // }) + // ), + // }; + + // const { statements, sqlStatements } = await diffTestSchemasPush( + // client, + // schema1, + // schema2, + // [], + // false, + // ["public"] + // ); + // expect(statements.length).toBe(1); + // expect(statements[0]).toStrictEqual({ + // schema: "", + // tableName: "users", + // type: "create_index", + // data: 
'vector_embedding_idx;name,true,last,vector_ip_ops;false;false;hnsw;undefined;{"m":16,"ef_construction":64}', + // }); + // expect(sqlStatements.length).toBe(1); + // expect(sqlStatements[0]).toBe( + // `CREATE INDEX IF NOT EXISTS "vector_embedding_idx" ON "users" USING hnsw (name vector_ip_ops) WITH (m=16,ef_construction=64);` + // ); + // }, + async case1() { + // TODO: implement if needed + expect(true).toBe(true); + }, +}; + +run(pgSuite); + +test('full sequence: no changes', async () => { + const client = new PGlite(); + + const schema1 = { + seq: pgSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const schema2 = { + seq: pgSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); + + for (const st of sqlStatements) { + await client.query(st); + } +}); + +test('basic sequence: change fields', async () => { + const client = new PGlite(); + + const schema1 = { + seq: pgSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const schema2 = { + seq: pgSequence('my_seq', { + startWith: 100, + maxValue: 100000, + minValue: 100, + cycle: true, + cache: 10, + increment: 4, + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + type: 'alter_sequence', + schema: 'public', + name: 'my_seq', + values: { + minValue: '100', + maxValue: '100000', + increment: '4', + startWith: '100', + cache: '10', + cycle: true, + }, + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER SEQUENCE 
"public"."my_seq" INCREMENT BY 4 MINVALUE 100 MAXVALUE 100000 START WITH 100 CACHE 10 CYCLE;', + ]); + + for (const st of sqlStatements) { + await client.query(st); + } +}); + +test('basic sequence: change name', async () => { + const client = new PGlite(); + + const schema1 = { + seq: pgSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const schema2 = { + seq: pgSequence('my_seq2', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + ['public.my_seq->public.my_seq2'], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + nameFrom: 'my_seq', + nameTo: 'my_seq2', + schema: 'public', + type: 'rename_sequence', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER SEQUENCE "public"."my_seq" RENAME TO "my_seq2";', + ]); + + for (const st of sqlStatements) { + await client.query(st); + } +}); + +test('basic sequence: change name and fields', async () => { + const client = new PGlite(); + + const schema1 = { + seq: pgSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const schema2 = { + seq: pgSequence('my_seq2', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 4, + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + ['public.my_seq->public.my_seq2'], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + nameFrom: 'my_seq', + nameTo: 'my_seq2', + schema: 'public', + type: 'rename_sequence', + }, + { + name: 'my_seq2', + schema: 'public', + type: 'alter_sequence', + values: { + cache: '10', + cycle: true, + increment: '4', + maxValue: '10000', + minValue: '100', + startWith: '100', + }, + }, + ]); + 
expect(sqlStatements).toStrictEqual([ + 'ALTER SEQUENCE "public"."my_seq" RENAME TO "my_seq2";', + 'ALTER SEQUENCE "public"."my_seq2" INCREMENT BY 4 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10 CYCLE;', + ]); + + for (const st of sqlStatements) { + await client.query(st); + } +}); + +// identity push tests +test('create table: identity always/by default - no params', async () => { + const client = new PGlite(); + + const schema1 = {}; + + const schema2 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + id1: bigint('id1', { mode: 'number' }).generatedByDefaultAsIdentity(), + id2: smallint('id2').generatedByDefaultAsIdentity(), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columns: [ + { + identity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', + name: 'id', + notNull: true, + primaryKey: false, + type: 'integer', + }, + { + identity: 'users_id1_seq;byDefault;1;9223372036854775807;1;1;1;false', + name: 'id1', + notNull: true, + primaryKey: false, + type: 'bigint', + }, + { + identity: 'users_id2_seq;byDefault;1;32767;1;1;1;false', + name: 'id2', + notNull: true, + primaryKey: false, + type: 'smallint', + }, + ], + compositePKs: [], + compositePkName: '', + schema: '', + tableName: 'users', + type: 'create_table', + uniqueConstraints: [], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE IF NOT EXISTS "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t"id1" bigint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id1_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),\n\t"id2" smallint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id2_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1)\n);\n', + 
]); + + for (const st of sqlStatements) { + await client.query(st); + } +}); + +test('create table: identity always/by default - few params', async () => { + const client = new PGlite(); + + const schema1 = {}; + + const schema2 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ increment: 4 }), + id1: bigint('id1', { mode: 'number' }).generatedByDefaultAsIdentity({ + startWith: 120, + maxValue: 17000, + }), + id2: smallint('id2').generatedByDefaultAsIdentity({ cycle: true }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columns: [ + { + identity: 'users_id_seq;byDefault;1;2147483647;4;1;1;false', + name: 'id', + notNull: true, + primaryKey: false, + type: 'integer', + }, + { + identity: 'users_id1_seq;byDefault;1;17000;1;120;1;false', + name: 'id1', + notNull: true, + primaryKey: false, + type: 'bigint', + }, + { + identity: 'users_id2_seq;byDefault;1;32767;1;1;1;true', + name: 'id2', + notNull: true, + primaryKey: false, + type: 'smallint', + }, + ], + compositePKs: [], + compositePkName: '', + schema: '', + tableName: 'users', + type: 'create_table', + uniqueConstraints: [], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE IF NOT EXISTS "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id_seq" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t"id1" bigint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id1_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 17000 START WITH 120 CACHE 1),\n\t"id2" smallint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id2_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1 CYCLE)\n);\n', + ]); + + for (const st of sqlStatements) { + await client.query(st); + } +}); + +test('create table: identity always/by default - all params', async () => { + const client = new 
PGlite(); + + const schema1 = {}; + + const schema2 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + increment: 4, + minValue: 100, + }), + id1: bigint('id1', { mode: 'number' }).generatedByDefaultAsIdentity({ + startWith: 120, + maxValue: 17000, + increment: 3, + cycle: true, + cache: 100, + }), + id2: smallint('id2').generatedByDefaultAsIdentity({ cycle: true }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columns: [ + { + identity: 'users_id_seq;byDefault;100;2147483647;4;100;1;false', + name: 'id', + notNull: true, + primaryKey: false, + type: 'integer', + }, + { + identity: 'users_id1_seq;byDefault;1;17000;3;120;100;true', + name: 'id1', + notNull: true, + primaryKey: false, + type: 'bigint', + }, + { + identity: 'users_id2_seq;byDefault;1;32767;1;1;1;true', + name: 'id2', + notNull: true, + primaryKey: false, + type: 'smallint', + }, + ], + compositePKs: [], + compositePkName: '', + schema: '', + tableName: 'users', + type: 'create_table', + uniqueConstraints: [], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE IF NOT EXISTS "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id_seq" INCREMENT BY 4 MINVALUE 100 MAXVALUE 2147483647 START WITH 100 CACHE 1),\n\t"id1" bigint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id1_seq" INCREMENT BY 3 MINVALUE 1 MAXVALUE 17000 START WITH 120 CACHE 100 CYCLE),\n\t"id2" smallint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id2_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1 CYCLE)\n);\n', + ]); + + for (const st of sqlStatements) { + await client.query(st); + } +}); + +test('no diff: identity always/by default - no params', async () => { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: 
integer('id').generatedByDefaultAsIdentity(), + id2: integer('id2').generatedAlwaysAsIdentity(), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + id2: integer('id2').generatedAlwaysAsIdentity(), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('no diff: identity always/by default - few params', async () => { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_name', + }), + id2: integer('id2').generatedAlwaysAsIdentity({ + increment: 1, + startWith: 3, + }), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_name', + }), + id2: integer('id2').generatedAlwaysAsIdentity({ + increment: 1, + startWith: 3, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('no diff: identity always/by default - all params', async () => { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_name', + startWith: 10, + minValue: 10, + maxValue: 1000, + cycle: true, + cache: 10, + increment: 2, + }), + id2: integer('id2').generatedAlwaysAsIdentity({ + startWith: 10, + minValue: 10, + maxValue: 1000, + cycle: true, + cache: 10, + increment: 2, + }), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_name', + startWith: 10, + minValue: 10, + maxValue: 1000, + cycle: true, + cache: 10, + increment: 2, + 
}), + id2: integer('id2').generatedAlwaysAsIdentity({ + startWith: 10, + minValue: 10, + maxValue: 1000, + cycle: true, + cache: 10, + increment: 2, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('drop identity from a column - no params', async () => { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, + ]); + + for (const st of sqlStatements) { + await client.query(st); + } +}); + +test('drop identity from a column - few params', async () => { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name' }), + id1: integer('id1').generatedByDefaultAsIdentity({ + name: 'custom_name1', + increment: 4, + }), + id2: integer('id2').generatedAlwaysAsIdentity({ + name: 'custom_name2', + increment: 4, + }), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id'), + id1: integer('id1'), + id2: integer('id2'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_identity', + 
}, + { + columnName: 'id1', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_identity', + }, + { + columnName: 'id2', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, + 'ALTER TABLE "users" ALTER COLUMN "id1" DROP IDENTITY;', + 'ALTER TABLE "users" ALTER COLUMN "id2" DROP IDENTITY;', + ]); + + for (const st of sqlStatements) { + await client.query(st); + } +}); + +test('drop identity from a column - all params', async () => { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + id1: integer('id1').generatedByDefaultAsIdentity({ + name: 'custom_name1', + startWith: 10, + minValue: 10, + maxValue: 1000, + cycle: true, + cache: 10, + increment: 2, + }), + id2: integer('id2').generatedAlwaysAsIdentity({ + name: 'custom_name2', + startWith: 10, + minValue: 10, + maxValue: 1000, + cycle: true, + cache: 10, + increment: 2, + }), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id'), + id1: integer('id1'), + id2: integer('id2'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_identity', + }, + { + columnName: 'id1', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_identity', + }, + { + columnName: 'id2', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, + 'ALTER TABLE "users" ALTER COLUMN "id1" DROP IDENTITY;', + 'ALTER TABLE "users" ALTER COLUMN "id2" DROP IDENTITY;', + ]); + + for (const 
st of sqlStatements) { + await client.query(st); + } +}); + +test('alter identity from a column - no params', async () => { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + identity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', + oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_change_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', + ]); + + for (const st of sqlStatements) { + await client.query(st); + } +}); + +test('alter identity from a column - few params', async () => { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + startWith: 100, + increment: 4, + maxValue: 10000, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + identity: 'users_id_seq;byDefault;1;10000;4;100;1;false', + oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_change_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" ALTER COLUMN "id" SET MAXVALUE 10000;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET INCREMENT BY 4;', 
+ ]); + + for (const st of sqlStatements) { + await client.query(st); + } +}); + +test('alter identity from a column - by default to always', async () => { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').generatedAlwaysAsIdentity({ + startWith: 100, + increment: 4, + maxValue: 10000, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + identity: 'users_id_seq;always;1;10000;4;100;1;false', + oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_change_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED ALWAYS;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET MAXVALUE 10000;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET INCREMENT BY 4;', + ]); + + for (const st of sqlStatements) { + await client.query(st); + } +}); + +test('alter identity from a column - always to by default', async () => { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: integer('id').generatedAlwaysAsIdentity({ startWith: 100 }), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + startWith: 100, + increment: 4, + maxValue: 10000, + cycle: true, + cache: 100, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + identity: 'users_id_seq;byDefault;1;10000;4;100;100;true', + oldIdentity: 'users_id_seq;always;1;2147483647;1;100;1;false', + schema: '', + 
tableName: 'users', + type: 'alter_table_alter_column_change_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED BY DEFAULT;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET MAXVALUE 10000;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET INCREMENT BY 4;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 100;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET CYCLE;', + ]); + + for (const st of sqlStatements) { + await client.query(st); + } +}); + +test('add column with identity - few params', async () => { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + email: text('email'), + }), + }; + + const schema2 = { + users: pgTable('users', { + email: text('email'), + id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name' }), + id1: integer('id1').generatedAlwaysAsIdentity({ + name: 'custom_name1', + increment: 4, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + column: { + identity: 'custom_name;byDefault;1;2147483647;1;1;1;false', + name: 'id', + notNull: true, + primaryKey: false, + type: 'integer', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + { + column: { + identity: 'custom_name1;always;1;2147483647;4;1;1;false', + name: 'id1', + notNull: true, + primaryKey: false, + type: 'integer', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" ADD COLUMN "id" integer NOT NULL GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', + 'ALTER TABLE "users" ADD COLUMN "id1" integer NOT NULL GENERATED ALWAYS AS IDENTITY (sequence name "custom_name1" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', + 
]); + + // for (const st of sqlStatements) { + // await client.query(st); + // } +}); + +test('add identity to column - few params', async () => { + const client = new PGlite(); + + const schema1 = { + users: pgTable('users', { + id: integer('id'), + id1: integer('id1'), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name' }), + id1: integer('id1').generatedAlwaysAsIdentity({ + name: 'custom_name1', + increment: 4, + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + columnName: 'id', + identity: 'custom_name;byDefault;1;2147483647;1;1;1;false', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_identity', + }, + { + columnName: 'id1', + identity: 'custom_name1;always;1;2147483647;4;1;1;false', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_identity', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "users" ALTER COLUMN "id" ADD GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', + 'ALTER TABLE "users" ALTER COLUMN "id1" ADD GENERATED ALWAYS AS IDENTITY (sequence name "custom_name1" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', + ]); + + // for (const st of sqlStatements) { + // await client.query(st); + // } +}); + +test('add array column - empty array default', async () => { + const client = new PGlite(); + + const schema1 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const schema2 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').array().default([]), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + 
expect(statements).toStrictEqual([ + { + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{}'" }, + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "test" ADD COLUMN "values" integer[] DEFAULT \'{}\';', + ]); +}); + +test('add array column - default', async () => { + const client = new PGlite(); + + const schema1 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const schema2 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').array().default([1, 2, 3]), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements).toStrictEqual([ + { + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{1,2,3}'" }, + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "test" ADD COLUMN "values" integer[] DEFAULT \'{1,2,3}\';', + ]); +}); diff --git a/drizzle-kit/tests/push/sqlite.test.ts b/drizzle-kit/tests/push/sqlite.test.ts new file mode 100644 index 000000000..cf468d3ec --- /dev/null +++ b/drizzle-kit/tests/push/sqlite.test.ts @@ -0,0 +1,434 @@ +import Database from 'better-sqlite3'; +import { SQL, sql } from 'drizzle-orm'; +import { blob, foreignKey, int, integer, numeric, real, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { diffTestSchemasPushSqlite } from 'tests/schemaDiffer'; +import { expect, test } from 'vitest'; +import { DialectSuite, run } from './common'; + +const sqliteSuite: DialectSuite = { + addBasicIndexes: function(context?: any): Promise { + return {} as any; + }, + changeIndexFields: function(context?: any): Promise { + return {} as any; + }, + dropIndex: function(context?: any): Promise { + return {} as any; + }, + + async 
allTypes() { + const sqlite = new Database(':memory:'); + + const Users = sqliteTable('users', { + id: integer('id').primaryKey().notNull(), + name: text('name').notNull(), + email: text('email'), + textJson: text('text_json', { mode: 'json' }), + blobJon: blob('blob_json', { mode: 'json' }), + blobBigInt: blob('blob_bigint', { mode: 'bigint' }), + numeric: numeric('numeric'), + createdAt: integer('created_at', { mode: 'timestamp' }), + createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }), + real: real('real'), + text: text('text', { length: 255 }), + role: text('role', { enum: ['admin', 'user'] }).default('user'), + isConfirmed: integer('is_confirmed', { + mode: 'boolean', + }), + }); + + const schema1 = { + Users, + + Customers: sqliteTable('customers', { + id: integer('id').primaryKey(), + address: text('address').notNull(), + isConfirmed: integer('is_confirmed', { mode: 'boolean' }), + registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) + .notNull() + .$defaultFn(() => new Date()), + userId: integer('user_id') + .references(() => Users.id) + .notNull(), + }), + + Posts: sqliteTable('posts', { + id: integer('id').primaryKey(), + content: text('content'), + authorId: integer('author_id'), + }), + }; + + const { statements } = await diffTestSchemasPushSqlite( + sqlite, + schema1, + schema1, + [], + false, + ); + expect(statements.length).toBe(0); + }, + indexesToBeNotTriggered: function(context?: any): Promise { + return {} as any; + }, + indexesTestCase1: function(context?: any): Promise { + return {} as any; + }, + async case1(): Promise { + const sqlite = new Database(':memory:'); + + const schema1 = { + users: sqliteTable('users', { + id: text('id').notNull().primaryKey(), + firstName: text('first_name').notNull(), + lastName: text('last_name').notNull(), + username: text('username').notNull().unique(), + email: text('email').notNull().unique(), + password: text('password').notNull(), + avatarUrl: text('avatar_url').notNull(), + 
postsCount: integer('posts_count').notNull().default(0), + followersCount: integer('followers_count').notNull().default(0), + followingsCount: integer('followings_count').notNull().default(0), + createdAt: integer('created_at').notNull(), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: text('id').notNull().primaryKey(), + firstName: text('first_name').notNull(), + lastName: text('last_name').notNull(), + username: text('username').notNull().unique(), + email: text('email').notNull().unique(), + password: text('password').notNull(), + avatarUrl: text('avatar_url').notNull(), + followersCount: integer('followers_count').notNull().default(0), + followingsCount: integer('followings_count').notNull().default(0), + createdAt: integer('created_at').notNull(), + }), + }; + + const { statements } = await diffTestSchemasPushSqlite( + sqlite, + schema1, + schema2, + [], + false, + ); + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_drop_column', + tableName: 'users', + columnName: 'posts_count', + schema: '', + }); + }, + addNotNull: function(context?: any): Promise { + return {} as any; + }, + addNotNullWithDataNoRollback: function(context?: any): Promise { + return {} as any; + }, + addBasicSequences: function(context?: any): Promise { + return {} as any; + }, + // --- + addGeneratedColumn: async function(context?: any): Promise { + const sqlite = new Database(':memory:'); + + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSqlite( + sqlite, + from, + to, + [], + ); + + expect(statements).toStrictEqual([]); + 
expect(sqlStatements).toStrictEqual([]); + }, + addGeneratedToColumn: async function(context?: any): Promise { + const sqlite = new Database(':memory:'); + + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + generatedName1: text('gen_name1'), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs((): SQL => sql`${to.users.name} || 'hello'`, { + mode: 'stored', + }), + generatedName1: text('gen_name1').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSqlite( + sqlite, + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: '("name" || \'hello\')', + type: 'virtual', + }, + columnName: 'gen_name1', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name1`;', + 'ALTER TABLE `users` ADD `gen_name1` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', + ]); + + for (const st of sqlStatements) { + sqlite.exec(st); + } + }, + dropGeneratedConstraint: async function(context?: any): Promise { + const sqlite = new Database(':memory:'); + + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + generatedName1: text('gen_name1').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + 
const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name'), + generatedName1: text('gen_name1'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSqlite( + sqlite, + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name1', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + 'ALTER TABLE `users` DROP COLUMN `gen_name1`;', + 'ALTER TABLE `users` ADD `gen_name1` text;', + ]); + + for (const st of sqlStatements) { + sqlite.exec(st); + } + }, + alterGeneratedConstraint: async function(context?: any): Promise { + const sqlite = new Database(':memory:'); + + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + generatedName1: text('gen_name1').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name}`, + { mode: 'stored' }, + ), + generatedName1: 
text('gen_name1').generatedAlwaysAs( + (): SQL => sql`${to.users.name}`, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSqlite( + sqlite, + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: '("name")', + type: 'virtual', + }, + columnName: 'gen_name1', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name1`;', + 'ALTER TABLE `users` ADD `gen_name1` text GENERATED ALWAYS AS ("name") VIRTUAL;', + ]); + + for (const st of sqlStatements) { + sqlite.exec(st); + } + }, + createTableWithGeneratedConstraint: function(context?: any): Promise { + return {} as any; + }, +}; + +run(sqliteSuite); + +test('create table with custom name references', async (t) => { + const sqlite = new Database(':memory:'); + + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name').notNull(), + }); + + const schema1 = { + users, + posts: sqliteTable( + 'posts', + { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + userId: int('user_id'), + }, + (t) => ({ + fk: foreignKey({ + columns: [t.id], + foreignColumns: [users.id], + name: 'custom_name_fk', + }), + }), + ), + }; + + const schema2 = { + users, + posts: sqliteTable( + 'posts', + { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + userId: int('user_id'), + }, + (t) => ({ + fk: foreignKey({ + columns: [t.id], + foreignColumns: [users.id], + name: 'custom_name_fk', + }), + }), + ), + }; + + const { sqlStatements } = await diffTestSchemasPushSqlite( + sqlite, + schema1, + schema2, + [], + ); + + expect(sqlStatements!.length).toBe(0); +}); diff --git 
a/drizzle-kit/tests/schemaDiffer.ts b/drizzle-kit/tests/schemaDiffer.ts new file mode 100644 index 000000000..f300fc68c --- /dev/null +++ b/drizzle-kit/tests/schemaDiffer.ts @@ -0,0 +1,1509 @@ +import { PGlite } from '@electric-sql/pglite'; +import { Database } from 'better-sqlite3'; +import { is } from 'drizzle-orm'; +import { MySqlSchema, MySqlTable } from 'drizzle-orm/mysql-core'; +import { isPgEnum, isPgSequence, PgEnum, PgSchema, PgSequence, PgTable } from 'drizzle-orm/pg-core'; +import { SingleStoreSchema, SingleStoreTable } from 'drizzle-orm/singlestore-core'; +import { SQLiteTable } from 'drizzle-orm/sqlite-core'; +import * as fs from 'fs'; +import { Connection } from 'mysql2/promise'; +import { + columnsResolver, + enumsResolver, + Named, + schemasResolver, + sequencesResolver, + tablesResolver, +} from 'src/cli/commands/migrate'; +import { logSuggestionsAndReturn } from 'src/cli/commands/sqlitePushUtils'; +import { schemaToTypeScript as schemaToTypeScriptMySQL } from 'src/introspect-mysql'; +import { schemaToTypeScript } from 'src/introspect-pg'; +import { schemaToTypeScript as schemaToTypeScriptSingleStore } from 'src/introspect-singlestore'; +import { schemaToTypeScript as schemaToTypeScriptSQLite } from 'src/introspect-sqlite'; +import { prepareFromMySqlImports } from 'src/serializer/mysqlImports'; +import { mysqlSchema, squashMysqlScheme } from 'src/serializer/mysqlSchema'; +import { fromDatabase as fromMySqlDatabase, generateMySqlSnapshot } from 'src/serializer/mysqlSerializer'; +import { prepareFromPgImports } from 'src/serializer/pgImports'; +import { pgSchema, squashPgScheme } from 'src/serializer/pgSchema'; +import { fromDatabase, generatePgSnapshot } from 'src/serializer/pgSerializer'; +import { prepareFromSingleStoreImports } from 'src/serializer/singlestoreImports'; +import { singlestoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema'; +import { + fromDatabase as fromSingleStoreDatabase, + generateSingleStoreSnapshot, 
+} from 'src/serializer/singlestoreSerializer'; +import { prepareFromSqliteImports } from 'src/serializer/sqliteImports'; +import { sqliteSchema, squashSqliteScheme } from 'src/serializer/sqliteSchema'; +import { fromDatabase as fromSqliteDatabase, generateSqliteSnapshot } from 'src/serializer/sqliteSerializer'; +import { + applyMysqlSnapshotsDiff, + applyPgSnapshotsDiff, + applySingleStoreSnapshotsDiff, + applySqliteSnapshotsDiff, + Column, + ColumnsResolverInput, + ColumnsResolverOutput, + Enum, + ResolverInput, + ResolverOutput, + ResolverOutputWithMoved, + Sequence, + Table, +} from 'src/snapshotsDiffer'; + +export type PostgresSchema = Record< + string, + PgTable | PgEnum | PgSchema | PgSequence +>; +export type MysqlSchema = Record | MySqlSchema>; +export type SinglestoreSchema = Record | SingleStoreSchema>; +export type SqliteSchema = Record>; + +export const testSchemasResolver = + (renames: Set) => async (input: ResolverInput): Promise> => { + try { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || renames.size === 0 + ) { + return { + created: input.created, + renamed: [], + deleted: input.deleted, + }; + } + + let createdSchemas = [...input.created]; + let deletedSchemas = [...input.deleted]; + + const result: { + created: Named[]; + renamed: { from: Named; to: Named }[]; + deleted: Named[]; + } = { created: [], renamed: [], deleted: [] }; + + for (let rename of renames) { + const [from, to] = rename.split('->'); + + const idxFrom = deletedSchemas.findIndex((it) => { + return it.name === from; + }); + + if (idxFrom >= 0) { + const idxTo = createdSchemas.findIndex((it) => { + return it.name === to; + }); + + result.renamed.push({ + from: deletedSchemas[idxFrom], + to: createdSchemas[idxTo], + }); + + delete createdSchemas[idxTo]; + delete deletedSchemas[idxFrom]; + + createdSchemas = createdSchemas.filter(Boolean); + deletedSchemas = deletedSchemas.filter(Boolean); + } + } + + result.created = createdSchemas; + result.deleted = 
deletedSchemas; + + return result; + } catch (e) { + console.error(e); + throw e; + } + }; + +export const testSequencesResolver = (renames: Set) => +async ( + input: ResolverInput, +): Promise> => { + try { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || renames.size === 0 + ) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; + } + + let createdSequences = [...input.created]; + let deletedSequences = [...input.deleted]; + + const result: { + created: Sequence[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: Sequence; to: Sequence }[]; + deleted: Sequence[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; + + for (let rename of renames) { + const [from, to] = rename.split('->'); + + const idxFrom = deletedSequences.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === from; + }); + + if (idxFrom >= 0) { + const idxTo = createdSequences.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; + }); + + const tableFrom = deletedSequences[idxFrom]; + const tableTo = createdSequences[idxFrom]; + + if (tableFrom.schema !== tableTo.schema) { + result.moved.push({ + name: tableFrom.name, + schemaFrom: tableFrom.schema, + schemaTo: tableTo.schema, + }); + } + + if (tableFrom.name !== tableTo.name) { + result.renamed.push({ + from: deletedSequences[idxFrom], + to: createdSequences[idxTo], + }); + } + + delete createdSequences[idxTo]; + delete deletedSequences[idxFrom]; + + createdSequences = createdSequences.filter(Boolean); + deletedSequences = deletedSequences.filter(Boolean); + } + } + + result.created = createdSequences; + result.deleted = deletedSequences; + + return result; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const testEnumsResolver = (renames: Set) => +async ( + input: ResolverInput, +): Promise> => { + try { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || 
renames.size === 0 + ) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; + } + + let createdEnums = [...input.created]; + let deletedEnums = [...input.deleted]; + + const result: { + created: Enum[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: Enum; to: Enum }[]; + deleted: Enum[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; + + for (let rename of renames) { + const [from, to] = rename.split('->'); + + const idxFrom = deletedEnums.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === from; + }); + + if (idxFrom >= 0) { + const idxTo = createdEnums.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; + }); + + const tableFrom = deletedEnums[idxFrom]; + const tableTo = createdEnums[idxFrom]; + + if (tableFrom.schema !== tableTo.schema) { + result.moved.push({ + name: tableFrom.name, + schemaFrom: tableFrom.schema, + schemaTo: tableTo.schema, + }); + } + + if (tableFrom.name !== tableTo.name) { + result.renamed.push({ + from: deletedEnums[idxFrom], + to: createdEnums[idxTo], + }); + } + + delete createdEnums[idxTo]; + delete deletedEnums[idxFrom]; + + createdEnums = createdEnums.filter(Boolean); + deletedEnums = deletedEnums.filter(Boolean); + } + } + + result.created = createdEnums; + result.deleted = deletedEnums; + + return result; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const testTablesResolver = (renames: Set) => +async ( + input: ResolverInput
, +): Promise> => { + try { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || renames.size === 0 + ) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; + } + + let createdTables = [...input.created]; + let deletedTables = [...input.deleted]; + + const result: { + created: Table[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: Table; to: Table }[]; + deleted: Table[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; + + for (let rename of renames) { + const [from, to] = rename.split('->'); + + const idxFrom = deletedTables.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === from; + }); + + if (idxFrom >= 0) { + const idxTo = createdTables.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; + }); + + const tableFrom = deletedTables[idxFrom]; + const tableTo = createdTables[idxFrom]; + + if (tableFrom.schema !== tableTo.schema) { + result.moved.push({ + name: tableFrom.name, + schemaFrom: tableFrom.schema, + schemaTo: tableTo.schema, + }); + } + + if (tableFrom.name !== tableTo.name) { + result.renamed.push({ + from: deletedTables[idxFrom], + to: createdTables[idxTo], + }); + } + + delete createdTables[idxTo]; + delete deletedTables[idxFrom]; + + createdTables = createdTables.filter(Boolean); + deletedTables = deletedTables.filter(Boolean); + } + } + + result.created = createdTables; + result.deleted = deletedTables; + + return result; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const testColumnsResolver = (renames: Set) => +async ( + input: ColumnsResolverInput, +): Promise> => { + try { + if ( + input.created.length === 0 + || input.deleted.length === 0 + || renames.size === 0 + ) { + return { + tableName: input.tableName, + schema: input.schema, + created: input.created, + renamed: [], + deleted: input.deleted, + }; + } + + let createdColumns = [...input.created]; + let deletedColumns 
= [...input.deleted]; + + const renamed: { from: Column; to: Column }[] = []; + + const schema = input.schema || 'public'; + + for (let rename of renames) { + const [from, to] = rename.split('->'); + + const idxFrom = deletedColumns.findIndex((it) => { + return `${schema}.${input.tableName}.${it.name}` === from; + }); + + if (idxFrom >= 0) { + const idxTo = createdColumns.findIndex((it) => { + return `${schema}.${input.tableName}.${it.name}` === to; + }); + + renamed.push({ + from: deletedColumns[idxFrom], + to: createdColumns[idxTo], + }); + + delete createdColumns[idxTo]; + delete deletedColumns[idxFrom]; + + createdColumns = createdColumns.filter(Boolean); + deletedColumns = deletedColumns.filter(Boolean); + } + } + + return { + tableName: input.tableName, + schema: input.schema, + created: createdColumns, + deleted: deletedColumns, + renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const diffTestSchemasPush = async ( + client: PGlite, + left: PostgresSchema, + right: PostgresSchema, + renamesArr: string[], + cli: boolean = false, + schemas: string[] = ['public'], +) => { + const { sqlStatements } = await applyPgDiffs(left); + for (const st of sqlStatements) { + await client.query(st); + } + + // do introspect into PgSchemaInternal + const introspectedSchema = await fromDatabase( + { + query: async (query: string, values?: any[] | undefined) => { + const res = await client.query(query, values); + return res.rows as any[]; + }, + }, + undefined, + schemas, + ); + + const leftTables = Object.values(right).filter((it) => is(it, PgTable)) as PgTable[]; + + const leftSchemas = Object.values(right).filter((it) => is(it, PgSchema)) as PgSchema[]; + + const leftEnums = Object.values(right).filter((it) => isPgEnum(it)) as PgEnum[]; + + const leftSequences = Object.values(right).filter((it) => isPgSequence(it)) as PgSequence[]; + + const serialized2 = generatePgSnapshot( + leftTables, + leftEnums, + leftSchemas, + leftSequences, + ); + + 
const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; + const { version: v2, dialect: d2, ...rest2 } = serialized2; + + const sch1 = { + version: '7', + dialect: 'postgresql', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sch2 = { + version: '7', + dialect: 'postgresql', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn1 = squashPgScheme(sch1, 'push'); + const sn2 = squashPgScheme(sch2, 'push'); + + const validatedPrev = pgSchema.parse(sch1); + const validatedCur = pgSchema.parse(sch2); + + const renames = new Set(renamesArr); + + if (!cli) { + const { sqlStatements, statements } = await applyPgSnapshotsDiff( + sn1, + sn2, + testSchemasResolver(renames), + testEnumsResolver(renames), + testSequencesResolver(renames), + testTablesResolver(renames), + testColumnsResolver(renames), + validatedPrev, + validatedCur, + 'push', + ); + return { sqlStatements, statements }; + } else { + const { sqlStatements, statements } = await applyPgSnapshotsDiff( + sn1, + sn2, + schemasResolver, + enumsResolver, + sequencesResolver, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + 'push', + ); + return { sqlStatements, statements }; + } +}; + +export const applyPgDiffs = async (sn: PostgresSchema) => { + const dryRun = { + version: '7', + dialect: 'postgresql', + id: '0', + prevId: '0', + tables: {}, + enums: {}, + schemas: {}, + sequences: {}, + _meta: { + schemas: {}, + tables: {}, + columns: {}, + }, + } as const; + + const tables = Object.values(sn).filter((it) => is(it, PgTable)) as PgTable[]; + + const schemas = Object.values(sn).filter((it) => is(it, PgSchema)) as PgSchema[]; + + const enums = Object.values(sn).filter((it) => isPgEnum(it)) as PgEnum[]; + + const sequences = Object.values(sn).filter((it) => isPgSequence(it)) as PgSequence[]; + + const serialized1 = generatePgSnapshot(tables, enums, schemas, sequences); + + const { version: v1, dialect: d1, ...rest1 } = serialized1; + + const sch1 = { + version: 
'7', + dialect: 'postgresql', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sn1 = squashPgScheme(sch1); + + const validatedPrev = pgSchema.parse(dryRun); + const validatedCur = pgSchema.parse(sch1); + + const { sqlStatements, statements } = await applyPgSnapshotsDiff( + dryRun, + sn1, + testSchemasResolver(new Set()), + testEnumsResolver(new Set()), + testSequencesResolver(new Set()), + testTablesResolver(new Set()), + testColumnsResolver(new Set()), + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; +}; + +export const diffTestSchemas = async ( + left: PostgresSchema, + right: PostgresSchema, + renamesArr: string[], + cli: boolean = false, +) => { + const leftTables = Object.values(left).filter((it) => is(it, PgTable)) as PgTable[]; + + const rightTables = Object.values(right).filter((it) => is(it, PgTable)) as PgTable[]; + + const leftSchemas = Object.values(left).filter((it) => is(it, PgSchema)) as PgSchema[]; + + const rightSchemas = Object.values(right).filter((it) => is(it, PgSchema)) as PgSchema[]; + + const leftEnums = Object.values(left).filter((it) => isPgEnum(it)) as PgEnum[]; + + const rightEnums = Object.values(right).filter((it) => isPgEnum(it)) as PgEnum[]; + + const leftSequences = Object.values(left).filter((it) => isPgSequence(it)) as PgSequence[]; + + const rightSequences = Object.values(right).filter((it) => isPgSequence(it)) as PgSequence[]; + + const serialized1 = generatePgSnapshot( + leftTables, + leftEnums, + leftSchemas, + leftSequences, + ); + const serialized2 = generatePgSnapshot( + rightTables, + rightEnums, + rightSchemas, + rightSequences, + ); + + const { version: v1, dialect: d1, ...rest1 } = serialized1; + const { version: v2, dialect: d2, ...rest2 } = serialized2; + + const sch1 = { + version: '7', + dialect: 'postgresql', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sch2 = { + version: '7', + dialect: 'postgresql', + id: '0', + prevId: '0', + ...rest2, + } as const; + + 
const sn1 = squashPgScheme(sch1); + const sn2 = squashPgScheme(sch2); + + const validatedPrev = pgSchema.parse(sch1); + const validatedCur = pgSchema.parse(sch2); + + const renames = new Set(renamesArr); + + if (!cli) { + const { sqlStatements, statements } = await applyPgSnapshotsDiff( + sn1, + sn2, + testSchemasResolver(renames), + testEnumsResolver(renames), + testSequencesResolver(renames), + testTablesResolver(renames), + testColumnsResolver(renames), + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; + } else { + const { sqlStatements, statements } = await applyPgSnapshotsDiff( + sn1, + sn2, + schemasResolver, + enumsResolver, + sequencesResolver, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; + } +}; + +export const diffTestSchemasPushMysql = async ( + client: Connection, + left: MysqlSchema, + right: MysqlSchema, + renamesArr: string[], + schema: string, + cli: boolean = false, +) => { + const { sqlStatements } = await applyMySqlDiffs(left); + for (const st of sqlStatements) { + await client.query(st); + } + // do introspect into PgSchemaInternal + const introspectedSchema = await fromMySqlDatabase( + { + query: async (sql: string, params?: any[]) => { + const res = await client.execute(sql, params); + return res[0] as any; + }, + }, + schema, + ); + + const leftTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + + const serialized2 = generateMySqlSnapshot(leftTables); + + const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; + const { version: v2, dialect: d2, ...rest2 } = serialized2; + + const sch1 = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sch2 = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn1 = squashMysqlScheme(sch1); + const sn2 = squashMysqlScheme(sch2); + + const validatedPrev = 
mysqlSchema.parse(sch1); + const validatedCur = mysqlSchema.parse(sch2); + + const renames = new Set(renamesArr); + + if (!cli) { + const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + sn1, + sn2, + testTablesResolver(renames), + testColumnsResolver(renames), + validatedPrev, + validatedCur, + 'push', + ); + return { sqlStatements, statements }; + } else { + const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + sn1, + sn2, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + 'push', + ); + return { sqlStatements, statements }; + } +}; + +export const applyMySqlDiffs = async (sn: MysqlSchema) => { + const dryRun = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + tables: {}, + enums: {}, + schemas: {}, + _meta: { + schemas: {}, + tables: {}, + columns: {}, + }, + } as const; + + const tables = Object.values(sn).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + + const serialized1 = generateMySqlSnapshot(tables); + + const { version: v1, dialect: d1, ...rest1 } = serialized1; + + const sch1 = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sn1 = squashMysqlScheme(sch1); + + const validatedPrev = mysqlSchema.parse(dryRun); + const validatedCur = mysqlSchema.parse(sch1); + + const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + dryRun, + sn1, + testTablesResolver(new Set()), + testColumnsResolver(new Set()), + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; +}; + +export const diffTestSchemasMysql = async ( + left: MysqlSchema, + right: MysqlSchema, + renamesArr: string[], + cli: boolean = false, +) => { + const leftTables = Object.values(left).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + + const rightTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + + const serialized1 = generateMySqlSnapshot(leftTables); + const serialized2 = 
generateMySqlSnapshot(rightTables); + + const { version: v1, dialect: d1, ...rest1 } = serialized1; + const { version: v2, dialect: d2, ...rest2 } = serialized2; + + const sch1 = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sch2 = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn1 = squashMysqlScheme(sch1); + const sn2 = squashMysqlScheme(sch2); + + const validatedPrev = mysqlSchema.parse(sch1); + const validatedCur = mysqlSchema.parse(sch2); + + const renames = new Set(renamesArr); + + if (!cli) { + const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + sn1, + sn2, + testTablesResolver(renames), + testColumnsResolver(renames), + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; + } + + const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( + sn1, + sn2, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; +}; + +export const diffTestSchemasSingleStore = async ( + left: SinglestoreSchema, + right: SinglestoreSchema, + renamesArr: string[], + cli: boolean = false, +) => { + const leftTables = Object.values(left).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; + + const rightTables = Object.values(right).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; + + const serialized1 = generateSingleStoreSnapshot(leftTables); + const serialized2 = generateSingleStoreSnapshot(rightTables); + + const { version: v1, dialect: d1, ...rest1 } = serialized1; + const { version: v2, dialect: d2, ...rest2 } = serialized2; + + const sch1 = { + version: '1', + dialect: 'singlestore', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sch2 = { + version: '1', + dialect: 'singlestore', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn1 = squashSingleStoreScheme(sch1); + const sn2 = squashSingleStoreScheme(sch2); 
+ + const validatedPrev = singlestoreSchema.parse(sch1); + const validatedCur = singlestoreSchema.parse(sch2); + + const renames = new Set(renamesArr); + + if (!cli) { + const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( + sn1, + sn2, + testTablesResolver(renames), + testColumnsResolver(renames), + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; + } + + const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( + sn1, + sn2, + tablesResolver, + columnsResolver, + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; +}; + +export const applySingleStoreDiffs = async (sn: SingleStoreSchema) => { + const dryRun = { + version: '1', + dialect: 'singlestore', + id: '0', + prevId: '0', + tables: {}, + enums: {}, + schemas: {}, + _meta: { + schemas: {}, + tables: {}, + columns: {}, + }, + } as const; + + const tables = Object.values(sn).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; + + const serialized1 = generateSingleStoreSnapshot(tables); + + const { version: v1, dialect: d1, ...rest1 } = serialized1; + + const sch1 = { + version: '1', + dialect: 'singlestore', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sn1 = squashSingleStoreScheme(sch1); + + const validatedPrev = singlestoreSchema.parse(dryRun); + const validatedCur = singlestoreSchema.parse(sch1); + + const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( + dryRun, + sn1, + testTablesResolver(new Set()), + testColumnsResolver(new Set()), + validatedPrev, + validatedCur, + ); + return { sqlStatements, statements }; +}; + +export const diffTestSchemasPushSqlite = async ( + client: Database, + left: SqliteSchema, + right: SqliteSchema, + renamesArr: string[], + cli: boolean = false, +) => { + const { sqlStatements } = await applySqliteDiffs(left, 'push'); + for (const st of sqlStatements) { + client.exec(st); + } + // do introspect into PgSchemaInternal + const 
introspectedSchema = await fromSqliteDatabase( + { + query: async (sql: string, params: any[] = []) => { + return client.prepare(sql).bind(params).all() as T[]; + }, + run: async (query: string) => { + client.prepare(query).run(); + }, + }, + undefined, + ); + + const leftTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; + + const serialized2 = generateSqliteSnapshot(leftTables); + + const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; + const { version: v2, dialect: d2, ...rest2 } = serialized2; + + const sch1 = { + version: '6', + dialect: 'sqlite', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sch2 = { + version: '6', + dialect: 'sqlite', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn1 = squashSqliteScheme(sch1, 'push'); + const sn2 = squashSqliteScheme(sch2, 'push'); + + const renames = new Set(renamesArr); + + if (!cli) { + const { sqlStatements, statements, _meta } = await applySqliteSnapshotsDiff( + sn1, + sn2, + testTablesResolver(renames), + testColumnsResolver(renames), + sch1, + sch2, + 'push', + ); + + const { statementsToExecute } = await logSuggestionsAndReturn( + { + query: async (sql: string, params: any[] = []) => { + return client.prepare(sql).bind(params).all() as T[]; + }, + run: async (query: string) => { + client.prepare(query).run(); + }, + }, + statements, + sn1, + sn2, + _meta!, + ); + + return { sqlStatements: statementsToExecute, statements }; + } else { + const { sqlStatements, statements } = await applySqliteSnapshotsDiff( + sn1, + sn2, + tablesResolver, + columnsResolver, + sch1, + sch2, + 'push', + ); + return { sqlStatements, statements }; + } +}; + +export const applySqliteDiffs = async ( + sn: SqliteSchema, + action?: 'push' | undefined, +) => { + const dryRun = { + version: '6', + dialect: 'sqlite', + id: '0', + prevId: '0', + tables: {}, + enums: {}, + schemas: {}, + _meta: { + schemas: {}, + tables: {}, + columns: {}, + }, + } as const; + + const 
tables = Object.values(sn).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; + + const serialized1 = generateSqliteSnapshot(tables); + + const { version: v1, dialect: d1, ...rest1 } = serialized1; + + const sch1 = { + version: '6', + dialect: 'sqlite', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sn1 = squashSqliteScheme(sch1, action); + + const { sqlStatements, statements } = await applySqliteSnapshotsDiff( + dryRun, + sn1, + testTablesResolver(new Set()), + testColumnsResolver(new Set()), + dryRun, + sch1, + action, + ); + + return { sqlStatements, statements }; +}; + +export const diffTestSchemasSqlite = async ( + left: SqliteSchema, + right: SqliteSchema, + renamesArr: string[], + cli: boolean = false, +) => { + const leftTables = Object.values(left).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; + + const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; + + const serialized1 = generateSqliteSnapshot(leftTables); + const serialized2 = generateSqliteSnapshot(rightTables); + + const { version: v1, dialect: d1, ...rest1 } = serialized1; + const { version: v2, dialect: d2, ...rest2 } = serialized2; + + const sch1 = { + version: '6', + dialect: 'sqlite', + id: '0', + prevId: '0', + ...rest1, + } as const; + + const sch2 = { + version: '6', + dialect: 'sqlite', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn1 = squashSqliteScheme(sch1); + const sn2 = squashSqliteScheme(sch2); + + const renames = new Set(renamesArr); + + if (!cli) { + const { sqlStatements, statements } = await applySqliteSnapshotsDiff( + sn1, + sn2, + testTablesResolver(renames), + testColumnsResolver(renames), + sch1, + sch2, + ); + return { sqlStatements, statements }; + } + + const { sqlStatements, statements } = await applySqliteSnapshotsDiff( + sn1, + sn2, + tablesResolver, + columnsResolver, + sch1, + sch2, + ); + return { sqlStatements, statements }; +}; + +// --- Introspect to file helpers --- + +export const 
introspectPgToFile = async ( + client: PGlite, + initSchema: PostgresSchema, + testName: string, + schemas: string[] = ['public'], +) => { + // put in db + const { sqlStatements } = await applyPgDiffs(initSchema); + for (const st of sqlStatements) { + await client.query(st); + } + + // introspect to schema + const introspectedSchema = await fromDatabase( + { + query: async (query: string, values?: any[] | undefined) => { + const res = await client.query(query, values); + return res.rows as any[]; + }, + }, + undefined, + schemas, + ); + + const file = schemaToTypeScript(introspectedSchema, 'camel'); + + fs.writeFileSync(`tests/introspect/${testName}.ts`, file.file); + + const response = await prepareFromPgImports([ + `tests/introspect/${testName}.ts`, + ]); + + const afterFileImports = generatePgSnapshot( + response.tables, + response.enums, + response.schemas, + response.sequences, + ); + + const { version: v2, dialect: d2, ...rest2 } = afterFileImports; + + const sch2 = { + version: '7', + dialect: 'postgresql', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn2AfterIm = squashPgScheme(sch2); + const validatedCurAfterImport = pgSchema.parse(sch2); + + const leftTables = Object.values(initSchema).filter((it) => is(it, PgTable)) as PgTable[]; + + const leftSchemas = Object.values(initSchema).filter((it) => is(it, PgSchema)) as PgSchema[]; + + const leftEnums = Object.values(initSchema).filter((it) => isPgEnum(it)) as PgEnum[]; + + const leftSequences = Object.values(initSchema).filter((it) => isPgSequence(it)) as PgSequence[]; + + const initSnapshot = generatePgSnapshot( + leftTables, + leftEnums, + leftSchemas, + leftSequences, + ); + + const { version: initV, dialect: initD, ...initRest } = initSnapshot; + + const initSch = { + version: '7', + dialect: 'postgresql', + id: '0', + prevId: '0', + ...initRest, + } as const; + + const initSn = squashPgScheme(initSch); + const validatedCur = pgSchema.parse(initSch); + + const { + sqlStatements: 
afterFileSqlStatements, + statements: afterFileStatements, + } = await applyPgSnapshotsDiff( + sn2AfterIm, + initSn, + testSchemasResolver(new Set()), + testEnumsResolver(new Set()), + testSequencesResolver(new Set()), + testTablesResolver(new Set()), + testColumnsResolver(new Set()), + validatedCurAfterImport, + validatedCur, + ); + + fs.rmSync(`tests/introspect/${testName}.ts`); + + return { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + }; +}; + +export const introspectMySQLToFile = async ( + client: Connection, + initSchema: MysqlSchema, + testName: string, + schema: string, +) => { + // put in db + const { sqlStatements } = await applyMySqlDiffs(initSchema); + for (const st of sqlStatements) { + await client.query(st); + } + + // introspect to schema + const introspectedSchema = await fromMySqlDatabase( + { + query: async (sql: string, params?: any[] | undefined) => { + const res = await client.execute(sql, params); + return res[0] as any; + }, + }, + schema, + ); + + const file = schemaToTypeScriptMySQL(introspectedSchema, 'camel'); + + fs.writeFileSync(`tests/introspect/mysql/${testName}.ts`, file.file); + + const response = await prepareFromMySqlImports([ + `tests/introspect/mysql/${testName}.ts`, + ]); + + const afterFileImports = generateMySqlSnapshot(response.tables); + + const { version: v2, dialect: d2, ...rest2 } = afterFileImports; + + const sch2 = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn2AfterIm = squashMysqlScheme(sch2); + const validatedCurAfterImport = mysqlSchema.parse(sch2); + + const leftTables = Object.values(initSchema).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + + const initSnapshot = generateMySqlSnapshot(leftTables); + + const { version: initV, dialect: initD, ...initRest } = initSnapshot; + + const initSch = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + ...initRest, + } as const; + + const initSn = 
squashMysqlScheme(initSch); + const validatedCur = mysqlSchema.parse(initSch); + + const { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + } = await applyMysqlSnapshotsDiff( + sn2AfterIm, + initSn, + testTablesResolver(new Set()), + testColumnsResolver(new Set()), + validatedCurAfterImport, + validatedCur, + ); + + fs.rmSync(`tests/introspect/mysql/${testName}.ts`); + + return { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + }; +}; + +export const introspectSingleStoreToFile = async ( + client: Connection, + initSchema: SingleStoreSchema, + testName: string, + schema: string, +) => { + // put in db + const { sqlStatements } = await applySingleStoreDiffs(initSchema); + for (const st of sqlStatements) { + await client.query(st); + } + + // introspect to schema + const introspectedSchema = await fromSingleStoreDatabase( + { + query: async (sql: string, params?: any[] | undefined) => { + const res = await client.execute(sql, params); + return res[0] as any; + }, + }, + schema, + ); + + const file = schemaToTypeScriptSingleStore(introspectedSchema, 'camel'); + + fs.writeFileSync(`tests/introspect/singlestore/${testName}.ts`, file.file); + + const response = await prepareFromSingleStoreImports([ + `tests/introspect/singlestore/${testName}.ts`, + ]); + + const afterFileImports = generateSingleStoreSnapshot(response.tables); + + const { version: v2, dialect: d2, ...rest2 } = afterFileImports; + + const sch2 = { + version: '1', + dialect: 'singlestore', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn2AfterIm = squashSingleStoreScheme(sch2); + const validatedCurAfterImport = singlestoreSchema.parse(sch2); + + const leftTables = Object.values(initSchema).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; + + const initSnapshot = generateSingleStoreSnapshot(leftTables); + + const { version: initV, dialect: initD, ...initRest } = initSnapshot; + + const initSch = { + version: '1', + 
dialect: 'singlestore', + id: '0', + prevId: '0', + ...initRest, + } as const; + + const initSn = squashSingleStoreScheme(initSch); + const validatedCur = singlestoreSchema.parse(initSch); + + const { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + } = await applySingleStoreSnapshotsDiff( + sn2AfterIm, + initSn, + testTablesResolver(new Set()), + testColumnsResolver(new Set()), + validatedCurAfterImport, + validatedCur, + ); + + fs.rmSync(`tests/introspect/singlestore/${testName}.ts`); + + return { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + }; +}; + +export const introspectSQLiteToFile = async ( + client: Database, + initSchema: SqliteSchema, + testName: string, +) => { + // put in db + const { sqlStatements } = await applySqliteDiffs(initSchema); + for (const st of sqlStatements) { + client.exec(st); + } + + // introspect to schema + const introspectedSchema = await fromSqliteDatabase( + { + query: async (sql: string, params: any[] = []) => { + return client.prepare(sql).bind(params).all() as T[]; + }, + run: async (query: string) => { + client.prepare(query).run(); + }, + }, + undefined, + ); + + const file = schemaToTypeScriptSQLite(introspectedSchema, 'camel'); + + fs.writeFileSync(`tests/introspect/sqlite/${testName}.ts`, file.file); + + const response = await prepareFromSqliteImports([ + `tests/introspect/sqlite/${testName}.ts`, + ]); + + const afterFileImports = generateSqliteSnapshot(response.tables); + + const { version: v2, dialect: d2, ...rest2 } = afterFileImports; + + const sch2 = { + version: '6', + dialect: 'sqlite', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn2AfterIm = squashSqliteScheme(sch2); + const validatedCurAfterImport = sqliteSchema.parse(sch2); + + const leftTables = Object.values(initSchema).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; + + const initSnapshot = generateSqliteSnapshot(leftTables); + + const { version: initV, dialect: initD, 
...initRest } = initSnapshot; + + const initSch = { + version: '6', + dialect: 'sqlite', + id: '0', + prevId: '0', + ...initRest, + } as const; + + const initSn = squashSqliteScheme(initSch); + const validatedCur = sqliteSchema.parse(initSch); + + const { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + } = await applySqliteSnapshotsDiff( + sn2AfterIm, + initSn, + testTablesResolver(new Set()), + testColumnsResolver(new Set()), + validatedCurAfterImport, + validatedCur, + ); + + fs.rmSync(`tests/introspect/sqlite/${testName}.ts`); + + return { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + }; +}; diff --git a/drizzle-kit/tests/singlestore-generated.test.ts b/drizzle-kit/tests/singlestore-generated.test.ts new file mode 100644 index 000000000..8944f3b21 --- /dev/null +++ b/drizzle-kit/tests/singlestore-generated.test.ts @@ -0,0 +1,1290 @@ +import { SQL, sql } from 'drizzle-orm'; +import { int, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasSingleStore } from './schemaDiffer'; + +test('generated as callback: add column with generated constraint', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + autoincrement: false, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); 
+ expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as callback: add generated constraint to an exisiting column as stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { + mode: 'stored', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'stored', + }, + columnAutoIncrement: false, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;", + ]); +}); + +test('generated as callback: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); 
+ + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;", + ]); +}); + +test('generated as callback: drop generated constraint as stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name} || 'to delete'`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'stored', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', + ]); +}); + +test('generated as callback: drop generated constraint as virtual', async () => { + const from = { + users: 
singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name} || 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'virtual', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +test('generated as callback: change generated constraint type from virtual to stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: 
undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as callback: change generated constraint type from stored to virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +test('generated as callback: change generated constraint', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: 
text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +// --- + +test('generated as sql: add column with generated constraint', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + autoincrement: false, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS 
(`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as sql: add generated constraint to an exisiting column as stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(sql`\`users\`.\`name\` || 'to add'`, { + mode: 'stored', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'stored', + }, + columnAutoIncrement: false, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;", + ]); +}); + +test('generated as sql: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(sql`\`users\`.\`name\` || 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: 
"`users`.`name` || 'to add'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;", + ]); +}); + +test('generated as sql: drop generated constraint as stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'to delete'`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'stored', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', + ]); +}); + +test('generated as sql: drop generated constraint as virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + 
sql`\`users\`.\`name\` || 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'virtual', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +test('generated as sql: change generated constraint type from virtual to stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\``, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: 
false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as sql: change generated constraint type from stored to virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\``, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\` || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +test('generated as sql: change generated constraint', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\`users\`.\`name\``, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( 
+ sql`\`users\`.\`name\` || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +// --- + +test('generated as string: add column with generated constraint', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + autoincrement: false, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + schema: '', + tableName: 'users', + type: 'alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + ]); +}); + +test('generated as string: add generated constraint to an exisiting column as stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + 
generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(`\`users\`.\`name\` || 'to add'`, { + mode: 'stored', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'stored', + }, + columnAutoIncrement: false, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;", + ]); +}); + +test('generated as string: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(`\`users\`.\`name\` || 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'to add'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + 
expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;", + ]); +}); + +test('generated as string: drop generated constraint as stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'to delete'`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'stored', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', + ]); +}); + +test('generated as string: drop generated constraint as virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await 
diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + oldColumn: { + autoincrement: false, + generated: { + as: "`users`.`name` || 'to delete'", + type: 'virtual', + }, + name: 'gen_name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +test('generated as string: change generated constraint type from virtual to stored', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``, { + mode: 'virtual', + }), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'stored', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` 
|| 'hello') STORED;", + ]); +}); + +test('generated as string: change generated constraint type from stored to virtual', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); + +test('generated as string: change generated constraint', async () => { + const from = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``), + }), + }; + const to = { + users: singlestoreTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\`users\`.\`name\` || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSingleStore( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: "`users`.`name` || 'hello'", + 
type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` drop column `gen_name`;', + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]); +}); diff --git a/drizzle-kit/tests/singlestore-schemas.test.ts b/drizzle-kit/tests/singlestore-schemas.test.ts new file mode 100644 index 000000000..db9fe0480 --- /dev/null +++ b/drizzle-kit/tests/singlestore-schemas.test.ts @@ -0,0 +1,155 @@ +import { singlestoreSchema, singlestoreTable } from 'drizzle-orm/singlestore-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasSingleStore } from './schemaDiffer'; + +// We don't manage databases(schemas) in MySQL with Drizzle Kit +test('add schema #1', async () => { + const to = { + devSchema: singlestoreSchema('dev'), + }; + + const { statements } = await diffTestSchemasSingleStore({}, to, []); + + expect(statements.length).toBe(0); +}); + +test('add schema #2', async () => { + const from = { + devSchema: singlestoreSchema('dev'), + }; + const to = { + devSchema: singlestoreSchema('dev'), + devSchema2: singlestoreSchema('dev2'), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, []); + + expect(statements.length).toBe(0); +}); + +test('delete schema #1', async () => { + const from = { + devSchema: singlestoreSchema('dev'), + }; + + const { statements } = await diffTestSchemasSingleStore(from, {}, []); + + expect(statements.length).toBe(0); +}); + +test('delete schema #2', async () => { + const from = { + devSchema: singlestoreSchema('dev'), + devSchema2: singlestoreSchema('dev2'), + }; + const to = { + devSchema: singlestoreSchema('dev'), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, []); + + 
expect(statements.length).toBe(0); +}); + +test('rename schema #1', async () => { + const from = { + devSchema: singlestoreSchema('dev'), + }; + const to = { + devSchema2: singlestoreSchema('dev2'), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev->dev2']); + + expect(statements.length).toBe(0); +}); + +test('rename schema #2', async () => { + const from = { + devSchema: singlestoreSchema('dev'), + devSchema1: singlestoreSchema('dev1'), + }; + const to = { + devSchema: singlestoreSchema('dev'), + devSchema2: singlestoreSchema('dev2'), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); + +test('add table to schema #1', async () => { + const dev = singlestoreSchema('dev'); + const from = {}; + const to = { + dev, + users: dev.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); + +test('add table to schema #2', async () => { + const dev = singlestoreSchema('dev'); + const from = { dev }; + const to = { + dev, + users: dev.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); + +test('add table to schema #3', async () => { + const dev = singlestoreSchema('dev'); + const from = { dev }; + const to = { + dev, + usersInDev: dev.table('users', {}), + users: singlestoreTable('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + uniqueConstraints: [], + internals: { + tables: {}, + indexes: {}, + }, + compositePkName: '', + compositePKs: [], + }); +}); + +test('remove table from schema #1', async () => { + const dev = singlestoreSchema('dev'); + 
const from = { dev, users: dev.table('users', {}) }; + const to = { + dev, + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); + +test('remove table from schema #2', async () => { + const dev = singlestoreSchema('dev'); + const from = { dev, users: dev.table('users', {}) }; + const to = {}; + + const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + + expect(statements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/singlestore.test.ts b/drizzle-kit/tests/singlestore.test.ts new file mode 100644 index 000000000..63abf1755 --- /dev/null +++ b/drizzle-kit/tests/singlestore.test.ts @@ -0,0 +1,578 @@ +import { sql } from 'drizzle-orm'; +import { + index, + json, + primaryKey, + serial, + singlestoreSchema, + singlestoreTable, + text, + uniqueIndex, +} from 'drizzle-orm/singlestore-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasSingleStore } from './schemaDiffer'; + +test('add table #1', async () => { + const to = { + users: singlestoreTable('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + compositePKs: [], + internals: { + tables: {}, + indexes: {}, + }, + uniqueConstraints: [], + compositePkName: '', + }); +}); + +test('add table #2', async () => { + const to = { + users: singlestoreTable('users', { + id: serial('id').primaryKey(), + }), + }; + + const { statements } = await diffTestSchemasSingleStore({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [ + { + name: 'id', + notNull: true, + primaryKey: false, + type: 'serial', + autoincrement: true, + }, + ], + compositePKs: ['users_id;id'], + compositePkName: 
'users_id', + uniqueConstraints: [], + internals: { + tables: {}, + indexes: {}, + }, + }); +}); + +test('add table #3', async () => { + const to = { + users: singlestoreTable( + 'users', + { + id: serial('id'), + }, + (t) => { + return { + pk: primaryKey({ + name: 'users_pk', + columns: [t.id], + }), + }; + }, + ), + }; + + const { statements } = await diffTestSchemasSingleStore({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [ + { + name: 'id', + notNull: true, + primaryKey: false, + type: 'serial', + autoincrement: true, + }, + ], + compositePKs: ['users_pk;id'], + uniqueConstraints: [], + compositePkName: 'users_pk', + internals: { + tables: {}, + indexes: {}, + }, + }); +}); + +test('add table #4', async () => { + const to = { + users: singlestoreTable('users', {}), + posts: singlestoreTable('posts', {}), + }; + + const { statements } = await diffTestSchemasSingleStore({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + internals: { + tables: {}, + indexes: {}, + }, + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); + expect(statements[1]).toStrictEqual({ + type: 'create_table', + tableName: 'posts', + schema: undefined, + columns: [], + compositePKs: [], + internals: { + tables: {}, + indexes: {}, + }, + uniqueConstraints: [], + compositePkName: '', + }); +}); + +test('add table #5', async () => { + const schema = singlestoreSchema('folder'); + const from = { + schema, + }; + + const to = { + schema, + users: schema.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, []); + + expect(statements.length).toBe(0); +}); + +test('add table #6', async () => { + const from = { + users1: singlestoreTable('users1', {}), + }; + + const to = { + users2: 
singlestoreTable('users2', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users2', + schema: undefined, + columns: [], + internals: { + tables: {}, + indexes: {}, + }, + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + }); + expect(statements[1]).toStrictEqual({ + type: 'drop_table', + tableName: 'users1', + schema: undefined, + }); +}); + +test('add table #7', async () => { + const from = { + users1: singlestoreTable('users1', {}), + }; + + const to = { + users: singlestoreTable('users', {}), + users2: singlestoreTable('users2', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'public.users1->public.users2', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + compositePKs: [], + uniqueConstraints: [], + internals: { + tables: {}, + indexes: {}, + }, + compositePkName: '', + }); + expect(statements[1]).toStrictEqual({ + type: 'rename_table', + tableNameFrom: 'users1', + tableNameTo: 'users2', + fromSchema: undefined, + toSchema: undefined, + }); +}); + +test('add schema + table #1', async () => { + const schema = singlestoreSchema('folder'); + + const to = { + schema, + users: schema.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore({}, to, []); + + expect(statements.length).toBe(0); +}); + +test('change schema with tables #1', async () => { + const schema = singlestoreSchema('folder'); + const schema2 = singlestoreSchema('folder2'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema2, + users: schema2.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'folder->folder2', + ]); + + expect(statements.length).toBe(0); +}); + 
+test('change table schema #1', async () => { + const schema = singlestoreSchema('folder'); + const from = { + schema, + users: singlestoreTable('users', {}), + }; + const to = { + schema, + users: schema.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'public.users->folder.users', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'drop_table', + tableName: 'users', + schema: undefined, + }); +}); + +test('change table schema #2', async () => { + const schema = singlestoreSchema('folder'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema, + users: singlestoreTable('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'folder.users->public.users', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: undefined, + columns: [], + uniqueConstraints: [], + compositePkName: '', + compositePKs: [], + internals: { + tables: {}, + indexes: {}, + }, + }); +}); + +test('change table schema #3', async () => { + const schema1 = singlestoreSchema('folder1'); + const schema2 = singlestoreSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users', {}), + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'folder1.users->folder2.users', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #4', async () => { + const schema1 = singlestoreSchema('folder1'); + const schema2 = singlestoreSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 
'folder1.users->folder2.users', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #5', async () => { + const schema1 = singlestoreSchema('folder1'); + const schema2 = singlestoreSchema('folder2'); + const from = { + schema1, // remove schema + users: schema1.table('users', {}), + }; + const to = { + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'folder1.users->folder2.users', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #5', async () => { + const schema1 = singlestoreSchema('folder1'); + const schema2 = singlestoreSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users2', {}), // rename and move table + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'folder1.users->folder2.users2', + ]); + + expect(statements.length).toBe(0); +}); + +test('change table schema #6', async () => { + const schema1 = singlestoreSchema('folder1'); + const schema2 = singlestoreSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema2, // rename schema + users: schema2.table('users2', {}), // rename table + }; + + const { statements } = await diffTestSchemasSingleStore(from, to, [ + 'folder1->folder2', + 'folder2.users->folder2.users2', + ]); + + expect(statements.length).toBe(0); +}); + +test('add table #10', async () => { + const to = { + users: singlestoreTable('table', { + json: json('json').default({}), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + "CREATE TABLE `table` (\n\t`json` json DEFAULT ('{}')\n);\n", + ); +}); + +test('add table #11', async () => { + const to = { + users: singlestoreTable('table', { + 
json: json('json').default([]), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + "CREATE TABLE `table` (\n\t`json` json DEFAULT ('[]')\n);\n", + ); +}); + +test('add table #12', async () => { + const to = { + users: singlestoreTable('table', { + json: json('json').default([1, 2, 3]), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + "CREATE TABLE `table` (\n\t`json` json DEFAULT ('[1,2,3]')\n);\n", + ); +}); + +test('add table #13', async () => { + const to = { + users: singlestoreTable('table', { + json: json('json').default({ key: 'value' }), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + 'CREATE TABLE `table` (\n\t`json` json DEFAULT (\'{"key":"value"}\')\n);\n', + ); +}); + +test('add table #14', async () => { + const to = { + users: singlestoreTable('table', { + json: json('json').default({ + key: 'value', + arr: [1, 2, 3], + }), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + 'CREATE TABLE `table` (\n\t`json` json DEFAULT (\'{"key":"value","arr":[1,2,3]}\')\n);\n', + ); +}); + +// TODO: add bson type tests + +// TODO: add blob type tests + +// TODO: add uuid type tests + +// TODO: add guid type tests + +// TODO: add vector type tests + +// TODO: add geopoint type tests + +test('drop index', async () => { + const from = { + users: singlestoreTable( + 'table', + { + name: text('name'), + }, + (t) => { + return { + idx: index('name_idx').on(t.name), + }; + }, + ), + }; + + const to = { + users: singlestoreTable('table', { + name: text('name'), + }), + }; + + const { sqlStatements } = await 
diffTestSchemasSingleStore(from, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe('DROP INDEX `name_idx` ON `table`;'); +}); + +test('add table with indexes', async () => { + const from = {}; + + const to = { + users: singlestoreTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + email: text('email'), + }, + (t) => ({ + uniqueExpr: uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`), + indexExpr: index('indexExpr').on(sql`(lower(${t.email}))`), + indexExprMultiple: index('indexExprMultiple').on( + sql`(lower(${t.email}))`, + sql`(lower(${t.email}))`, + ), + + uniqueCol: uniqueIndex('uniqueCol').on(t.email), + indexCol: index('indexCol').on(t.email), + indexColMultiple: index('indexColMultiple').on(t.email, t.email), + + indexColExpr: index('indexColExpr').on( + sql`(lower(${t.email}))`, + t.email, + ), + }), + ), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); + expect(sqlStatements.length).toBe(6); + expect(sqlStatements).toStrictEqual([ + `CREATE TABLE \`users\` (\n\t\`id\` serial AUTO_INCREMENT NOT NULL,\n\t\`name\` text,\n\t\`email\` text,\n\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`),\n\tCONSTRAINT \`uniqueExpr\` UNIQUE((lower(\`email\`))),\n\tCONSTRAINT \`uniqueCol\` UNIQUE(\`email\`) +); +`, + 'CREATE INDEX `indexExpr` ON `users` ((lower(`email`)));', + 'CREATE INDEX `indexExprMultiple` ON `users` ((lower(`email`)),(lower(`email`)));', + 'CREATE INDEX `indexCol` ON `users` (`email`);', + 'CREATE INDEX `indexColMultiple` ON `users` (`email`,`email`);', + 'CREATE INDEX `indexColExpr` ON `users` ((lower(`email`)),`email`);', + ]); +}); diff --git a/drizzle-kit/tests/sqlite-columns.test.ts b/drizzle-kit/tests/sqlite-columns.test.ts new file mode 100644 index 000000000..8a258072a --- /dev/null +++ b/drizzle-kit/tests/sqlite-columns.test.ts @@ -0,0 +1,750 @@ +import { + AnySQLiteColumn, + foreignKey, + index, + int, + integer, + primaryKey, + sqliteTable, + text, +} 
from 'drizzle-orm/sqlite-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasSqlite } from './schemaDiffer'; + +test('create table with id', async (t) => { + const schema = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const { statements } = await diffTestSchemasSqlite({}, schema, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'users', + columns: [ + { + name: 'id', + type: 'integer', + primaryKey: true, + notNull: true, + autoincrement: true, + }, + ], + uniqueConstraints: [], + referenceData: [], + compositePKs: [], + }); +}); + +test('add columns #1', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name').notNull(), + }), + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_alter_table_add_column', + tableName: 'users', + referenceData: undefined, + column: { + name: 'name', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + }); +}); + +test('add columns #2', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + email: text('email'), + }), + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_alter_table_add_column', + tableName: 'users', + referenceData: undefined, + column: { + name: 'name', + type: 'text', + primaryKey: 
false, + notNull: false, + autoincrement: false, // TODO: add column has autoincrement??? + }, + }); + expect(statements[1]).toStrictEqual({ + type: 'sqlite_alter_table_add_column', + tableName: 'users', + referenceData: undefined, + column: { + name: 'email', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + }); +}); + +test('add columns #3', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name1: text('name1').default('name'), + name2: text('name2').notNull(), + name3: text('name3').default('name').notNull(), + }), + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); + + expect(statements.length).toBe(3); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_alter_table_add_column', + tableName: 'users', + referenceData: undefined, + column: { + name: 'name1', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, // TODO: add column has autoincrement??? + default: "'name'", + }, + }); + expect(statements[1]).toStrictEqual({ + type: 'sqlite_alter_table_add_column', + tableName: 'users', + referenceData: undefined, + column: { + name: 'name2', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, // TODO: add column has autoincrement??? + }, + }); + expect(statements[2]).toStrictEqual({ + type: 'sqlite_alter_table_add_column', + tableName: 'users', + referenceData: undefined, + column: { + name: 'name3', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, // TODO: add column has autoincrement??? 
+ default: "'name'", + }, + }); +}); + +test('add columns #4', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name', { enum: ['one', 'two'] }), + }), + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_alter_table_add_column', + tableName: 'users', + referenceData: undefined, + column: { + name: 'name', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + }); +}); + +test('add columns #5', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), + }); + + const schema2 = { + users, + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); + + // TODO: Fix here + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_alter_table_add_column', + tableName: 'users', + referenceData: 'users_report_to_users_id_fk;users;report_to;users;id;no action;no action', + column: { + name: 'report_to', + type: 'integer', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + }); +}); + +test('add columns #6', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name'), + email: text('email').unique().notNull(), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name'), + email: text('email').unique().notNull(), + password: 
text('password').notNull(), + }), + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_alter_table_add_column', + tableName: 'users', + referenceData: undefined, + column: { + name: 'password', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + }); +}); + +test('add index #1', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), + }), + }; + + const users = sqliteTable( + 'users', + { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), + }, + (t) => { + return { + reporteeIdx: index('reportee_idx').on(t.reporteeId), + }; + }, + ); + + const schema2 = { + users, + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_index', + tableName: 'users', + internal: { + indexes: {}, + }, + schema: '', + data: 'reportee_idx;report_to;false;', + }); +}); + +test('add foreign key #1', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to'), + }), + }; + + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), + }); + + const schema2 = { + users, + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_reference', + tableName: 'users', + schema: '', + data: 'users_report_to_users_id_fk;users;report_to;users;id;no action;no action', + }); +}); + +test('add foreign 
key #2', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to'), + }), + }; + + const schema2 = { + users: sqliteTable( + 'users', + { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to'), + }, + (t) => { + return { + reporteeFk: foreignKey({ + columns: [t.reporteeId], + foreignColumns: [t.id], + name: 'reportee_fk', + }), + }; + }, + ), + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_reference', + tableName: 'users', + schema: '', + data: 'reportee_fk;users;report_to;users;id;no action;no action', + }); +}); + +test('alter column change name #1', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name1'), + }), + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, [ + 'public.users.name->public.users.name1', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_rename_column', + tableName: 'users', + schema: '', + oldColumnName: 'name', + newColumnName: 'name1', + }); +}); + +test('alter column change name #2', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name1'), + email: text('email'), + }), + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, [ + 'public.users.name->public.users.name1', + ]); + + expect(statements.length).toBe(2); + 
expect(statements[0]).toStrictEqual({ + type: 'alter_table_rename_column', + tableName: 'users', + schema: '', + oldColumnName: 'name', + newColumnName: 'name1', + }); + expect(statements[1]).toStrictEqual({ + type: 'sqlite_alter_table_add_column', + tableName: 'users', + referenceData: undefined, + column: { + name: 'email', + notNull: false, + primaryKey: false, + type: 'text', + autoincrement: false, + }, + }); +}); + +test('alter column change name #3', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + email: text('email'), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name1'), + }), + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, [ + 'public.users.name->public.users.name1', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_rename_column', + tableName: 'users', + schema: '', + oldColumnName: 'name', + newColumnName: 'name1', + }); + + expect(statements[1]).toStrictEqual({ + type: 'alter_table_drop_column', + tableName: 'users', + schema: '', + columnName: 'email', + }); +}); + +test('alter table add composite pk', async (t) => { + const schema1 = { + table: sqliteTable('table', { + id1: integer('id1'), + id2: integer('id2'), + }), + }; + + const schema2 = { + table: sqliteTable( + 'table', + { + id1: integer('id1'), + id2: integer('id2'), + }, + (t) => { + return { + pk: primaryKey({ columns: [t.id1, t.id2] }), + }; + }, + ), + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_composite_pk', + tableName: 'table', + data: 'id1,id2', + }); +}); + +test('alter column drop not null', async (t) => { + const from = { + users: sqliteTable('table', { + name: text('name').notNull(), + 
}), + }; + + const to = { + users: sqliteTable('table', { + name: text('name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_alter_column_drop_notnull', + tableName: 'table', + columnName: 'name', + schema: '', + newDataType: 'text', + columnDefault: undefined, + columnOnUpdate: undefined, + columnNotNull: false, + columnAutoIncrement: false, + columnPk: false, + }); +}); + +test('alter column add not null', async (t) => { + const from = { + users: sqliteTable('table', { + name: text('name'), + }), + }; + + const to = { + users: sqliteTable('table', { + name: text('name').notNull(), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_alter_column_set_notnull', + tableName: 'table', + columnName: 'name', + schema: '', + newDataType: 'text', + columnDefault: undefined, + columnOnUpdate: undefined, + columnNotNull: true, + columnAutoIncrement: false, + columnPk: false, + }); +}); + +test('alter column add default', async (t) => { + const from = { + users: sqliteTable('table', { + name: text('name'), + }), + }; + + const to = { + users: sqliteTable('table', { + name: text('name').default('dan'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_alter_column_set_default', + tableName: 'table', + columnName: 'name', + schema: '', + newDataType: 'text', + columnNotNull: false, + columnOnUpdate: undefined, + columnAutoIncrement: false, + newDefaultValue: "'dan'", + columnPk: false, + }); +}); + +test('alter column drop default', async (t) => { + const from = { + users: sqliteTable('table', { + name: 
text('name').default('dan'), + }), + }; + + const to = { + users: sqliteTable('table', { + name: text('name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_alter_column_drop_default', + tableName: 'table', + columnName: 'name', + schema: '', + newDataType: 'text', + columnNotNull: false, + columnOnUpdate: undefined, + columnDefault: undefined, + columnAutoIncrement: false, + columnPk: false, + }); +}); + +test('alter column add default not null', async (t) => { + const from = { + users: sqliteTable('table', { + name: text('name'), + }), + }; + + const to = { + users: sqliteTable('table', { + name: text('name').notNull().default('dan'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + columnAutoIncrement: false, + columnName: 'name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + newDefaultValue: "'dan'", + schema: '', + tableName: 'table', + type: 'alter_table_alter_column_set_default', + }); + + expect(statements[1]).toStrictEqual({ + columnAutoIncrement: false, + columnDefault: "'dan'", + columnName: 'name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'table', + type: 'alter_table_alter_column_set_notnull', + }); +}); + +test('alter column drop default not null', async (t) => { + const from = { + users: sqliteTable('table', { + name: text('name').notNull().default('dan'), + }), + }; + + const to = { + users: sqliteTable('table', { + name: text('name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + 
columnAutoIncrement: false, + columnDefault: undefined, + columnName: 'name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'table', + type: 'alter_table_alter_column_drop_default', + }); + + expect(statements[1]).toStrictEqual({ + columnAutoIncrement: false, + columnDefault: undefined, + columnName: 'name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'table', + type: 'alter_table_alter_column_drop_notnull', + }); +}); diff --git a/drizzle-kit/tests/sqlite-generated.test.ts b/drizzle-kit/tests/sqlite-generated.test.ts new file mode 100644 index 000000000..749dde825 --- /dev/null +++ b/drizzle-kit/tests/sqlite-generated.test.ts @@ -0,0 +1,1720 @@ +// 1. add stored column to existing table - not supported + +// 2. add virtual column to existing table - supported + +// 3. create table with stored/virtual columns(pg, mysql, sqlite) +// 4. add stored generated to column -> not supported + +// 5. add virtual generated to column -> supported with drop+add column + +// 6. drop stored/virtual expression -> supported with drop+add column +// 7. 
alter generated expession -> stored not supported, virtual supported + +import { SQL, sql } from 'drizzle-orm'; +import { int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasSqlite } from './schemaDiffer'; + +// should generate 0 statements + warning/error in console +test('generated as callback: add column with stored generated constraint', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated as callback: add column with virtual generated constraint', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: '("name" || \'hello\')', + type: 'virtual', + }, + autoincrement: false, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + referenceData: undefined, + tableName: 'users', + type: 'sqlite_alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', + ]); 
+}); + +test('generated as callback: add generated constraint to an exisiting column as stored', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { + mode: 'stored', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated as callback: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: '("name" || \'to add\')', + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'to add\') VIRTUAL NOT NULL;', + ]); +}); + +test('generated as callback: drop generated 
constraint as stored', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name} || 'to delete'`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +test('generated as callback: drop generated constraint as virtual', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name} || 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + 
expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +// no way to do it +test('generated as callback: change generated constraint type from virtual to stored', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated as callback: change generated constraint type from stored to virtual', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: '("name" || \'hello\')', + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', 
+ }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', + ]); +}); + +// not supported +test('generated as callback: change stored generated constraint', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated as callback: change virtual generated constraint', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + ), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: '("name" || \'hello\')', + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + 
expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', + ]); +}); + +test('generated as callback: add table with column with stored generated constraint', async () => { + const from = {}; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columns: [ + { + autoincrement: false, + name: 'id', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + autoincrement: false, + name: 'id2', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + autoincrement: false, + name: 'name', + notNull: false, + primaryKey: false, + type: 'text', + }, + { + autoincrement: false, + generated: { + as: '("name" || \'hello\')', + type: 'stored', + }, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'sqlite_create_table', + uniqueConstraints: [], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') STORED\n);\n', + ]); +}); + +test('generated as callback: add table with column with virtual generated constraint', async () => { + const from = {}; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], 
+ ); + + expect(statements).toStrictEqual([ + { + columns: [ + { + autoincrement: false, + name: 'id', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + autoincrement: false, + name: 'id2', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + autoincrement: false, + name: 'name', + notNull: false, + primaryKey: false, + type: 'text', + }, + { + autoincrement: false, + generated: { + as: '("name" || \'hello\')', + type: 'virtual', + }, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'sqlite_create_table', + uniqueConstraints: [], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL\n);\n', + ]); +}); + +// --- + +test('generated as sql: add column with stored generated constraint', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`"users"."name" || \'hello\' || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated as sql: add column with virtual generated constraint', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`"users"."name" || \'hello\'`, + { mode: 'virtual' }, + ), + }), + }; + + const { 
statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: '("users"."name" || \'hello\')', + type: 'virtual', + }, + autoincrement: false, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + referenceData: undefined, + tableName: 'users', + type: 'sqlite_alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', + ]); +}); + +test('generated as sql: add generated constraint to an exisiting column as stored', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(sql`"users"."name" || 'to add'`, { + mode: 'stored', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated as sql: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(sql`"users"."name" || 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + 
as: '("users"."name" || \'to add\')', + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'to add\') VIRTUAL NOT NULL;', + ]); +}); + +test('generated as sql: drop generated constraint as stored', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`"users"."name" || 'to delete'`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +test('generated as sql: drop generated constraint as virtual', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`"users"."name" || 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: 
text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +// no way to do it +test('generated as sql: change generated constraint type from virtual to stored', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(sql`"users"."name"`, { + mode: 'virtual', + }), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`"users"."name" || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated as sql: change generated constraint type from stored to virtual', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(sql`"users"."name"`, { + mode: 'stored', + }), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`"users"."name" || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, 
+ to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: '("users"."name" || \'hello\')', + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', + ]); +}); + +// not supported +test('generated as sql: change stored generated constraint', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(sql`"users"."name"`, { + mode: 'stored', + }), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`"users"."name" || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated as sql: change virtual generated constraint', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(sql`"users"."name"`), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`"users"."name" || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: 
false, + columnDefault: undefined, + columnGenerated: { + as: '("users"."name" || \'hello\')', + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', + ]); +}); + +test('generated as sql: add table with column with stored generated constraint', async () => { + const from = {}; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`"users"."name" || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columns: [ + { + autoincrement: false, + name: 'id', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + autoincrement: false, + name: 'id2', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + autoincrement: false, + name: 'name', + notNull: false, + primaryKey: false, + type: 'text', + }, + { + autoincrement: false, + generated: { + as: '("users"."name" || \'hello\')', + type: 'stored', + }, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'sqlite_create_table', + uniqueConstraints: [], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED\n);\n', + ]); +}); + +test('generated as sql: add table with column with virtual generated constraint', async () => { + const 
from = {}; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`"users"."name" || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columns: [ + { + autoincrement: false, + name: 'id', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + autoincrement: false, + name: 'id2', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + autoincrement: false, + name: 'name', + notNull: false, + primaryKey: false, + type: 'text', + }, + { + autoincrement: false, + generated: { + as: '("users"."name" || \'hello\')', + type: 'virtual', + }, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'sqlite_create_table', + uniqueConstraints: [], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL\n);\n', + ]); +}); + +// --- + +test('generated as string: add column with stored generated constraint', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `"users"."name" || \'hello\'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated as string: add column with virtual generated constraint', async () => { + const from = { + users: 
sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `"users"."name" || \'hello\'`, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + column: { + generated: { + as: '("users"."name" || \'hello\')', + type: 'virtual', + }, + autoincrement: false, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + referenceData: undefined, + tableName: 'users', + type: 'sqlite_alter_table_add_column', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', + ]); +}); + +test('generated as string: add generated constraint to an exisiting column as stored', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(`"users"."name" || 'to add'`, { + mode: 'stored', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated as string: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: 
text('gen_name') + .notNull() + .generatedAlwaysAs(`"users"."name" || 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: '("users"."name" || \'to add\')', + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: true, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'to add\') VIRTUAL NOT NULL;', + ]); +}); + +test('generated as string: drop generated constraint as stored', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `"users"."name" || 'to delete'`, + { mode: 'stored' }, + ), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +test('generated as string: drop generated constraint as virtual', async () => { + const from = { + users: 
sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `"users"."name" || 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: undefined, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_drop_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text;', + ]); +}); + +// no way to do it +test('generated as string: change generated constraint type from virtual to stored', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(`"users"."name"`, { + mode: 'virtual', + }), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `"users"."name" || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated as string: change generated constraint type from stored to virtual', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(`"users"."name"`, { + mode: 
'stored', + }), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `"users"."name" || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: '("users"."name" || \'hello\')', + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', + ]); +}); + +// not supported +test('generated as string: change stored generated constraint', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(`"users"."name"`, { + mode: 'stored', + }), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `"users"."name" || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated as string: change virtual generated constraint', async () => { + const from = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs(`"users"."name"`), + }), + }; + const to = { + users: 
sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `"users"."name" || 'hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columnAutoIncrement: false, + columnDefault: undefined, + columnGenerated: { + as: '("users"."name" || \'hello\')', + type: 'virtual', + }, + columnName: 'gen_name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_alter_generated', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', + ]); +}); + +test('generated as string: add table with column with stored generated constraint', async () => { + const from = {}; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `"users"."name" || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columns: [ + { + autoincrement: false, + name: 'id', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + autoincrement: false, + name: 'id2', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + autoincrement: false, + name: 'name', + notNull: false, + primaryKey: false, + type: 'text', + }, + { + autoincrement: false, + generated: { + as: '("users"."name" || \'hello\')', + type: 'stored', + }, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'sqlite_create_table', + 
uniqueConstraints: [], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED\n);\n', + ]); +}); + +test('generated as string: add table with column with virtual generated constraint', async () => { + const from = {}; + const to = { + users: sqliteTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `"users"."name" || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite( + from, + to, + [], + ); + + expect(statements).toStrictEqual([ + { + columns: [ + { + autoincrement: false, + name: 'id', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + autoincrement: false, + name: 'id2', + notNull: false, + primaryKey: false, + type: 'integer', + }, + { + autoincrement: false, + name: 'name', + notNull: false, + primaryKey: false, + type: 'text', + }, + { + autoincrement: false, + generated: { + as: '("users"."name" || \'hello\')', + type: 'virtual', + }, + name: 'gen_name', + notNull: false, + primaryKey: false, + type: 'text', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'sqlite_create_table', + uniqueConstraints: [], + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL\n);\n', + ]); +}); diff --git a/drizzle-kit/tests/sqlite-tables.test.ts b/drizzle-kit/tests/sqlite-tables.test.ts new file mode 100644 index 000000000..d7781f150 --- /dev/null +++ b/drizzle-kit/tests/sqlite-tables.test.ts @@ -0,0 +1,399 @@ +import { sql } from 'drizzle-orm'; +import { AnySQLiteColumn, index, int, primaryKey, sqliteTable, text, uniqueIndex } from 'drizzle-orm/sqlite-core'; +import { expect, test } from 
'vitest'; +import { diffTestSchemasSqlite } from './schemaDiffer'; + +test('add table #1', async () => { + const to = { + users: sqliteTable('users', {}), + }; + + const { statements } = await diffTestSchemasSqlite({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'users', + columns: [], + compositePKs: [], + uniqueConstraints: [], + referenceData: [], + }); +}); + +test('add table #2', async () => { + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const { statements } = await diffTestSchemasSqlite({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'users', + columns: [ + { + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + autoincrement: true, + }, + ], + compositePKs: [], + referenceData: [], + uniqueConstraints: [], + }); +}); + +test('add table #3', async () => { + const to = { + users: sqliteTable( + 'users', + { + id: int('id'), + }, + (t) => { + return { + pk: primaryKey({ + name: 'users_pk', + columns: [t.id], + }), + }; + }, + ), + }; + + const { statements } = await diffTestSchemasSqlite({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'users', + columns: [ + { + name: 'id', + notNull: false, + primaryKey: true, + type: 'integer', + autoincrement: false, + }, + ], + compositePKs: [], + uniqueConstraints: [], + referenceData: [], + }); +}); + +test('add table #4', async () => { + const to = { + users: sqliteTable('users', {}), + posts: sqliteTable('posts', {}), + }; + + const { statements } = await diffTestSchemasSqlite({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'users', + columns: [], + compositePKs: [], + uniqueConstraints: 
[], + referenceData: [], + }); + expect(statements[1]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'posts', + columns: [], + compositePKs: [], + uniqueConstraints: [], + referenceData: [], + }); +}); + +test('add table #5', async () => { + // no schemas in sqlite +}); + +test('add table #6', async () => { + const from = { + users1: sqliteTable('users1', {}), + }; + + const to = { + users2: sqliteTable('users2', {}), + }; + + const { statements } = await diffTestSchemasSqlite(from, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'users2', + columns: [], + compositePKs: [], + uniqueConstraints: [], + referenceData: [], + }); + expect(statements[1]).toStrictEqual({ + type: 'drop_table', + tableName: 'users1', + schema: undefined, + }); +}); + +test('add table #7', async () => { + const from = { + users1: sqliteTable('users1', {}), + }; + + const to = { + users: sqliteTable('users', {}), + users2: sqliteTable('users2', {}), + }; + + const { statements } = await diffTestSchemasSqlite(from, to, [ + 'public.users1->public.users2', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'users', + columns: [], + compositePKs: [], + uniqueConstraints: [], + referenceData: [], + }); + expect(statements[1]).toStrictEqual({ + type: 'rename_table', + tableNameFrom: 'users1', + tableNameTo: 'users2', + fromSchema: undefined, + toSchema: undefined, + }); +}); + +test('add table #8', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('reportee_id').references((): AnySQLiteColumn => users.id), + }); + + const to = { + users, + }; + + const { statements } = await diffTestSchemasSqlite({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'users', + columns: [ + 
{ + autoincrement: true, + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }, + { + autoincrement: false, + name: 'reportee_id', + notNull: false, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + uniqueConstraints: [], + referenceData: [ + { + columnsFrom: ['reportee_id'], + columnsTo: ['id'], + name: 'users_reportee_id_users_id_fk', + onDelete: 'no action', + onUpdate: 'no action', + tableFrom: 'users', + tableTo: 'users', + }, + ], + }); +}); + +test('add table #9', async () => { + const to = { + users: sqliteTable( + 'users', + { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('reportee_id'), + }, + (t) => { + return { + reporteeIdx: index('reportee_idx').on(t.reporteeId), + }; + }, + ), + }; + + const { statements } = await diffTestSchemasSqlite({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'users', + columns: [ + { + autoincrement: true, + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }, + { + autoincrement: false, + name: 'reportee_id', + notNull: false, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + uniqueConstraints: [], + referenceData: [], + }); + + expect(statements[1]).toStrictEqual({ + type: 'create_index', + tableName: 'users', + internal: { + indexes: {}, + }, + schema: undefined, + data: 'reportee_idx;reportee_id;false;', + }); +}); + +test('add table #10', async () => { + const to = { + users: sqliteTable('table', { + json: text('json', { mode: 'json' }).default({}), + }), + }; + + const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + "CREATE TABLE `table` (\n\t`json` text DEFAULT '{}'\n);\n", + ); +}); + +test('add table #11', async () => { + const to = { + users: sqliteTable('table', { + json: text('json', { mode: 'json' }).default([]), + }), + }; + + const 
{ sqlStatements } = await diffTestSchemasSqlite({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + "CREATE TABLE `table` (\n\t`json` text DEFAULT '[]'\n);\n", + ); +}); + +test('add table #12', async () => { + const to = { + users: sqliteTable('table', { + json: text('json', { mode: 'json' }).default([1, 2, 3]), + }), + }; + + const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + "CREATE TABLE `table` (\n\t`json` text DEFAULT '[1,2,3]'\n);\n", + ); +}); + +test('add table #13', async () => { + const to = { + users: sqliteTable('table', { + json: text('json', { mode: 'json' }).default({ key: 'value' }), + }), + }; + + const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + 'CREATE TABLE `table` (\n\t`json` text DEFAULT \'{"key":"value"}\'\n);\n', + ); +}); + +test('add table #14', async () => { + const to = { + users: sqliteTable('table', { + json: text('json', { mode: 'json' }).default({ + key: 'value', + arr: [1, 2, 3], + }), + }), + }; + + const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + 'CREATE TABLE `table` (\n\t`json` text DEFAULT \'{"key":"value","arr":[1,2,3]}\'\n);\n', + ); +}); + +test('add table with indexes', async () => { + const from = {}; + + const to = { + users: sqliteTable( + 'users', + { + id: int('id').primaryKey(), + name: text('name'), + email: text('email'), + }, + (t) => ({ + uniqueExpr: uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`), + indexExpr: index('indexExpr').on(sql`(lower(${t.email}))`), + indexExprMultiple: index('indexExprMultiple').on( + sql`(lower(${t.email}))`, + sql`(lower(${t.email}))`, + ), + + uniqueCol: uniqueIndex('uniqueCol').on(t.email), + indexCol: index('indexCol').on(t.email), + indexColMultiple: 
index('indexColMultiple').on(t.email, t.email), + + indexColExpr: index('indexColExpr').on( + sql`(lower(${t.email}))`, + t.email, + ), + }), + ), + }; + + const { sqlStatements } = await diffTestSchemasSqlite(from, to, []); + expect(sqlStatements.length).toBe(8); + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE `users` (\n\t`id` integer PRIMARY KEY NOT NULL,\n\t`name` text,\n\t`email` text\n);\n', + 'CREATE UNIQUE INDEX `uniqueExpr` ON `users` ((lower("email")));', + 'CREATE INDEX `indexExpr` ON `users` ((lower("email")));', + 'CREATE INDEX `indexExprMultiple` ON `users` ((lower("email")),(lower("email")));', + 'CREATE UNIQUE INDEX `uniqueCol` ON `users` (`email`);', + 'CREATE INDEX `indexCol` ON `users` (`email`);', + 'CREATE INDEX `indexColMultiple` ON `users` (`email`,`email`);', + 'CREATE INDEX `indexColExpr` ON `users` ((lower("email")),`email`);', + ]); +}); diff --git a/drizzle-kit/tests/test/sqlite.test.ts b/drizzle-kit/tests/test/sqlite.test.ts new file mode 100644 index 000000000..9a00e8def --- /dev/null +++ b/drizzle-kit/tests/test/sqlite.test.ts @@ -0,0 +1,39 @@ +import { int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { diffTestSchemasSqlite } from 'tests/schemaDiffer'; +import { expect } from 'vitest'; +import { DialectSuite, run } from '../common'; + +const sqliteSuite: DialectSuite = { + async columns1() { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + }), + }; + + const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_alter_table_add_column', + tableName: 'users', + referenceData: undefined, + column: { + name: 'name', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + }); + }, +}; 
+ +run(sqliteSuite); diff --git a/drizzle-kit/tests/testmysql.ts b/drizzle-kit/tests/testmysql.ts new file mode 100644 index 000000000..092f0a9e1 --- /dev/null +++ b/drizzle-kit/tests/testmysql.ts @@ -0,0 +1,27 @@ +import { index, mysqlTable, text } from 'drizzle-orm/mysql-core'; +import { diffTestSchemasMysql } from './schemaDiffer'; + +const from = { + users: mysqlTable( + 'table', + { + name: text('name'), + }, + (t) => { + return { + idx: index('name_idx').on(t.name), + }; + }, + ), +}; + +const to = { + users: mysqlTable('table', { + name: text('name'), + }), +}; + +const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); + +console.log(statements); +console.log(sqlStatements); diff --git a/drizzle-kit/tests/testsinglestore.ts b/drizzle-kit/tests/testsinglestore.ts new file mode 100644 index 000000000..1dc97d9c3 --- /dev/null +++ b/drizzle-kit/tests/testsinglestore.ts @@ -0,0 +1,29 @@ +import { index, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; +import { diffTestSchemasSingleStore } from './schemaDiffer'; + +const from = { + users: singlestoreTable( + 'table', + { + name: text('name'), + }, + (t) => { + return { + idx: index('name_idx').on(t.name), + }; + }, + ), +}; + +const to = { + users: singlestoreTable('table', { + name: text('name'), + }), +}; + +diffTestSchemasSingleStore(from, to, []).then((res) => { + const { statements, sqlStatements } = res; + + console.log(statements); + console.log(sqlStatements); +}); diff --git a/drizzle-kit/tests/testsqlite.ts b/drizzle-kit/tests/testsqlite.ts new file mode 100644 index 000000000..e68bbc195 --- /dev/null +++ b/drizzle-kit/tests/testsqlite.ts @@ -0,0 +1,19 @@ +import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { diffTestSchemasMysql, diffTestSchemasSqlite } from './schemaDiffer'; + +const from = { + users: sqliteTable('table', { + password: text('password'), + }), +}; + +const to = { + users: sqliteTable('table1', { + password_hash: 
text('password_hash'), + }), +}; + +const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, [], true); + +console.log(statements); +console.log(sqlStatements); diff --git a/drizzle-kit/tests/validations.test.ts b/drizzle-kit/tests/validations.test.ts new file mode 100644 index 000000000..8a64603bb --- /dev/null +++ b/drizzle-kit/tests/validations.test.ts @@ -0,0 +1,869 @@ +import { mysqlCredentials } from 'src/cli/validations/mysql'; +import { postgresCredentials } from 'src/cli/validations/postgres'; +import { singlestoreCredentials } from 'src/cli/validations/singlestore'; +import { sqliteCredentials } from 'src/cli/validations/sqlite'; +import { expect, test } from 'vitest'; + +test('turso #1', () => { + sqliteCredentials.parse({ + dialect: 'sqlite', + driver: 'turso', + url: 'https://turso.tech', + }); +}); + +test('turso #2', () => { + sqliteCredentials.parse({ + dialect: 'sqlite', + driver: 'turso', + url: 'https://turso.tech', + authToken: 'token', + }); +}); + +test('turso #3', () => { + expect(() => + sqliteCredentials.parse({ + dialect: 'sqlite', + driver: 'turso', + url: 'https://turso.tech', + authToken: '', + }) + ).toThrowError(); +}); + +test('turso #4', () => { + expect(() => { + sqliteCredentials.parse({ + dialect: 'sqlite', + driver: 'turso', + url: '', + authToken: 'token', + }); + }).toThrowError(); +}); + +test('turso #5', () => { + expect(() => { + sqliteCredentials.parse({ + dialect: 'sqlite', + driver: 'turso', + url: '', + authToken: '', + }); + }).toThrowError(); +}); + +test('d1-http #1', () => { + sqliteCredentials.parse({ + dialect: 'sqlite', + driver: 'd1-http', + accountId: 'accountId', + databaseId: 'databaseId', + token: 'token', + }); +}); + +test('d1-http #2', () => { + expect(() => + sqliteCredentials.parse({ + dialect: 'sqlite', + driver: 'd1-http', + accountId: 'accountId', + databaseId: 'databaseId', + // token: "token", + }) + ).toThrowError(); +}); + +test('d1-http #3', () => { + expect(() => + 
sqliteCredentials.parse({ + dialect: 'sqlite', + driver: 'd1-http', + accountId: 'accountId', + databaseId: 'databaseId', + token: '', + }) + ).toThrowError(); +}); + +test('d1-http #4', () => { + expect(() => + sqliteCredentials.parse({ + dialect: 'sqlite', + driver: 'd1-http', + accountId: 'accountId', + // databaseId: "databaseId", + token: 'token', + }) + ).toThrowError(); +}); + +test('d1-http #5', () => { + expect(() => + sqliteCredentials.parse({ + dialect: 'sqlite', + driver: 'd1-http', + accountId: 'accountId', + databaseId: '', + token: 'token', + }) + ).toThrowError(); +}); + +test('d1-http #6', () => { + expect(() => + sqliteCredentials.parse({ + dialect: 'sqlite', + driver: 'd1-http', + // accountId: "accountId", + databaseId: 'databaseId', + token: 'token', + }) + ).toThrowError(); +}); + +test('d1-http #7', () => { + expect(() => + sqliteCredentials.parse({ + dialect: 'sqlite', + driver: 'd1-http', + accountId: '', + databaseId: 'databaseId', + token: 'token', + }) + ).toThrowError(); +}); + +// omit undefined driver +test('sqlite #1', () => { + expect( + sqliteCredentials.parse({ + dialect: 'sqlite', + driver: undefined, + url: 'https://turso.tech', + }), + ).toStrictEqual({ + url: 'https://turso.tech', + }); +}); + +test('sqlite #2', () => { + expect( + sqliteCredentials.parse({ + dialect: 'sqlite', + url: 'https://turso.tech', + }), + ).toStrictEqual({ + url: 'https://turso.tech', + }); +}); + +test('sqlite #3', () => { + expect(() => + sqliteCredentials.parse({ + dialect: 'sqlite', + url: '', + }) + ).toThrowError(); +}); + +test('AWS Data API #1', () => { + expect( + postgresCredentials.parse({ + dialect: 'postgres', + url: 'https://turso.tech', + }), + ).toStrictEqual({ + url: 'https://turso.tech', + }); +}); + +test('AWS Data API #1', () => { + expect( + postgresCredentials.parse({ + dialect: 'postgres', + driver: 'aws-data-api', + database: 'database', + secretArn: 'secretArn', + resourceArn: 'resourceArn', + }), + ).toStrictEqual({ + driver: 
'aws-data-api', + database: 'database', + secretArn: 'secretArn', + resourceArn: 'resourceArn', + }); +}); + +test('AWS Data API #2', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + driver: 'aws-data-api', + database: 'database', + secretArn: '', + resourceArn: 'resourceArn', + }); + }).toThrowError(); +}); +test('AWS Data API #3', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + driver: 'aws-data-api', + database: 'database', + secretArn: 'secretArn', + resourceArn: '', + }); + }).toThrowError(); +}); +test('AWS Data API #4', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + driver: 'aws-data-api', + database: '', + secretArn: 'secretArn', + resourceArn: 'resourceArn', + }); + }).toThrowError(); +}); + +test('AWS Data API #5', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + driver: 'aws-data-api', + database: 'database', + resourceArn: 'resourceArn', + }); + }).toThrowError(); +}); +test('AWS Data API #6', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + driver: 'aws-data-api', + secretArn: 'secretArn', + resourceArn: 'resourceArn', + }); + }).toThrowError(); +}); +test('AWS Data API #7', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + driver: 'aws-data-api', + database: 'database', + secretArn: 'secretArn', + }); + }).toThrowError(); +}); + +test('AWS Data API #8', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + driver: 'aws-data-api', + }); + }).toThrowError(); +}); + +test('PGlite #1', () => { + expect( + postgresCredentials.parse({ + dialect: 'postgres', + driver: 'pglite', + url: './my.db', + }), + ).toStrictEqual({ + driver: 'pglite', + url: './my.db', + }); +}); + +test('PGlite #2', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + driver: 'pglite', + url: '', + }); + }).toThrowError(); +}); + +test('PGlite #3', 
() => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + driver: 'pglite', + }); + }).toThrowError(); +}); + +test('postgres #1', () => { + expect( + postgresCredentials.parse({ + dialect: 'postgres', + url: 'https://turso.tech', + }), + ).toStrictEqual({ + url: 'https://turso.tech', + }); +}); + +test('postgres #2', () => { + expect( + postgresCredentials.parse({ + dialect: 'postgres', + driver: undefined, + url: 'https://turso.tech', + }), + ).toStrictEqual({ + url: 'https://turso.tech', + }); +}); + +test('postgres #3', () => { + expect( + postgresCredentials.parse({ + dialect: 'postgres', + database: 'database', + host: 'host', + }), + ).toStrictEqual({ + database: 'database', + host: 'host', + }); +}); + +test('postgres #4', () => { + expect( + postgresCredentials.parse({ + dialect: 'postgres', + database: 'database', + host: 'host', + }), + ).toStrictEqual({ + database: 'database', + host: 'host', + }); +}); + +test('postgres #5', () => { + expect( + postgresCredentials.parse({ + dialect: 'postgres', + host: 'host', + port: 1234, + user: 'user', + password: 'password', + database: 'database', + ssl: 'require', + }), + ).toStrictEqual({ + host: 'host', + port: 1234, + user: 'user', + password: 'password', + database: 'database', + ssl: 'require', + }); +}); + +test('postgres #6', () => { + expect( + postgresCredentials.parse({ + dialect: 'postgres', + host: 'host', + database: 'database', + ssl: true, + }), + ).toStrictEqual({ + host: 'host', + database: 'database', + ssl: true, + }); +}); + +test('postgres #7', () => { + expect( + postgresCredentials.parse({ + dialect: 'postgres', + host: 'host', + database: 'database', + ssl: 'allow', + }), + ).toStrictEqual({ + host: 'host', + database: 'database', + ssl: 'allow', + }); +}); + +test('postgres #8', () => { + expect( + postgresCredentials.parse({ + dialect: 'postgres', + host: 'host', + database: 'database', + ssl: { + ca: 'ca', + cert: 'cert', + }, + }), + ).toStrictEqual({ + host: 
'host', + database: 'database', + ssl: { + ca: 'ca', + cert: 'cert', + }, + }); +}); + +test('postgres #9', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + }); + }).toThrowError(); +}); + +test('postgres #10', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + url: undefined, + }); + }).toThrowError(); +}); + +test('postgres #11', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + url: '', + }); + }).toThrowError(); +}); + +test('postgres #12', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + host: '', + database: '', + }); + }).toThrowError(); +}); + +test('postgres #13', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + database: '', + }); + }).toThrowError(); +}); + +test('postgres #14', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + host: '', + }); + }).toThrowError(); +}); + +test('postgres #15', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + database: ' ', + host: '', + }); + }).toThrowError(); +}); + +test('postgres #16', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + database: '', + host: ' ', + }); + }).toThrowError(); +}); + +test('postgres #17', () => { + expect(() => { + postgresCredentials.parse({ + dialect: 'postgres', + database: ' ', + host: ' ', + port: '', + }); + }).toThrowError(); +}); + +test('mysql #1', () => { + expect( + mysqlCredentials.parse({ + dialect: 'mysql', + url: 'https://turso.tech', + }), + ).toStrictEqual({ + url: 'https://turso.tech', + }); +}); + +test('mysql #2', () => { + expect( + mysqlCredentials.parse({ + dialect: 'mysql', + driver: undefined, + url: 'https://turso.tech', + }), + ).toStrictEqual({ + url: 'https://turso.tech', + }); +}); + +test('mysql #3', () => { + expect( + mysqlCredentials.parse({ + dialect: 'mysql', + database: 'database', + host: 'host', + }), + 
).toStrictEqual({ + database: 'database', + host: 'host', + }); +}); + +test('mysql #4', () => { + expect( + mysqlCredentials.parse({ + dialect: 'mysql', + database: 'database', + host: 'host', + }), + ).toStrictEqual({ + database: 'database', + host: 'host', + }); +}); + +test('mysql #5', () => { + expect( + mysqlCredentials.parse({ + dialect: 'mysql', + host: 'host', + port: 1234, + user: 'user', + password: 'password', + database: 'database', + ssl: 'require', + }), + ).toStrictEqual({ + host: 'host', + port: 1234, + user: 'user', + password: 'password', + database: 'database', + ssl: 'require', + }); +}); + +test('mysql #7', () => { + expect( + mysqlCredentials.parse({ + dialect: 'mysql', + host: 'host', + database: 'database', + ssl: 'allow', + }), + ).toStrictEqual({ + host: 'host', + database: 'database', + ssl: 'allow', + }); +}); + +test('mysql #8', () => { + expect( + mysqlCredentials.parse({ + dialect: 'mysql', + host: 'host', + database: 'database', + ssl: { + ca: 'ca', + cert: 'cert', + }, + }), + ).toStrictEqual({ + host: 'host', + database: 'database', + ssl: { + ca: 'ca', + cert: 'cert', + }, + }); +}); + +test('mysql #9', () => { + expect(() => { + mysqlCredentials.parse({ + dialect: 'mysql', + }); + }).toThrowError(); +}); + +test('mysql #10', () => { + expect(() => { + mysqlCredentials.parse({ + dialect: 'mysql', + url: undefined, + }); + }).toThrowError(); +}); + +test('mysql #11', () => { + expect(() => { + mysqlCredentials.parse({ + dialect: 'mysql', + url: '', + }); + }).toThrowError(); +}); + +test('mysql #12', () => { + expect(() => { + mysqlCredentials.parse({ + dialect: 'mysql', + host: '', + database: '', + }); + }).toThrowError(); +}); + +test('mysql #13', () => { + expect(() => { + mysqlCredentials.parse({ + dialect: 'mysql', + database: '', + }); + }).toThrowError(); +}); + +test('mysql #14', () => { + expect(() => { + mysqlCredentials.parse({ + dialect: 'mysql', + host: '', + }); + }).toThrowError(); +}); + +test('mysql #15', () => { 
+ expect(() => { + mysqlCredentials.parse({ + dialect: 'mysql', + database: ' ', + host: '', + }); + }).toThrowError(); +}); + +test('mysql #16', () => { + expect(() => { + mysqlCredentials.parse({ + dialect: 'mysql', + database: '', + host: ' ', + }); + }).toThrowError(); +}); + +test('mysql #17', () => { + expect(() => { + mysqlCredentials.parse({ + dialect: 'mysql', + database: ' ', + host: ' ', + port: '', + }); + }).toThrowError(); +}); + +test('singlestore #1', () => { + expect( + singlestoreCredentials.parse({ + dialect: 'singlestore', + database: 'database', + host: 'host', + }), + ).toStrictEqual({ + database: 'database', + host: 'host', + }); +}); + +test('singlestore #2', () => { + expect( + singlestoreCredentials.parse({ + dialect: 'singlestore', + database: 'database', + host: 'host', + }), + ).toStrictEqual({ + database: 'database', + host: 'host', + }); +}); + +test('singlestore #3', () => { + expect( + singlestoreCredentials.parse({ + dialect: 'singlestore', + host: 'host', + port: 1234, + user: 'user', + password: 'password', + database: 'database', + ssl: 'require', + }), + ).toStrictEqual({ + host: 'host', + port: 1234, + user: 'user', + password: 'password', + database: 'database', + ssl: 'require', + }); +}); + +test('singlestore #4', () => { + expect( + singlestoreCredentials.parse({ + dialect: 'singlestore', + host: 'host', + database: 'database', + ssl: 'allow', + }), + ).toStrictEqual({ + host: 'host', + database: 'database', + ssl: 'allow', + }); +}); + +test('singlestore #5', () => { + expect( + singlestoreCredentials.parse({ + dialect: 'singlestore', + host: 'host', + database: 'database', + ssl: { + ca: 'ca', + cert: 'cert', + }, + }), + ).toStrictEqual({ + host: 'host', + database: 'database', + ssl: { + ca: 'ca', + cert: 'cert', + }, + }); +}); + +test('singlestore #6', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + }); + }).toThrowError(); +}); + +test('singlestore #7', () => { + expect(() => { 
+ singlestoreCredentials.parse({ + dialect: 'singlestore', + url: undefined, + }); + }).toThrowError(); +}); + +test('singlestore #8', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + url: '', + }); + }).toThrowError(); +}); + +test('singlestore #9', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + host: '', + database: '', + }); + }).toThrowError(); +}); + +test('singlestore #10', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + database: '', + }); + }).toThrowError(); +}); + +test('singlestore #11', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + host: '', + }); + }).toThrowError(); +}); + +test('singlestore #12', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + database: ' ', + host: '', + }); + }).toThrowError(); +}); + +test('singlestore #13', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + database: '', + host: ' ', + }); + }).toThrowError(); +}); + +test('singlestore #14', () => { + expect(() => { + singlestoreCredentials.parse({ + dialect: 'singlestore', + database: ' ', + host: ' ', + port: '', + }); + }).toThrowError(); +}); diff --git a/drizzle-kit/tests/wrap-param.test.ts b/drizzle-kit/tests/wrap-param.test.ts new file mode 100644 index 000000000..a27d27d45 --- /dev/null +++ b/drizzle-kit/tests/wrap-param.test.ts @@ -0,0 +1,16 @@ +import chalk from 'chalk'; +import { assert, expect, test } from 'vitest'; +import { wrapParam } from '../src/cli/validations/common'; + +test('wrapParam', () => { + expect(wrapParam('password', 'password123', false, 'secret')).toBe(` [${chalk.green('✓')}] password: '*****'`); + expect(wrapParam('url', 'mysql://user:password@localhost:3306/database', false, 'url')).toBe( + ` [${chalk.green('✓')}] url: 'mysql://user:****@localhost:3306/database'`, + ); + expect(wrapParam('url', 
'singlestore://user:password@localhost:3306/database', false, 'url')).toBe( + ` [${chalk.green('✓')}] url: 'singlestore://user:****@localhost:3306/database'`, + ); + expect(wrapParam('url', 'postgresql://user:password@localhost:5432/database', false, 'url')).toBe( + ` [${chalk.green('✓')}] url: 'postgresql://user:****@localhost:5432/database'`, + ); +}); diff --git a/drizzle-kit/tsconfig.cli-types.json b/drizzle-kit/tsconfig.cli-types.json new file mode 100644 index 000000000..aa75aa3ed --- /dev/null +++ b/drizzle-kit/tsconfig.cli-types.json @@ -0,0 +1,9 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "declaration": true, + "emitDeclarationOnly": true, + "noEmit": false + }, + "include": ["src/index.ts", "src/utils.ts", "src/utils-studio.ts", "src/api.ts"] +} diff --git a/drizzle-kit/tsconfig.json b/drizzle-kit/tsconfig.json new file mode 100644 index 000000000..814139e47 --- /dev/null +++ b/drizzle-kit/tsconfig.json @@ -0,0 +1,28 @@ +{ + "compilerOptions": { + "target": "es2021", + "lib": ["es2021"], + "types": ["node"], + "strictNullChecks": true, + "strictFunctionTypes": false, + "allowJs": true, + "skipLibCheck": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "noImplicitOverride": true, + "forceConsistentCasingInFileNames": true, + "module": "CommonJS", + "moduleResolution": "node", + "resolveJsonModule": true, + "noErrorTruncation": true, + "isolatedModules": true, + "sourceMap": true, + "baseUrl": ".", + "outDir": "dist", + "noEmit": true, + "typeRoots": ["node_modules/@types", "src/@types"] + }, + "include": ["src", "dev", "tests", "drizzle.config.ts", "test.ts"], + "exclude": ["node_modules"] +} diff --git a/drizzle-kit/vitest.config.ts b/drizzle-kit/vitest.config.ts new file mode 100644 index 000000000..602e96ede --- /dev/null +++ b/drizzle-kit/vitest.config.ts @@ -0,0 +1,25 @@ +import tsconfigPaths from 'vite-tsconfig-paths'; +import { defineConfig } from 'vitest/config'; + +export default 
defineConfig({ + test: { + include: [ + 'tests/**/*.test.ts', + ], + + typecheck: { + tsconfig: 'tsconfig.json', + }, + testTimeout: 100000, + hookTimeout: 100000, + isolate: true, + poolOptions: { + threads: { + singleThread: true, + }, + }, + maxWorkers: 1, + fileParallelism: false, + }, + plugins: [tsconfigPaths()], +}); diff --git a/drizzle-orm/package.json b/drizzle-orm/package.json index 74c3726f7..888f7efcb 100644 --- a/drizzle-orm/package.json +++ b/drizzle-orm/package.json @@ -1,6 +1,6 @@ { "name": "drizzle-orm", - "version": "0.32.1", + "version": "0.33.0", "description": "Drizzle ORM package for SQL databases", "type": "module", "scripts": { diff --git a/drizzle-orm/src/column-builder.ts b/drizzle-orm/src/column-builder.ts index 4a19a79a9..ad278e29d 100644 --- a/drizzle-orm/src/column-builder.ts +++ b/drizzle-orm/src/column-builder.ts @@ -2,6 +2,7 @@ import { entityKind } from '~/entity.ts'; import type { Column } from './column.ts'; import type { MySqlColumn } from './mysql-core/index.ts'; import type { ExtraConfigColumn, PgColumn, PgSequenceOptions } from './pg-core/index.ts'; +import type { SingleStoreColumn } from './singlestore-core/index.ts'; import type { SQL } from './sql/sql.ts'; import type { SQLiteColumn } from './sqlite-core/index.ts'; import type { Simplify } from './utils.ts'; @@ -17,7 +18,7 @@ export type ColumnDataType = | 'custom' | 'buffer'; -export type Dialect = 'pg' | 'mysql' | 'sqlite' | 'common'; +export type Dialect = 'pg' | 'mysql' | 'sqlite' | 'singlestore' | 'common'; export type GeneratedStorageMode = 'virtual' | 'stored'; @@ -299,7 +300,8 @@ export type BuildColumn< TTableName extends string, TBuilder extends ColumnBuilderBase, TDialect extends Dialect, -> = TDialect extends 'pg' ? PgColumn> +> = TDialect extends 'singlestore' ? SingleStoreColumn> + : TDialect extends 'pg' ? PgColumn> : TDialect extends 'mysql' ? MySqlColumn> : TDialect extends 'sqlite' ? SQLiteColumn> : TDialect extends 'common' ? 
Column> @@ -337,7 +339,8 @@ export type BuildExtraConfigColumns< & {}; export type ChangeColumnTableName = - TDialect extends 'pg' ? PgColumn> + TDialect extends 'singlestore' ? SingleStoreColumn> + : TDialect extends 'pg' ? PgColumn> : TDialect extends 'mysql' ? MySqlColumn> : TDialect extends 'sqlite' ? SQLiteColumn> : never; diff --git a/drizzle-orm/src/expo-sqlite/query.ts b/drizzle-orm/src/expo-sqlite/query.ts index f687efb54..db467ce2c 100644 --- a/drizzle-orm/src/expo-sqlite/query.ts +++ b/drizzle-orm/src/expo-sqlite/query.ts @@ -7,6 +7,7 @@ import { SQLiteRelationalQuery } from '~/sqlite-core/query-builders/query.ts'; export const useLiveQuery = | SQLiteRelationalQuery<'sync', unknown>>( query: T, + deps: unknown[] = [], ) => { const [data, setData] = useState>( (is(query, SQLiteRelationalQuery) && query.mode === 'first' ? undefined : []) as Awaited, @@ -43,7 +44,7 @@ export const useLiveQuery = | SQL return () => { listener?.remove(); }; - }, []); + }, deps); return { data, diff --git a/drizzle-orm/src/mysql-core/session.ts b/drizzle-orm/src/mysql-core/session.ts index 08ad1ebb6..6b6269639 100644 --- a/drizzle-orm/src/mysql-core/session.ts +++ b/drizzle-orm/src/mysql-core/session.ts @@ -98,7 +98,7 @@ export abstract class MySqlSession< parts.push(`isolation level ${config.isolationLevel}`); } - return parts.length ? sql.join(['set transaction ', parts.join(' ')]) : undefined; + return parts.length ? sql`set transaction ${sql.raw(parts.join(' '))}` : undefined; } protected getStartTransactionSQL(config: MySqlTransactionConfig): SQL | undefined { @@ -112,7 +112,7 @@ export abstract class MySqlSession< parts.push(config.accessMode); } - return parts.length ? sql.join(['start transaction ', parts.join(' ')]) : undefined; + return parts.length ? 
sql`start transaction ${sql.raw(parts.join(' '))}` : undefined; } } diff --git a/drizzle-orm/src/postgres-js/driver.ts b/drizzle-orm/src/postgres-js/driver.ts index ae1b48a21..7f44344e8 100644 --- a/drizzle-orm/src/postgres-js/driver.ts +++ b/drizzle-orm/src/postgres-js/driver.ts @@ -27,6 +27,8 @@ export function drizzle = Record( + table: TTable, + alias: TAlias, +): BuildAliasTable { + return new Proxy(table, new TableAliasProxyHandler(alias, false)) as any; +} diff --git a/drizzle-orm/src/singlestore-core/columns/bigint.ts b/drizzle-orm/src/singlestore-core/columns/bigint.ts new file mode 100644 index 000000000..6ea4a7297 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/bigint.ts @@ -0,0 +1,115 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; + +export type SingleStoreBigInt53BuilderInitial = SingleStoreBigInt53Builder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreBigInt53'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreBigInt53Builder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreBigInt53Builder'; + + constructor(name: T['name'], unsigned: boolean = false) { + super(name, 'number', 'SingleStoreBigInt53'); + this.config.unsigned = unsigned; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreBigInt53> { + return new SingleStoreBigInt53>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreBigInt53> + extends SingleStoreColumnWithAutoIncrement +{ + 
static readonly [entityKind]: string = 'SingleStoreBigInt53'; + + getSQLType(): string { + return `bigint${this.config.unsigned ? ' unsigned' : ''}`; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'number') { + return value; + } + return Number(value); + } +} + +export type SingleStoreBigInt64BuilderInitial = SingleStoreBigInt64Builder<{ + name: TName; + dataType: 'bigint'; + columnType: 'SingleStoreBigInt64'; + data: bigint; + driverParam: string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreBigInt64Builder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreBigInt64Builder'; + + constructor(name: T['name'], unsigned: boolean = false) { + super(name, 'bigint', 'SingleStoreBigInt64'); + this.config.unsigned = unsigned; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreBigInt64> { + return new SingleStoreBigInt64>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreBigInt64> + extends SingleStoreColumnWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreBigInt64'; + + getSQLType(): string { + return `bigint${this.config.unsigned ? ' unsigned' : ''}`; + } + + // eslint-disable-next-line unicorn/prefer-native-coercion-functions + override mapFromDriverValue(value: string): bigint { + return BigInt(value); + } +} + +interface SingleStoreBigIntConfig { + mode: T; + unsigned?: boolean; +} + +export function bigint( + name: TName, + config: SingleStoreBigIntConfig, +): TMode extends 'number' ? 
SingleStoreBigInt53BuilderInitial : SingleStoreBigInt64BuilderInitial; +export function bigint(name: string, config: SingleStoreBigIntConfig) { + if (config.mode === 'number') { + return new SingleStoreBigInt53Builder(name, config.unsigned); + } + return new SingleStoreBigInt64Builder(name, config.unsigned); +} diff --git a/drizzle-orm/src/singlestore-core/columns/binary.ts b/drizzle-orm/src/singlestore-core/columns/binary.ts new file mode 100644 index 000000000..9cb05ac53 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/binary.ts @@ -0,0 +1,63 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreBinaryBuilderInitial = SingleStoreBinaryBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreBinary'; + data: string; + driverParam: string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreBinaryBuilder> + extends SingleStoreColumnBuilder< + T, + SingleStoreBinaryConfig + > +{ + static readonly [entityKind]: string = 'SingleStoreBinaryBuilder'; + + constructor(name: T['name'], length: number | undefined) { + super(name, 'string', 'SingleStoreBinary'); + this.config.length = length; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreBinary> { + return new SingleStoreBinary>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreBinary> extends SingleStoreColumn< + T, + SingleStoreBinaryConfig +> { + static readonly [entityKind]: string = 'SingleStoreBinary'; + + length: number | undefined = this.config.length; + + getSQLType(): string { + return this.length === undefined ? 
`binary` : `binary(${this.length})`; + } +} + +export interface SingleStoreBinaryConfig { + length?: number; +} + +export function binary( + name: TName, + config: SingleStoreBinaryConfig = {}, +): SingleStoreBinaryBuilderInitial { + return new SingleStoreBinaryBuilder(name, config.length); +} diff --git a/drizzle-orm/src/singlestore-core/columns/blob.ts b/drizzle-orm/src/singlestore-core/columns/blob.ts new file mode 100644 index 000000000..0885253f1 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/blob.ts @@ -0,0 +1,168 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { Equal } from '~/utils'; +import type { AnySingleStoreTable } from '../table'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +type BlobMode = 'buffer' | 'json' | 'bigint'; + +export type SingleStoreBigIntBuilderInitial = SingleStoreBigIntBuilder<{ + name: TName; + dataType: 'bigint'; + columnType: 'SingleStoreBigInt'; + data: bigint; + driverParam: Buffer; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreBigIntBuilder> + extends SingleStoreColumnBuilder +{ + static readonly [entityKind]: string = 'SingleStoreBigIntBuilder'; + + constructor(name: T['name']) { + super(name, 'bigint', 'SingleStoreBigInt'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreBigInt> { + return new SingleStoreBigInt>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreBigInt> extends SingleStoreColumn { + static readonly [entityKind]: string = 'SingleStoreBigInt'; + + getSQLType(): string { + return 'blob'; + } + + override mapFromDriverValue(value: Buffer): bigint { + return BigInt(value.toString()); + } + + override mapToDriverValue(value: bigint): Buffer { + return 
Buffer.from(value.toString()); + } +} + +export type SingleStoreBlobJsonBuilderInitial = SingleStoreBlobJsonBuilder<{ + name: TName; + dataType: 'json'; + columnType: 'SingleStoreBlobJson'; + data: unknown; + driverParam: Buffer; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreBlobJsonBuilder> + extends SingleStoreColumnBuilder +{ + static readonly [entityKind]: string = 'SingleStoreBlobJsonBuilder'; + + constructor(name: T['name']) { + super(name, 'json', 'SingleStoreBlobJson'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreBlobJson> { + return new SingleStoreBlobJson>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreBlobJson> + extends SingleStoreColumn +{ + static readonly [entityKind]: string = 'SingleStoreBlobJson'; + + getSQLType(): string { + return 'blob'; + } + + override mapFromDriverValue(value: Buffer): T['data'] { + return JSON.parse(value.toString()); + } + + override mapToDriverValue(value: T['data']): Buffer { + return Buffer.from(JSON.stringify(value)); + } +} + +export type SingleStoreBlobBufferBuilderInitial = SingleStoreBlobBufferBuilder<{ + name: TName; + dataType: 'buffer'; + columnType: 'SingleStoreBlobBuffer'; + data: Buffer; + driverParam: Buffer; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreBlobBufferBuilder> + extends SingleStoreColumnBuilder +{ + static readonly [entityKind]: string = 'SingleStoreBlobBufferBuilder'; + + constructor(name: T['name']) { + super(name, 'buffer', 'SingleStoreBlobBuffer'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreBlobBuffer> { + return new SingleStoreBlobBuffer>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreBlobBuffer> + extends SingleStoreColumn +{ + static readonly [entityKind]: string = 'SingleStoreBlobBuffer'; + + 
getSQLType(): string { + return 'blob'; + } +} + +export interface BlobConfig { + mode: TMode; +} + +/** + * It's recommended to use `text('...', { mode: 'json' })` instead of `blob` in JSON mode, because it supports JSON functions: + * >All JSON functions currently throw an error if any of their arguments are BLOBs because BLOBs are reserved for a future enhancement in which BLOBs will store the binary encoding for JSON. + * + * https://www.sqlite.org/json1.html + */ +export function blob( + name: TName, + config?: BlobConfig, +): Equal extends true ? SingleStoreBigIntBuilderInitial + : Equal extends true ? SingleStoreBlobBufferBuilderInitial + : SingleStoreBlobJsonBuilderInitial; +export function blob(name: string, config?: BlobConfig) { + if (config?.mode === 'json') { + return new SingleStoreBlobJsonBuilder(name); + } + if (config?.mode === 'bigint') { + return new SingleStoreBigIntBuilder(name); + } + return new SingleStoreBlobBufferBuilder(name); +} diff --git a/drizzle-orm/src/singlestore-core/columns/boolean.ts b/drizzle-orm/src/singlestore-core/columns/boolean.ts new file mode 100644 index 000000000..795b12e7f --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/boolean.ts @@ -0,0 +1,56 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreBooleanBuilderInitial = SingleStoreBooleanBuilder<{ + name: TName; + dataType: 'boolean'; + columnType: 'SingleStoreBoolean'; + data: boolean; + driverParam: number | boolean; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreBooleanBuilder> + extends SingleStoreColumnBuilder +{ + static readonly [entityKind]: string = 'SingleStoreBooleanBuilder'; + + 
constructor(name: T['name']) { + super(name, 'boolean', 'SingleStoreBoolean'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreBoolean> { + return new SingleStoreBoolean>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreBoolean> + extends SingleStoreColumn +{ + static readonly [entityKind]: string = 'SingleStoreBoolean'; + + getSQLType(): string { + return 'boolean'; + } + + override mapFromDriverValue(value: number | boolean): boolean { + if (typeof value === 'boolean') { + return value; + } + return value === 1; + } +} + +export function boolean(name: TName): SingleStoreBooleanBuilderInitial { + return new SingleStoreBooleanBuilder(name); +} diff --git a/drizzle-orm/src/singlestore-core/columns/char.ts b/drizzle-orm/src/singlestore-core/columns/char.ts new file mode 100644 index 000000000..f59c173cb --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/char.ts @@ -0,0 +1,67 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import type { Writable } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreCharBuilderInitial = + SingleStoreCharBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreChar'; + data: TEnum[number]; + driverParam: number | string; + enumValues: TEnum; + generated: undefined; + }>; + +export class SingleStoreCharBuilder> + extends SingleStoreColumnBuilder< + T, + SingleStoreCharConfig + > +{ + static readonly [entityKind]: string = 'SingleStoreCharBuilder'; + + constructor(name: T['name'], config: SingleStoreCharConfig) { + super(name, 'string', 'SingleStoreChar'); + this.config.length = config.length; + 
this.config.enum = config.enum; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreChar & { enumValues: T['enumValues'] }> { + return new SingleStoreChar & { enumValues: T['enumValues'] }>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreChar> + extends SingleStoreColumn> +{ + static readonly [entityKind]: string = 'SingleStoreChar'; + + readonly length: number | undefined = this.config.length; + override readonly enumValues = this.config.enum; + + getSQLType(): string { + return this.length === undefined ? `char` : `char(${this.length})`; + } +} + +export interface SingleStoreCharConfig { + length?: number; + enum?: TEnum; +} + +export function char>( + name: TName, + config: SingleStoreCharConfig> = {}, +): SingleStoreCharBuilderInitial> { + return new SingleStoreCharBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/common.ts b/drizzle-orm/src/singlestore-core/columns/common.ts new file mode 100644 index 000000000..176265677 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/common.ts @@ -0,0 +1,116 @@ +import { ColumnBuilder } from '~/column-builder.ts'; +import type { + ColumnBuilderBase, + ColumnBuilderBaseConfig, + ColumnBuilderExtraConfig, + ColumnBuilderRuntimeConfig, + ColumnDataType, + HasDefault, + HasGenerated, + IsAutoincrement, + MakeColumnConfig, +} from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { Column } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable, SingleStoreTable } from '~/singlestore-core/table.ts'; +import type { SQL } from '~/sql/sql.ts'; +import type { Update } from '~/utils.ts'; +import { uniqueKeyName } from '../unique-constraint.ts'; + +export interface SingleStoreColumnBuilderBase< + T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, + TTypeConfig extends object = object, +> extends 
ColumnBuilderBase {} + +export interface SingleStoreGeneratedColumnConfig { + mode?: 'virtual' | 'stored'; +} + +export abstract class SingleStoreColumnBuilder< + T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig & { + data: any; + }, + TRuntimeConfig extends object = object, + TTypeConfig extends object = object, + TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, +> extends ColumnBuilder + implements SingleStoreColumnBuilderBase +{ + static readonly [entityKind]: string = 'SingleStoreColumnBuilder'; + + unique(name?: string): this { + this.config.isUnique = true; + this.config.uniqueName = name; + return this; + } + + generatedAlwaysAs(as: SQL | T['data'] | (() => SQL), config?: SingleStoreGeneratedColumnConfig): HasGenerated { + this.config.generated = { + as, + type: 'always', + mode: config?.mode ?? 'virtual', + }; + return this as any; + } + + /** @internal */ + abstract build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreColumn>; +} + +// To understand how to use `SingleStoreColumn` and `AnySingleStoreColumn`, see `Column` and `AnyColumn` documentation. 
+export abstract class SingleStoreColumn< + T extends ColumnBaseConfig = ColumnBaseConfig, + TRuntimeConfig extends object = object, +> extends Column { + static readonly [entityKind]: string = 'SingleStoreColumn'; + + constructor( + override readonly table: SingleStoreTable, + config: ColumnBuilderRuntimeConfig, + ) { + if (!config.uniqueName) { + config.uniqueName = uniqueKeyName(table, [config.name]); + } + super(table, config); + } +} + +export type AnySingleStoreColumn> = {}> = + SingleStoreColumn< + Required, TPartial>> + >; + +export interface SingleStoreColumnWithAutoIncrementConfig { + autoIncrement: boolean; +} + +export abstract class SingleStoreColumnBuilderWithAutoIncrement< + T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, + TRuntimeConfig extends object = object, + TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, +> extends SingleStoreColumnBuilder { + static readonly [entityKind]: string = 'SingleStoreColumnBuilderWithAutoIncrement'; + + constructor(name: NonNullable, dataType: T['dataType'], columnType: T['columnType']) { + super(name, dataType, columnType); + this.config.autoIncrement = false; + } + + autoincrement(): IsAutoincrement> { + this.config.autoIncrement = true; + this.config.hasDefault = true; + return this as IsAutoincrement>; + } +} + +export abstract class SingleStoreColumnWithAutoIncrement< + T extends ColumnBaseConfig = ColumnBaseConfig, + TRuntimeConfig extends object = object, +> extends SingleStoreColumn { + static readonly [entityKind]: string = 'SingleStoreColumnWithAutoIncrement'; + + readonly autoIncrement: boolean = this.config.autoIncrement; +} diff --git a/drizzle-orm/src/singlestore-core/columns/custom.ts b/drizzle-orm/src/singlestore-core/columns/custom.ts new file mode 100644 index 000000000..727099884 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/custom.ts @@ -0,0 +1,227 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from 
'~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import type { SQL } from '~/sql/sql.ts'; +import type { Equal } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type ConvertCustomConfig> = + & { + name: TName; + dataType: 'custom'; + columnType: 'SingleStoreCustomColumn'; + data: T['data']; + driverParam: T['driverData']; + enumValues: undefined; + generated: undefined; + } + & (T['notNull'] extends true ? { notNull: true } : {}) + & (T['default'] extends true ? { hasDefault: true } : {}); + +export interface SingleStoreCustomColumnInnerConfig { + customTypeValues: CustomTypeValues; +} + +export class SingleStoreCustomColumnBuilder> + extends SingleStoreColumnBuilder< + T, + { + fieldConfig: CustomTypeValues['config']; + customTypeParams: CustomTypeParams; + }, + { + singlestoreColumnBuilderBrand: 'SingleStoreCustomColumnBuilderBrand'; + } + > +{ + static readonly [entityKind]: string = 'SingleStoreCustomColumnBuilder'; + + constructor( + name: T['name'], + fieldConfig: CustomTypeValues['config'], + customTypeParams: CustomTypeParams, + ) { + super(name, 'custom', 'SingleStoreCustomColumn'); + this.config.fieldConfig = fieldConfig; + this.config.customTypeParams = customTypeParams; + } + + /** @internal */ + build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreCustomColumn> { + return new SingleStoreCustomColumn>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreCustomColumn> + extends SingleStoreColumn +{ + static readonly [entityKind]: string = 'SingleStoreCustomColumn'; + + private sqlName: string; + private mapTo?: (value: T['data']) => T['driverParam']; + private mapFrom?: (value: T['driverParam']) => T['data']; + + constructor( + table: AnySingleStoreTable<{ name: T['tableName'] }>, + config: 
SingleStoreCustomColumnBuilder['config'], + ) { + super(table, config); + this.sqlName = config.customTypeParams.dataType(config.fieldConfig); + this.mapTo = config.customTypeParams.toDriver; + this.mapFrom = config.customTypeParams.fromDriver; + } + + getSQLType(): string { + return this.sqlName; + } + + override mapFromDriverValue(value: T['driverParam']): T['data'] { + return typeof this.mapFrom === 'function' ? this.mapFrom(value) : value as T['data']; + } + + override mapToDriverValue(value: T['data']): T['driverParam'] { + return typeof this.mapTo === 'function' ? this.mapTo(value) : value as T['data']; + } +} + +export type CustomTypeValues = { + /** + * Required type for custom column, that will infer proper type model + * + * Examples: + * + * If you want your column to be `string` type after selecting/or on inserting - use `data: string`. Like `text`, `varchar` + * + * If you want your column to be `number` type after selecting/or on inserting - use `data: number`. Like `integer` + */ + data: unknown; + + /** + * Type helper, that represents what type database driver is accepting for specific database data type + */ + driverData?: unknown; + + /** + * What config type should be used for {@link CustomTypeParams} `dataType` generation + */ + config?: unknown; + + /** + * Whether the config argument should be required or not + * @default false + */ + configRequired?: boolean; + + /** + * If your custom data type should be notNull by default you can use `notNull: true` + * + * @example + * const customSerial = customType<{ data: number, notNull: true, default: true }>({ + * dataType() { + * return 'serial'; + * }, + * }); + */ + notNull?: boolean; + + /** + * If your custom data type has default you can use `default: true` + * + * @example + * const customSerial = customType<{ data: number, notNull: true, default: true }>({ + * dataType() { + * return 'serial'; + * }, + * }); + */ + default?: boolean; +}; + +export interface CustomTypeParams { + /** + * 
Database data type string representation, that is used for migrations + * @example + * ``` + * `jsonb`, `text` + * ``` + * + * If database data type needs additional params you can use them from `config` param + * @example + * ``` + * `varchar(256)`, `numeric(2,3)` + * ``` + * + * To make `config` be of specific type please use config generic in {@link CustomTypeValues} + * + * @example + * Usage example + * ``` + * dataType() { + * return 'boolean'; + * }, + * ``` + * Or + * ``` + * dataType(config) { + * return typeof config.length !== 'undefined' ? `varchar(${config.length})` : `varchar`; + * } + * ``` + */ + dataType: (config: T['config'] | (Equal extends true ? never : undefined)) => string; + + /** + * Optional mapping function, between user input and driver + * @example + * For example, when using jsonb we need to map JS/TS object to string before writing to database + * ``` + * toDriver(value: TData): string { + * return JSON.stringify(value); + * } + * ``` + */ + toDriver?: (value: T['data']) => T['driverData'] | SQL; + + /** + * Optional mapping function, that is responsible for data mapping from database to JS/TS code + * @example + * For example, when using timestamp we need to map string Date representation to JS Date + * ``` + * fromDriver(value: string): Date { + * return new Date(value); + * }, + * ``` + */ + fromDriver?: (value: T['driverData']) => T['data']; +} + +/** + * Custom singlestore database data type generator + */ +export function customType( + customTypeParams: CustomTypeParams, +): Equal extends true ? 
( + dbName: TName, + fieldConfig: T['config'], + ) => SingleStoreCustomColumnBuilder> + : ( + dbName: TName, + fieldConfig?: T['config'], + ) => SingleStoreCustomColumnBuilder> +{ + return ( + dbName: TName, + fieldConfig?: T['config'], + ): SingleStoreCustomColumnBuilder> => { + return new SingleStoreCustomColumnBuilder( + dbName as ConvertCustomConfig['name'], + fieldConfig, + customTypeParams, + ); + }; +} diff --git a/drizzle-orm/src/singlestore-core/columns/date.common.ts b/drizzle-orm/src/singlestore-core/columns/date.common.ts new file mode 100644 index 000000000..a02e7cc00 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/date.common.ts @@ -0,0 +1,52 @@ +import type { + ColumnBuilderBaseConfig, + ColumnBuilderExtraConfig, + ColumnDataType, + HasDefault, +} from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { sql } from '~/sql/sql.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; +import type { DatetimeFsp } from './datetime.ts'; + +export interface SingleStoreDateColumnBaseConfig { + hasOnUpdateNow: boolean; +} + +export abstract class SingleStoreDateColumnBaseBuilder< + T extends ColumnBuilderBaseConfig, + TRuntimeConfig extends object = object, + TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, +> extends SingleStoreColumnBuilder { + static readonly [entityKind]: string = 'SingleStoreDateColumnBuilder'; + + defaultNow(fsp?: DatetimeFsp | undefined) { + return fsp + ? this.default(sql`(now(${fsp}))`) + : this.default(sql`(now())`); + } + + defaultCurrentTimestamp(fsp?: DatetimeFsp | undefined) { + return fsp + ? 
this.default(sql`(current_timestamp(${fsp}))`) + : this.default(sql`(current_timestamp())`); + } + + // "on update now" also adds an implicit default value to the column - https://dev.mysql.com/doc/refman/8.0/en/timestamp-initialization.html + // TODO(singlestore) + onUpdateNow(): HasDefault { + this.config.hasOnUpdateNow = true; + this.config.hasDefault = true; + return this as HasDefault; + } +} + +export abstract class SingleStoreDateBaseColumn< + T extends ColumnBaseConfig, + TRuntimeConfig extends object = object, +> extends SingleStoreColumn { + static readonly [entityKind]: string = 'SingleStoreDateColumn'; + + readonly hasOnUpdateNow: boolean = this.config.hasOnUpdateNow; +} diff --git a/drizzle-orm/src/singlestore-core/columns/date.ts b/drizzle-orm/src/singlestore-core/columns/date.ts new file mode 100644 index 000000000..1c64fe3f1 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/date.ts @@ -0,0 +1,118 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import type { Equal } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreDateBuilderInitial = SingleStoreDateBuilder<{ + name: TName; + dataType: 'date'; + columnType: 'SingleStoreDate'; + data: Date; + driverParam: string | number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreDateBuilder> + extends SingleStoreColumnBuilder +{ + static readonly [entityKind]: string = 'SingleStoreDateBuilder'; + + constructor(name: T['name']) { + super(name, 'date', 'SingleStoreDate'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreDate> { + return new SingleStoreDate>( + table, + this.config as 
ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreDate> extends SingleStoreColumn { + static readonly [entityKind]: string = 'SingleStoreDate'; + + constructor( + table: AnySingleStoreTable<{ name: T['tableName'] }>, + config: SingleStoreDateBuilder['config'], + ) { + super(table, config); + } + + getSQLType(): string { + return `date`; + } + + override mapFromDriverValue(value: string): Date { + return new Date(value); + } +} + +export type SingleStoreDateStringBuilderInitial = SingleStoreDateStringBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreDateString'; + data: string; + driverParam: string | number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreDateStringBuilder> + extends SingleStoreColumnBuilder +{ + static readonly [entityKind]: string = 'SingleStoreDateStringBuilder'; + + constructor(name: T['name']) { + super(name, 'string', 'SingleStoreDateString'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreDateString> { + return new SingleStoreDateString>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreDateString> + extends SingleStoreColumn +{ + static readonly [entityKind]: string = 'SingleStoreDateString'; + + constructor( + table: AnySingleStoreTable<{ name: T['tableName'] }>, + config: SingleStoreDateStringBuilder['config'], + ) { + super(table, config); + } + + getSQLType(): string { + return `date`; + } +} + +export interface SingleStoreDateConfig { + mode?: TMode; +} + +export function date( + name: TName, + config?: SingleStoreDateConfig, +): Equal extends true ? 
SingleStoreDateStringBuilderInitial + : SingleStoreDateBuilderInitial; +export function date(name: string, config: SingleStoreDateConfig = {}) { + if (config.mode === 'string') { + return new SingleStoreDateStringBuilder(name); + } + return new SingleStoreDateBuilder(name); +} diff --git a/drizzle-orm/src/singlestore-core/columns/datetime.ts b/drizzle-orm/src/singlestore-core/columns/datetime.ts new file mode 100644 index 000000000..0b000c030 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/datetime.ts @@ -0,0 +1,150 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { sql } from '~/sql/sql.ts'; +import type { Equal } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreDateTimeBuilderInitial = SingleStoreDateTimeBuilder<{ + name: TName; + dataType: 'date'; + columnType: 'SingleStoreDateTime'; + data: Date; + driverParam: string | number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreDateTimeBuilder> + extends SingleStoreColumnBuilder +{ + static readonly [entityKind]: string = 'SingleStoreDateTimeBuilder'; + + constructor(name: T['name'], config: SingleStoreDatetimeConfig | undefined) { + super(name, 'date', 'SingleStoreDateTime'); + this.config.fsp = config?.fsp; + } + + defaultNow(fsp?: DatetimeFsp | undefined) { + return fsp + ? this.default(sql`(now(${fsp}))`) + : this.default(sql`(now())`); + } + + defaultCurrentTimestamp(fsp?: DatetimeFsp | undefined) { + return fsp + ? 
this.default(sql`(current_timestamp(${fsp}))`) + : this.default(sql`(current_timestamp())`); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreDateTime> { + return new SingleStoreDateTime>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreDateTime> + extends SingleStoreColumn +{ + static readonly [entityKind]: string = 'SingleStoreDateTime'; + + readonly fsp: number | undefined; + + constructor( + table: AnySingleStoreTable<{ name: T['tableName'] }>, + config: SingleStoreDateTimeBuilder['config'], + ) { + super(table, config); + this.fsp = config.fsp; + } + + getSQLType(): string { + const precision = this.fsp === undefined ? '' : `(${this.fsp})`; + return `datetime${precision}`; + } + + override mapToDriverValue(value: Date): unknown { + return value.toISOString().replace('T', ' ').replace('Z', ''); + } + + override mapFromDriverValue(value: string): Date { + return new Date(value.replace(' ', 'T') + 'Z'); + } +} + +export type SingleStoreDateTimeStringBuilderInitial = SingleStoreDateTimeStringBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreDateTimeString'; + data: string; + driverParam: string | number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreDateTimeStringBuilder> + extends SingleStoreColumnBuilder +{ + static readonly [entityKind]: string = 'SingleStoreDateTimeStringBuilder'; + + constructor(name: T['name'], config: SingleStoreDatetimeConfig | undefined) { + super(name, 'string', 'SingleStoreDateTimeString'); + this.config.fsp = config?.fsp; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreDateTimeString> { + return new SingleStoreDateTimeString>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreDateTimeString> + extends SingleStoreColumn +{ + static readonly [entityKind]: string = 
'SingleStoreDateTimeString'; + + readonly fsp: number | undefined; + + constructor( + table: AnySingleStoreTable<{ name: T['tableName'] }>, + config: SingleStoreDateTimeStringBuilder['config'], + ) { + super(table, config); + this.fsp = config.fsp; + } + + getSQLType(): string { + const precision = this.fsp === undefined ? '' : `(${this.fsp})`; + return `datetime${precision}`; + } +} + +export type DatetimeFsp = 0 | 1 | 2 | 3 | 4 | 5 | 6; + +export interface SingleStoreDatetimeConfig { + mode?: TMode; + fsp?: DatetimeFsp; +} + +export function datetime( + name: TName, + config?: SingleStoreDatetimeConfig, +): Equal extends true ? SingleStoreDateTimeStringBuilderInitial + : SingleStoreDateTimeBuilderInitial; +export function datetime(name: string, config: SingleStoreDatetimeConfig = {}) { + if (config.mode === 'string') { + return new SingleStoreDateTimeStringBuilder(name, config); + } + return new SingleStoreDateTimeBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/decimal.ts b/drizzle-orm/src/singlestore-core/columns/decimal.ts new file mode 100644 index 000000000..e5095c4d8 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/decimal.ts @@ -0,0 +1,68 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; + +export type SingleStoreDecimalBuilderInitial = SingleStoreDecimalBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreDecimal'; + data: string; + driverParam: string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreDecimalBuilder< + T extends ColumnBuilderBaseConfig<'string', 'SingleStoreDecimal'>, +> extends 
SingleStoreColumnBuilderWithAutoIncrement { + static readonly [entityKind]: string = 'SingleStoreDecimalBuilder'; + + constructor(name: T['name'], precision?: number, scale?: number) { + super(name, 'string', 'SingleStoreDecimal'); + this.config.precision = precision; + this.config.scale = scale; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreDecimal> { + return new SingleStoreDecimal>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreDecimal> + extends SingleStoreColumnWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreDecimal'; + + readonly precision: number | undefined = this.config.precision; + readonly scale: number | undefined = this.config.scale; + + getSQLType(): string { + if (this.precision !== undefined && this.scale !== undefined) { + return `decimal(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + return 'decimal'; + } else { + return `decimal(${this.precision})`; + } + } +} + +export interface SingleStoreDecimalConfig { + precision?: number; + scale?: number; +} + +export function decimal( + name: TName, + config: SingleStoreDecimalConfig = {}, +): SingleStoreDecimalBuilderInitial { + return new SingleStoreDecimalBuilder(name, config.precision, config.scale); +} diff --git a/drizzle-orm/src/singlestore-core/columns/double.ts b/drizzle-orm/src/singlestore-core/columns/double.ts new file mode 100644 index 000000000..6ca945431 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/double.ts @@ -0,0 +1,68 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from 
'./common.ts'; + +export type SingleStoreDoubleBuilderInitial = SingleStoreDoubleBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreDouble'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreDoubleBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreDoubleBuilder'; + + constructor(name: T['name'], config: SingleStoreDoubleConfig | undefined) { + super(name, 'number', 'SingleStoreDouble'); + this.config.precision = config?.precision; + this.config.scale = config?.scale; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreDouble> { + return new SingleStoreDouble>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreDouble> + extends SingleStoreColumnWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreDouble'; + + precision: number | undefined = this.config.precision; + scale: number | undefined = this.config.scale; + + getSQLType(): string { + if (this.precision !== undefined && this.scale !== undefined) { + return `double(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + return 'double'; + } else { + return `double(${this.precision})`; + } + } +} + +export interface SingleStoreDoubleConfig { + precision?: number; + scale?: number; +} + +export function double( + name: TName, + config?: SingleStoreDoubleConfig, +): SingleStoreDoubleBuilderInitial { + return new SingleStoreDoubleBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/enum.ts b/drizzle-orm/src/singlestore-core/columns/enum.ts new file mode 100644 index 000000000..af04c50a5 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/enum.ts @@ -0,0 +1,61 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from 
'~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import type { Writable } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreEnumColumnBuilderInitial = + SingleStoreEnumColumnBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreEnumColumn'; + data: TEnum[number]; + driverParam: string; + enumValues: TEnum; + generated: undefined; + }>; + +export class SingleStoreEnumColumnBuilder> + extends SingleStoreColumnBuilder +{ + static readonly [entityKind]: string = 'SingleStoreEnumColumnBuilder'; + + constructor(name: T['name'], values: T['enumValues']) { + super(name, 'string', 'SingleStoreEnumColumn'); + this.config.enumValues = values; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreEnumColumn & { enumValues: T['enumValues'] }> { + return new SingleStoreEnumColumn & { enumValues: T['enumValues'] }>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreEnumColumn> + extends SingleStoreColumn +{ + static readonly [entityKind]: string = 'SingleStoreEnumColumn'; + + override readonly enumValues = this.config.enumValues; + + getSQLType(): string { + return `enum(${this.enumValues!.map((value) => `'${value}'`).join(',')})`; + } +} + +export function singlestoreEnum>( + name: TName, + values: T | Writable, +): SingleStoreEnumColumnBuilderInitial> { + if (values.length === 0) { + throw new Error(`You have an empty array for "${name}" enum values`); + } + + return new SingleStoreEnumColumnBuilder(name, values); +} diff --git a/drizzle-orm/src/singlestore-core/columns/float.ts b/drizzle-orm/src/singlestore-core/columns/float.ts new file mode 100644 index 000000000..a54ee06a1 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/float.ts @@ 
-0,0 +1,49 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; + +export type SingleStoreFloatBuilderInitial = SingleStoreFloatBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreFloat'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreFloatBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreFloatBuilder'; + + constructor(name: T['name']) { + super(name, 'number', 'SingleStoreFloat'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreFloat> { + return new SingleStoreFloat>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreFloat> + extends SingleStoreColumnWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreFloat'; + + getSQLType(): string { + return 'float'; + } +} + +export function float(name: TName): SingleStoreFloatBuilderInitial { + return new SingleStoreFloatBuilder(name); +} diff --git a/drizzle-orm/src/singlestore-core/columns/index.ts b/drizzle-orm/src/singlestore-core/columns/index.ts new file mode 100644 index 000000000..b51f0fac4 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/index.ts @@ -0,0 +1,25 @@ +export * from './bigint.ts'; +export * from './binary.ts'; +export * from './boolean.ts'; +export * from './char.ts'; +export * from './common.ts'; +export * from './custom.ts'; +export * from './date.ts'; +export * from './datetime.ts'; +export * from './decimal.ts'; +export * from './double.ts'; +export * from 
'./enum.ts'; +export * from './float.ts'; +export * from './int.ts'; +export * from './json.ts'; +export * from './mediumint.ts'; +export * from './real.ts'; +export * from './serial.ts'; +export * from './smallint.ts'; +export * from './text.ts'; +export * from './time.ts'; +export * from './timestamp.ts'; +export * from './tinyint.ts'; +export * from './varbinary.ts'; +export * from './varchar.ts'; +export * from './year.ts'; diff --git a/drizzle-orm/src/singlestore-core/columns/int.ts b/drizzle-orm/src/singlestore-core/columns/int.ts new file mode 100644 index 000000000..e9ca0a682 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/int.ts @@ -0,0 +1,64 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; + +export type SingleStoreIntBuilderInitial = SingleStoreIntBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreIntBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreIntBuilder'; + + constructor(name: T['name'], config?: SingleStoreIntConfig) { + super(name, 'number', 'SingleStoreInt'); + this.config.unsigned = config ? 
config.unsigned : false; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreInt> { + return new SingleStoreInt>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreInt> + extends SingleStoreColumnWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreInt'; + + getSQLType(): string { + return `int${this.config.unsigned ? ' unsigned' : ''}`; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'string') { + return Number(value); + } + return value; + } +} + +export interface SingleStoreIntConfig { + unsigned?: boolean; +} + +export function int( + name: TName, + config?: SingleStoreIntConfig, +): SingleStoreIntBuilderInitial { + return new SingleStoreIntBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/json.ts b/drizzle-orm/src/singlestore-core/columns/json.ts new file mode 100644 index 000000000..0b069f256 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/json.ts @@ -0,0 +1,51 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreJsonBuilderInitial = SingleStoreJsonBuilder<{ + name: TName; + dataType: 'json'; + columnType: 'SingleStoreJson'; + data: unknown; + driverParam: string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreJsonBuilder> + extends SingleStoreColumnBuilder +{ + static readonly [entityKind]: string = 'SingleStoreJsonBuilder'; + + constructor(name: T['name']) { + super(name, 'json', 'SingleStoreJson'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: 
TTableName }>, + ): SingleStoreJson> { + return new SingleStoreJson>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreJson> extends SingleStoreColumn { + static readonly [entityKind]: string = 'SingleStoreJson'; + + getSQLType(): string { + return 'json'; + } + + override mapToDriverValue(value: T['data']): string { + return JSON.stringify(value); + } +} + +export function json(name: TName): SingleStoreJsonBuilderInitial { + return new SingleStoreJsonBuilder(name); +} diff --git a/drizzle-orm/src/singlestore-core/columns/mediumint.ts b/drizzle-orm/src/singlestore-core/columns/mediumint.ts new file mode 100644 index 000000000..b963ee6e5 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/mediumint.ts @@ -0,0 +1,61 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; +import type { SingleStoreIntConfig } from './int.ts'; + +export type SingleStoreMediumIntBuilderInitial = SingleStoreMediumIntBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreMediumInt'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreMediumIntBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreMediumIntBuilder'; + + constructor(name: T['name'], config?: SingleStoreIntConfig) { + super(name, 'number', 'SingleStoreMediumInt'); + this.config.unsigned = config ? 
config.unsigned : false; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreMediumInt> { + return new SingleStoreMediumInt>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreMediumInt> + extends SingleStoreColumnWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreMediumInt'; + + getSQLType(): string { + return `mediumint${this.config.unsigned ? ' unsigned' : ''}`; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'string') { + return Number(value); + } + return value; + } +} + +export function mediumint( + name: TName, + config?: SingleStoreIntConfig, +): SingleStoreMediumIntBuilderInitial { + return new SingleStoreMediumIntBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/real.ts b/drizzle-orm/src/singlestore-core/columns/real.ts new file mode 100644 index 000000000..cc66f6c56 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/real.ts @@ -0,0 +1,74 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; + +export type SingleStoreRealBuilderInitial = SingleStoreRealBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreReal'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreRealBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement< + T, + SingleStoreRealConfig + > +{ + static readonly [entityKind]: string = 'SingleStoreRealBuilder'; + + constructor(name: T['name'], config: SingleStoreRealConfig | undefined) { + 
super(name, 'number', 'SingleStoreReal'); + this.config.precision = config?.precision; + this.config.scale = config?.scale; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreReal> { + return new SingleStoreReal>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreReal> + extends SingleStoreColumnWithAutoIncrement< + T, + SingleStoreRealConfig + > +{ + static readonly [entityKind]: string = 'SingleStoreReal'; + + precision: number | undefined = this.config.precision; + scale: number | undefined = this.config.scale; + + getSQLType(): string { + if (this.precision !== undefined && this.scale !== undefined) { + return `real(${this.precision}, ${this.scale})`; + } else if (this.precision === undefined) { + return 'real'; + } else { + return `real(${this.precision})`; + } + } +} + +export interface SingleStoreRealConfig { + precision?: number; + scale?: number; +} + +export function real( + name: TName, + config: SingleStoreRealConfig = {}, +): SingleStoreRealBuilderInitial { + return new SingleStoreRealBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/serial.ts b/drizzle-orm/src/singlestore-core/columns/serial.ts new file mode 100644 index 000000000..30fb7a40e --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/serial.ts @@ -0,0 +1,74 @@ +import type { + ColumnBuilderBaseConfig, + ColumnBuilderRuntimeConfig, + HasDefault, + IsAutoincrement, + IsPrimaryKey, + MakeColumnConfig, + NotNull, +} from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; + +export type SingleStoreSerialBuilderInitial = IsAutoincrement< + IsPrimaryKey< + NotNull< + HasDefault< + SingleStoreSerialBuilder<{ + 
name: TName; + dataType: 'number'; + columnType: 'SingleStoreSerial'; + data: number; + driverParam: number; + enumValues: undefined; + generated: undefined; + }> + > + > + > +>; + +export class SingleStoreSerialBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreSerialBuilder'; + + constructor(name: T['name']) { + super(name, 'number', 'SingleStoreSerial'); + this.config.hasDefault = true; + this.config.autoIncrement = true; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreSerial> { + return new SingleStoreSerial>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreSerial< + T extends ColumnBaseConfig<'number', 'SingleStoreSerial'>, +> extends SingleStoreColumnWithAutoIncrement { + static readonly [entityKind]: string = 'SingleStoreSerial'; + + getSQLType(): string { + return 'serial'; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'string') { + return Number(value); + } + return value; + } +} + +export function serial(name: TName): SingleStoreSerialBuilderInitial { + return new SingleStoreSerialBuilder(name) as SingleStoreSerialBuilderInitial; +} diff --git a/drizzle-orm/src/singlestore-core/columns/smallint.ts b/drizzle-orm/src/singlestore-core/columns/smallint.ts new file mode 100644 index 000000000..02e172608 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/smallint.ts @@ -0,0 +1,61 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; +import type { SingleStoreIntConfig } from './int.ts'; + +export 
type SingleStoreSmallIntBuilderInitial = SingleStoreSmallIntBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreSmallInt'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreSmallIntBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreSmallIntBuilder'; + + constructor(name: T['name'], config?: SingleStoreIntConfig) { + super(name, 'number', 'SingleStoreSmallInt'); + this.config.unsigned = config ? config.unsigned : false; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreSmallInt> { + return new SingleStoreSmallInt>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreSmallInt> + extends SingleStoreColumnWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreSmallInt'; + + getSQLType(): string { + return `smallint${this.config.unsigned ? 
' unsigned' : ''}`; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'string') { + return Number(value); + } + return value; + } +} + +export function smallint( + name: TName, + config?: SingleStoreIntConfig, +): SingleStoreSmallIntBuilderInitial { + return new SingleStoreSmallIntBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/text.ts b/drizzle-orm/src/singlestore-core/columns/text.ts new file mode 100644 index 000000000..8c40039f0 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/text.ts @@ -0,0 +1,90 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import type { Writable } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreTextColumnType = 'tinytext' | 'text' | 'mediumtext' | 'longtext'; + +export type SingleStoreTextBuilderInitial = + SingleStoreTextBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreText'; + data: TEnum[number]; + driverParam: string; + enumValues: TEnum; + generated: undefined; + }>; + +export class SingleStoreTextBuilder> + extends SingleStoreColumnBuilder< + T, + { textType: SingleStoreTextColumnType; enumValues: T['enumValues'] } + > +{ + static readonly [entityKind]: string = 'SingleStoreTextBuilder'; + + constructor(name: T['name'], textType: SingleStoreTextColumnType, config: SingleStoreTextConfig) { + super(name, 'string', 'SingleStoreText'); + this.config.textType = textType; + this.config.enumValues = config.enum; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreText> { + return new SingleStoreText>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } 
+} + +export class SingleStoreText> + extends SingleStoreColumn +{ + static readonly [entityKind]: string = 'SingleStoreText'; + + private textType: SingleStoreTextColumnType = this.config.textType; + + override readonly enumValues = this.config.enumValues; + + getSQLType(): string { + return this.textType; + } +} + +export interface SingleStoreTextConfig { + enum?: TEnum; +} + +export function text>( + name: TName, + config: SingleStoreTextConfig> = {}, +): SingleStoreTextBuilderInitial> { + return new SingleStoreTextBuilder(name, 'text', config); +} + +export function tinytext>( + name: TName, + config: SingleStoreTextConfig> = {}, +): SingleStoreTextBuilderInitial> { + return new SingleStoreTextBuilder(name, 'tinytext', config); +} + +export function mediumtext>( + name: TName, + config: SingleStoreTextConfig> = {}, +): SingleStoreTextBuilderInitial> { + return new SingleStoreTextBuilder(name, 'mediumtext', config); +} + +export function longtext>( + name: TName, + config: SingleStoreTextConfig> = {}, +): SingleStoreTextBuilderInitial> { + return new SingleStoreTextBuilder(name, 'longtext', config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/time.ts b/drizzle-orm/src/singlestore-core/columns/time.ts new file mode 100644 index 000000000..a5259adbb --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/time.ts @@ -0,0 +1,63 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreTimeBuilderInitial = SingleStoreTimeBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreTime'; + data: string; + driverParam: string | number; + enumValues: undefined; + generated: undefined; +}>; + +export class 
SingleStoreTimeBuilder> + extends SingleStoreColumnBuilder< + T, + TimeConfig + > +{ + static readonly [entityKind]: string = 'SingleStoreTimeBuilder'; + + constructor( + name: T['name'], + config: TimeConfig | undefined, + ) { + super(name, 'string', 'SingleStoreTime'); + this.config.fsp = config?.fsp; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreTime> { + return new SingleStoreTime>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreTime< + T extends ColumnBaseConfig<'string', 'SingleStoreTime'>, +> extends SingleStoreColumn { + static readonly [entityKind]: string = 'SingleStoreTime'; + + readonly fsp: number | undefined = this.config.fsp; + + getSQLType(): string { + const precision = this.fsp === undefined ? '' : `(${this.fsp})`; + return `time${precision}`; + } +} + +export type TimeConfig = { + fsp?: 0 | 1 | 2 | 3 | 4 | 5 | 6; +}; + +export function time(name: TName, config?: TimeConfig): SingleStoreTimeBuilderInitial { + return new SingleStoreTimeBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/timestamp.ts b/drizzle-orm/src/singlestore-core/columns/timestamp.ts new file mode 100644 index 000000000..db770b6c7 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/timestamp.ts @@ -0,0 +1,121 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import type { Equal } from '~/utils.ts'; +import { SingleStoreDateBaseColumn, SingleStoreDateColumnBaseBuilder } from './date.common.ts'; + +export type SingleStoreTimestampBuilderInitial = SingleStoreTimestampBuilder<{ + name: TName; + dataType: 'date'; + columnType: 'SingleStoreTimestamp'; + data: Date; + driverParam: string | number; + 
enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreTimestampBuilder> + extends SingleStoreDateColumnBaseBuilder +{ + static readonly [entityKind]: string = 'SingleStoreTimestampBuilder'; + + constructor(name: T['name'], config: SingleStoreTimestampConfig | undefined) { + super(name, 'date', 'SingleStoreTimestamp'); + this.config.fsp = config?.fsp; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreTimestamp> { + return new SingleStoreTimestamp>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreTimestamp> + extends SingleStoreDateBaseColumn +{ + static readonly [entityKind]: string = 'SingleStoreTimestamp'; + + readonly fsp: number | undefined = this.config.fsp; + + getSQLType(): string { + const precision = this.fsp === undefined ? '' : `(${this.fsp})`; + return `timestamp${precision}`; + } + + override mapFromDriverValue(value: string): Date { + return new Date(value + '+0000'); + } + + override mapToDriverValue(value: Date): string { + return value.toISOString().slice(0, -1).replace('T', ' '); + } +} + +export type SingleStoreTimestampStringBuilderInitial = SingleStoreTimestampStringBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreTimestampString'; + data: string; + driverParam: string | number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreTimestampStringBuilder< + T extends ColumnBuilderBaseConfig<'string', 'SingleStoreTimestampString'>, +> extends SingleStoreDateColumnBaseBuilder { + static readonly [entityKind]: string = 'SingleStoreTimestampStringBuilder'; + + constructor(name: T['name'], config: SingleStoreTimestampConfig | undefined) { + super(name, 'string', 'SingleStoreTimestampString'); + this.config.fsp = config?.fsp; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreTimestampString> { + return new 
SingleStoreTimestampString>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreTimestampString> + extends SingleStoreDateBaseColumn +{ + static readonly [entityKind]: string = 'SingleStoreTimestampString'; + + readonly fsp: number | undefined = this.config.fsp; + + getSQLType(): string { + const precision = this.fsp === undefined ? '' : `(${this.fsp})`; + return `timestamp${precision}`; + } +} + +export type TimestampFsp = 0 | 1 | 2 | 3 | 4 | 5 | 6; + +export interface SingleStoreTimestampConfig { + mode?: TMode; + fsp?: TimestampFsp; +} + +export function timestamp( + name: TName, + config?: SingleStoreTimestampConfig, +): Equal extends true ? SingleStoreTimestampStringBuilderInitial + : SingleStoreTimestampBuilderInitial; +export function timestamp(name: string, config: SingleStoreTimestampConfig = {}) { + if (config.mode === 'string') { + return new SingleStoreTimestampStringBuilder(name, config); + } + return new SingleStoreTimestampBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/tinyint.ts b/drizzle-orm/src/singlestore-core/columns/tinyint.ts new file mode 100644 index 000000000..d911cae96 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/tinyint.ts @@ -0,0 +1,61 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; +import type { SingleStoreIntConfig } from './int.ts'; + +export type SingleStoreTinyIntBuilderInitial = SingleStoreTinyIntBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreTinyInt'; + data: number; + driverParam: number | string; + enumValues: undefined; + generated: undefined; +}>; + +export class 
SingleStoreTinyIntBuilder> + extends SingleStoreColumnBuilderWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreTinyIntBuilder'; + + constructor(name: T['name'], config?: SingleStoreIntConfig) { + super(name, 'number', 'SingleStoreTinyInt'); + this.config.unsigned = config ? config.unsigned : false; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreTinyInt> { + return new SingleStoreTinyInt>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreTinyInt> + extends SingleStoreColumnWithAutoIncrement +{ + static readonly [entityKind]: string = 'SingleStoreTinyInt'; + + getSQLType(): string { + return `tinyint${this.config.unsigned ? ' unsigned' : ''}`; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'string') { + return Number(value); + } + return value; + } +} + +export function tinyint( + name: TName, + config?: SingleStoreIntConfig, +): SingleStoreTinyIntBuilderInitial { + return new SingleStoreTinyIntBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/varbinary.ts b/drizzle-orm/src/singlestore-core/columns/varbinary.ts new file mode 100644 index 000000000..545b87476 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/varbinary.ts @@ -0,0 +1,60 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreVarBinaryBuilderInitial = SingleStoreVarBinaryBuilder<{ + name: TName; + dataType: 'string'; + columnType: 'SingleStoreVarBinary'; + data: string; + driverParam: string; + enumValues: undefined; + generated: undefined; +}>; + +export class 
SingleStoreVarBinaryBuilder> + extends SingleStoreColumnBuilder +{ + static readonly [entityKind]: string = 'SingleStoreVarBinaryBuilder'; + + /** @internal */ + constructor(name: T['name'], config: SingleStoreVarbinaryOptions) { + super(name, 'string', 'SingleStoreVarBinary'); + this.config.length = config?.length; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreVarBinary> { + return new SingleStoreVarBinary>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreVarBinary< + T extends ColumnBaseConfig<'string', 'SingleStoreVarBinary'>, +> extends SingleStoreColumn { + static readonly [entityKind]: string = 'SingleStoreVarBinary'; + + length: number | undefined = this.config.length; + + getSQLType(): string { + return this.length === undefined ? `varbinary` : `varbinary(${this.length})`; + } +} + +export interface SingleStoreVarbinaryOptions { + length: number; +} + +export function varbinary( + name: TName, + options: SingleStoreVarbinaryOptions, +): SingleStoreVarBinaryBuilderInitial { + return new SingleStoreVarBinaryBuilder(name, options); +} diff --git a/drizzle-orm/src/singlestore-core/columns/varchar.ts b/drizzle-orm/src/singlestore-core/columns/varchar.ts new file mode 100644 index 000000000..415b3c27b --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/varchar.ts @@ -0,0 +1,68 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import type { Writable } from '~/utils.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreVarCharBuilderInitial = + SingleStoreVarCharBuilder< + { + name: TName; + dataType: 'string'; + columnType: 'SingleStoreVarChar'; + data: 
TEnum[number]; + driverParam: number | string; + enumValues: TEnum; + generated: undefined; + } + >; + +export class SingleStoreVarCharBuilder> + extends SingleStoreColumnBuilder> +{ + static readonly [entityKind]: string = 'SingleStoreVarCharBuilder'; + + /** @internal */ + constructor(name: T['name'], config: SingleStoreVarCharConfig) { + super(name, 'string', 'SingleStoreVarChar'); + this.config.length = config.length; + this.config.enum = config.enum; + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreVarChar & { enumValues: T['enumValues'] }> { + return new SingleStoreVarChar & { enumValues: T['enumValues'] }>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreVarChar> + extends SingleStoreColumn> +{ + static readonly [entityKind]: string = 'SingleStoreVarChar'; + + readonly length: number | undefined = this.config.length; + + override readonly enumValues = this.config.enum; + + getSQLType(): string { + return this.length === undefined ? 
`varchar` : `varchar(${this.length})`; + } +} + +export interface SingleStoreVarCharConfig { + length: number; + enum?: TEnum; +} + +export function varchar>( + name: TName, + config: SingleStoreVarCharConfig>, +): SingleStoreVarCharBuilderInitial> { + return new SingleStoreVarCharBuilder(name, config); +} diff --git a/drizzle-orm/src/singlestore-core/columns/year.ts b/drizzle-orm/src/singlestore-core/columns/year.ts new file mode 100644 index 000000000..c774b44d8 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/columns/year.ts @@ -0,0 +1,49 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; +import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; + +export type SingleStoreYearBuilderInitial = SingleStoreYearBuilder<{ + name: TName; + dataType: 'number'; + columnType: 'SingleStoreYear'; + data: number; + driverParam: number; + enumValues: undefined; + generated: undefined; +}>; + +export class SingleStoreYearBuilder> + extends SingleStoreColumnBuilder +{ + static readonly [entityKind]: string = 'SingleStoreYearBuilder'; + + constructor(name: T['name']) { + super(name, 'number', 'SingleStoreYear'); + } + + /** @internal */ + override build( + table: AnySingleStoreTable<{ name: TTableName }>, + ): SingleStoreYear> { + return new SingleStoreYear>( + table, + this.config as ColumnBuilderRuntimeConfig, + ); + } +} + +export class SingleStoreYear< + T extends ColumnBaseConfig<'number', 'SingleStoreYear'>, +> extends SingleStoreColumn { + static readonly [entityKind]: string = 'SingleStoreYear'; + + getSQLType(): string { + return `year`; + } +} + +export function year(name: TName): SingleStoreYearBuilderInitial { + return new SingleStoreYearBuilder(name); +} diff --git a/drizzle-orm/src/singlestore-core/db.ts 
b/drizzle-orm/src/singlestore-core/db.ts new file mode 100644 index 000000000..581b3fa1b --- /dev/null +++ b/drizzle-orm/src/singlestore-core/db.ts @@ -0,0 +1,566 @@ +import type { ResultSetHeader } from 'mysql2/promise'; +import { entityKind } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { ExtractTablesWithRelations, RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { ColumnsSelection, SQLWrapper } from '~/sql/sql.ts'; +import { WithSubquery } from '~/subquery.ts'; +import type { DrizzleTypeError } from '~/utils.ts'; +import type { SingleStoreDialect } from './dialect.ts'; +import { SingleStoreAttachBase } from './query-builders/attach.ts'; +import { SingleStoreBranchBase } from './query-builders/branch.ts'; +import { SingleStoreCreateMilestoneBase } from './query-builders/createMilestone.ts'; +import { SingleStoreDetachBase } from './query-builders/detach.ts'; +import { SingleStoreDropMilestoneBase } from './query-builders/dropMilestone.ts'; +import { + QueryBuilder, + SingleStoreDeleteBase, + SingleStoreInsertBuilder, + SingleStoreSelectBuilder, + SingleStoreUpdateBuilder, +} from './query-builders/index.ts'; +import { SingleStoreOptimizeTableBase } from './query-builders/optimizeTable.ts'; +import type { OptimizeTableArgument } from './query-builders/optimizeTable.types.ts'; +import { RelationalQueryBuilder } from './query-builders/query.ts'; +import type { SelectedFields } from './query-builders/select.types.ts'; +import type { + PreparedQueryHKTBase, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, + SingleStoreTransaction, + SingleStoreTransactionConfig, +} from './session.ts'; +import type { WithSubqueryWithSelection } from './subquery.ts'; +import type { SingleStoreTable } from './table.ts'; + +export class SingleStoreDatabase< + TQueryResult extends 
SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TFullSchema extends Record = {}, + TSchema extends TablesRelationalConfig = ExtractTablesWithRelations, +> { + static readonly [entityKind]: string = 'SingleStoreDatabase'; + + declare readonly _: { + readonly schema: TSchema | undefined; + readonly fullSchema: TFullSchema; + readonly tableNamesMap: Record; + }; + + query: TFullSchema extends Record + ? DrizzleTypeError<'Seems like the schema generic is missing - did you forget to add it to your DB type?'> + : { + [K in keyof TSchema]: RelationalQueryBuilder; + }; + + constructor( + /** @internal */ + readonly dialect: SingleStoreDialect, + /** @internal */ + readonly session: SingleStoreSession, + schema: RelationalSchemaConfig | undefined, + ) { + this._ = schema + ? { + schema: schema.schema, + fullSchema: schema.fullSchema as TFullSchema, + tableNamesMap: schema.tableNamesMap, + } + : { + schema: undefined, + fullSchema: {} as TFullSchema, + tableNamesMap: {}, + }; + this.query = {} as typeof this['query']; + if (this._.schema) { + for (const [tableName, columns] of Object.entries(this._.schema)) { + (this.query as SingleStoreDatabase>['query'])[tableName] = + new RelationalQueryBuilder( + schema!.fullSchema, + this._.schema, + this._.tableNamesMap, + schema!.fullSchema[tableName] as SingleStoreTable, + columns, + dialect, + session, + ); + } + } + } + + /** + * Creates a subquery that defines a temporary named result set as a CTE. + * + * It is useful for breaking down complex queries into simpler parts and for reusing the result set in subsequent parts of the query. + * + * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} + * + * @param alias The alias for the subquery. + * + * Failure to provide an alias will result in a DrizzleTypeError, preventing the subquery from being referenced in other queries. 
+ * + * @example + * + * ```ts + * // Create a subquery with alias 'sq' and use it in the select query + * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); + * + * const result = await db.with(sq).select().from(sq); + * ``` + * + * To select arbitrary SQL values as fields in a CTE and reference them in other CTEs or in the main query, you need to add aliases to them: + * + * ```ts + * // Select an arbitrary SQL value as a field in a CTE and reference it in the main query + * const sq = db.$with('sq').as(db.select({ + * name: sql`upper(${users.name})`.as('name'), + * }) + * .from(users)); + * + * const result = await db.with(sq).select({ name: sq.name }).from(sq); + * ``` + */ + $with(alias: TAlias) { + return { + as( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithSelection { + if (typeof qb === 'function') { + qb = qb(new QueryBuilder()); + } + + return new Proxy( + new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as WithSubqueryWithSelection; + }, + }; + } + + /** + * Incorporates a previously defined CTE (using `$with`) into the main query. + * + * This method allows the main query to reference a temporary named result set. + * + * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} + * + * @param queries The CTEs to incorporate into the main query. + * + * @example + * + * ```ts + * // Define a subquery 'sq' as a CTE using $with + * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); + * + * // Incorporate the CTE 'sq' into the main query and select from it + * const result = await db.with(sq).select().from(sq); + * ``` + */ + with(...queries: WithSubquery[]) { + const self = this; + + /** + * Creates a select query. + * + * Calling this method with no arguments will select all columns from the table. 
Pass a selection object to specify the columns you want to select. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select} + * + * @param fields The selection object. + * + * @example + * + * ```ts + * // Select all columns and all rows from the 'cars' table + * const allCars: Car[] = await db.select().from(cars); + * + * // Select specific columns and all rows from the 'cars' table + * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ + * id: cars.id, + * brand: cars.brand + * }) + * .from(cars); + * ``` + * + * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: + * + * ```ts + * // Select specific columns along with expression and all rows from the 'cars' table + * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ + * id: cars.id, + * lowerBrand: sql`lower(${cars.brand})`, + * }) + * .from(cars); + * ``` + */ + function select(): SingleStoreSelectBuilder; + function select( + fields: TSelection, + ): SingleStoreSelectBuilder; + function select(fields?: SelectedFields): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? undefined, + session: self.session, + dialect: self.dialect, + withList: queries, + }); + } + + /** + * Adds `distinct` expression to the select query. + * + * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select#distinct} + * + * @param fields The selection object. 
+ * + * @example + * ```ts + * // Select all unique rows from the 'cars' table + * await db.selectDistinct() + * .from(cars) + * .orderBy(cars.id, cars.brand, cars.color); + * + * // Select all unique brands from the 'cars' table + * await db.selectDistinct({ brand: cars.brand }) + * .from(cars) + * .orderBy(cars.brand); + * ``` + */ + function selectDistinct(): SingleStoreSelectBuilder; + function selectDistinct( + fields: TSelection, + ): SingleStoreSelectBuilder; + function selectDistinct( + fields?: SelectedFields, + ): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? undefined, + session: self.session, + dialect: self.dialect, + withList: queries, + distinct: true, + }); + } + + /** + * Creates an update query. + * + * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. + * + * Use `.set()` method to specify which values to update. + * + * See docs: {@link https://orm.drizzle.team/docs/update} + * + * @param table The table to update. + * + * @example + * + * ```ts + * // Update all rows in the 'cars' table + * await db.update(cars).set({ color: 'red' }); + * + * // Update rows with filters and conditions + * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); + * ``` + */ + function update( + table: TTable, + ): SingleStoreUpdateBuilder { + return new SingleStoreUpdateBuilder(table, self.session, self.dialect, queries); + } + + /** + * Creates a delete query. + * + * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param table The table to delete from. 
+ * + * @example + * + * ```ts + * // Delete all rows in the 'cars' table + * await db.delete(cars); + * + * // Delete rows with filters and conditions + * await db.delete(cars).where(eq(cars.color, 'green')); + * ``` + */ + function delete_( + table: TTable, + ): SingleStoreDeleteBase { + return new SingleStoreDeleteBase(table, self.session, self.dialect, queries); + } + + return { select, selectDistinct, update, delete: delete_ }; + } + + /** + * Creates a select query. + * + * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select} + * + * @param fields The selection object. + * + * @example + * + * ```ts + * // Select all columns and all rows from the 'cars' table + * const allCars: Car[] = await db.select().from(cars); + * + * // Select specific columns and all rows from the 'cars' table + * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ + * id: cars.id, + * brand: cars.brand + * }) + * .from(cars); + * ``` + * + * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: + * + * ```ts + * // Select specific columns along with expression and all rows from the 'cars' table + * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ + * id: cars.id, + * lowerBrand: sql`lower(${cars.brand})`, + * }) + * .from(cars); + * ``` + */ + select(): SingleStoreSelectBuilder; + select( + fields: TSelection, + ): SingleStoreSelectBuilder; + select(fields?: SelectedFields): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect }); + } + + /** + * Adds `distinct` expression to the select query. + * + * Calling this method will return only unique values. 
When multiple columns are selected, it returns rows with unique combinations of values in these columns. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select#distinct} + * + * @param fields The selection object. + * + * @example + * ```ts + * // Select all unique rows from the 'cars' table + * await db.selectDistinct() + * .from(cars) + * .orderBy(cars.id, cars.brand, cars.color); + * + * // Select all unique brands from the 'cars' table + * await db.selectDistinct({ brand: cars.brand }) + * .from(cars) + * .orderBy(cars.brand); + * ``` + */ + selectDistinct(): SingleStoreSelectBuilder; + selectDistinct( + fields: TSelection, + ): SingleStoreSelectBuilder; + selectDistinct(fields?: SelectedFields): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? undefined, + session: this.session, + dialect: this.dialect, + distinct: true, + }); + } + + /** + * Creates an update query. + * + * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. + * + * Use `.set()` method to specify which values to update. + * + * See docs: {@link https://orm.drizzle.team/docs/update} + * + * @param table The table to update. + * + * @example + * + * ```ts + * // Update all rows in the 'cars' table + * await db.update(cars).set({ color: 'red' }); + * + * // Update rows with filters and conditions + * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); + * ``` + */ + update( + table: TTable, + ): SingleStoreUpdateBuilder { + return new SingleStoreUpdateBuilder(table, this.session, this.dialect); + } + + /** + * Creates an insert query. + * + * Calling this method will create new rows in a table. Use `.values()` method to specify which values to insert. + * + * See docs: {@link https://orm.drizzle.team/docs/insert} + * + * @param table The table to insert into. 
+ * + * @example + * + * ```ts + * // Insert one row + * await db.insert(cars).values({ brand: 'BMW' }); + * + * // Insert multiple rows + * await db.insert(cars).values([{ brand: 'BMW' }, { brand: 'Porsche' }]); + * ``` + */ + insert( + table: TTable, + ): SingleStoreInsertBuilder { + return new SingleStoreInsertBuilder(table, this.session, this.dialect); + } + + /** + * Creates a delete query. + * + * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param table The table to delete from. + * + * @example + * + * ```ts + * // Delete all rows in the 'cars' table + * await db.delete(cars); + * + * // Delete rows with filters and conditions + * await db.delete(cars).where(eq(cars.color, 'green')); + * ``` + */ + delete( + table: TTable, + ): SingleStoreDeleteBase { + return new SingleStoreDeleteBase(table, this.session, this.dialect); + } + + execute( + query: SQLWrapper, + ): Promise> { + return this.session.execute(query.getSQL()); + } + + transaction( + transaction: ( + tx: SingleStoreTransaction, + config?: SingleStoreTransactionConfig, + ) => Promise, + config?: SingleStoreTransactionConfig, + ): Promise { + return this.session.transaction(transaction, config); + } + + detach( + database: TDatabase, + ): SingleStoreDetachBase { + return new SingleStoreDetachBase(database, this.session, this.dialect); + } + + attach( + database: TDatabase, + ): SingleStoreAttachBase { + return new SingleStoreAttachBase(database, this.session, this.dialect); + } + + branch( + database: TDatabase, + branchName: string, + ): SingleStoreBranchBase { + return new SingleStoreBranchBase(database, branchName, this.session, this.dialect); + } + + createMilestone( + milestone: TMilestone, + ): SingleStoreCreateMilestoneBase { + return new SingleStoreCreateMilestoneBase(milestone, this.session, this.dialect); + } + + 
dropMilestone( + milestone: TMilestone, + ): SingleStoreDropMilestoneBase { + return new SingleStoreDropMilestoneBase(milestone, this.session, this.dialect); + } + + optimizeTable< + TTable extends SingleStoreTable, + TArg extends OptimizeTableArgument, + >( + table: TTable, + arg: TArg | undefined = undefined, + ): SingleStoreOptimizeTableBase { + return new SingleStoreOptimizeTableBase(table, arg, this.session, this.dialect); + } +} + +export type SingleStoreWithReplicas = Q & { $primary: Q }; + +export const withReplicas = < + HKT extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TFullSchema extends Record, + TSchema extends TablesRelationalConfig, + Q extends SingleStoreDatabase< + HKT, + TPreparedQueryHKT, + TFullSchema, + TSchema extends Record ? ExtractTablesWithRelations : TSchema + >, +>( + primary: Q, + replicas: [Q, ...Q[]], + getReplica: (replicas: Q[]) => Q = () => replicas[Math.floor(Math.random() * replicas.length)]!, +): SingleStoreWithReplicas => { + const select: Q['select'] = (...args: []) => getReplica(replicas).select(...args); + const selectDistinct: Q['selectDistinct'] = (...args: []) => getReplica(replicas).selectDistinct(...args); + const $with: Q['with'] = (...args: []) => getReplica(replicas).with(...args); + + const update: Q['update'] = (...args: [any]) => primary.update(...args); + const insert: Q['insert'] = (...args: [any]) => primary.insert(...args); + const $delete: Q['delete'] = (...args: [any]) => primary.delete(...args); + const execute: Q['execute'] = (...args: [any]) => primary.execute(...args); + const transaction: Q['transaction'] = (...args: [any, any]) => primary.transaction(...args); + + return { + ...primary, + update, + insert, + delete: $delete, + execute, + transaction, + $primary: primary, + select, + selectDistinct, + with: $with, + get query() { + return getReplica(replicas).query; + }, + }; +}; diff --git a/drizzle-orm/src/singlestore-core/dialect.ts 
b/drizzle-orm/src/singlestore-core/dialect.ts new file mode 100644 index 000000000..dd7bdc89f --- /dev/null +++ b/drizzle-orm/src/singlestore-core/dialect.ts @@ -0,0 +1,852 @@ +import { aliasedTable, aliasedTableColumn, mapColumnsInAliasedSQLToAlias, mapColumnsInSQLToAlias } from '~/alias.ts'; +import { Column } from '~/column.ts'; +import { entityKind, is } from '~/entity.ts'; +import { DrizzleError } from '~/errors.ts'; +import { and, eq } from '~/expressions.ts'; +import type { MigrationConfig, MigrationMeta } from '~/migrator.ts'; +import { + type BuildRelationalQueryResult, + type DBQueryConfig, + getOperators, + getOrderByOperators, + Many, + normalizeRelation, + One, + type Relation, + type TableRelationalConfig, + type TablesRelationalConfig, +} from '~/relations.ts'; +import { Param, SQL, sql, View } from '~/sql/sql.ts'; +import type { Name, QueryWithTypings, SQLChunk } from '~/sql/sql.ts'; +import { Subquery } from '~/subquery.ts'; +import { getTableName, getTableUniqueName, Table } from '~/table.ts'; +import { orderSelectedFields, type UpdateSet } from '~/utils.ts'; +import { ViewBaseConfig } from '~/view-common.ts'; +import { SingleStoreColumn } from './columns/common.ts'; +import type { SingleStoreAttachConfig } from './query-builders/attach.ts'; +import type { SingleStoreBranchConfig } from './query-builders/branch.ts'; +import type { SingleStoreCreateMilestoneConfig } from './query-builders/createMilestone.ts'; +import type { SingleStoreDeleteConfig } from './query-builders/delete.ts'; +import type { SingleStoreDetachConfig } from './query-builders/detach.ts'; +import type { SingleStoreDropMilestoneConfig } from './query-builders/dropMilestone.ts'; +import type { SingleStoreInsertConfig } from './query-builders/insert.ts'; +import type { SingleStoreOptimizeTableConfig } from './query-builders/optimizeTable.ts'; +import type { + SelectedFieldsOrdered, + SingleStoreSelectConfig, + SingleStoreSelectJoinConfig, +} from './query-builders/select.types.ts'; 
+import type { SingleStoreUpdateConfig } from './query-builders/update.ts'; +import type { SingleStoreSession } from './session.ts'; +import { SingleStoreTable } from './table.ts'; +import { SingleStoreViewBase } from './view-base.ts'; + +export class SingleStoreDialect { + static readonly [entityKind]: string = 'SingleStoreDialect'; + + async migrate( + migrations: MigrationMeta[], + session: SingleStoreSession, + config: Omit, + ): Promise { + const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; + const migrationTableCreate = sql` + create table if not exists ${sql.identifier(migrationsTable)} ( + id serial primary key, + hash text not null, + created_at bigint + ) + `; + await session.execute(migrationTableCreate); + + const dbMigrations = await session.all<{ id: number; hash: string; created_at: string }>( + sql`select id, hash, created_at from ${sql.identifier(migrationsTable)} order by created_at desc limit 1`, + ); + + const lastDbMigration = dbMigrations[0]; + + await session.transaction(async (tx) => { + for (const migration of migrations) { + if ( + !lastDbMigration + || Number(lastDbMigration.created_at) < migration.folderMillis + ) { + for (const stmt of migration.sql) { + await tx.execute(sql.raw(stmt)); + } + await tx.execute( + sql`insert into ${ + sql.identifier(migrationsTable) + } (\`hash\`, \`created_at\`) values(${migration.hash}, ${migration.folderMillis})`, + ); + } + } + }); + } + + escapeName(name: string): string { + return `\`${name}\``; + } + + escapeParam(_num: number): string { + return `?`; + } + + escapeString(str: string): string { + return `'${str.replace(/'/g, "''")}'`; + } + + private buildWithCTE(queries: Subquery[] | undefined): SQL | undefined { + if (!queries?.length) return undefined; + + const withSqlChunks = [sql`with `]; + for (const [i, w] of queries.entries()) { + withSqlChunks.push(sql`${sql.identifier(w._.alias)} as (${w._.sql})`); + if (i < queries.length - 1) { + withSqlChunks.push(sql`, `); + } 
+ } + withSqlChunks.push(sql` `); + return sql.join(withSqlChunks); + } + + buildDeleteQuery({ table, where, returning, withList }: SingleStoreDeleteConfig): SQL { + const withSql = this.buildWithCTE(withList); + + const returningSql = returning + ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` + : undefined; + + const whereSql = where ? sql` where ${where}` : undefined; + + return sql`${withSql}delete from ${table}${whereSql}${returningSql}`; + } + + buildDetachQuery({ database, milestone, workspace }: SingleStoreDetachConfig): SQL { + const milestoneSql = milestone ? sql` at milestone ${milestone}` : undefined; + + const workspaceSql = workspace ? sql` from workspace ${workspace}` : undefined; + + return sql`detach database ${database}${milestoneSql}${workspaceSql}`; + } + + buildAttachQuery( + { database, milestone, time, databaseAlias, readOnly, ...rest }: SingleStoreAttachConfig | SingleStoreBranchConfig, + ): SQL { + const asSql = databaseAlias ? sql` as ${sql.identifier(databaseAlias)}` : undefined; + const milestoneSql = milestone ? sql` at milestone ${milestone}` : undefined; + const timeSql = time ? sql` at time ${time}` : undefined; + const readOnlySql = readOnly ? sql` read only` : undefined; + const fromWorkspaceGroupSql = 'fromWorkspaceGroup' in rest + ? sql` from workspace group ${rest.fromWorkspaceGroup}` + : undefined; + + return sql`attach database ${ + sql.raw(database) + }${fromWorkspaceGroupSql}${readOnlySql}${asSql}${milestoneSql}${timeSql}`; + } + + buildCreateMilestoneQuery({ database, milestone }: SingleStoreCreateMilestoneConfig): SQL { + const forSql = database ? sql` for ${sql.identifier(database)}` : undefined; + + return sql`create milestone ${milestone}${forSql}`; + } + + buildDropMilestoneQuery({ database, milestone }: SingleStoreDropMilestoneConfig): SQL { + const forSql = database ? 
sql` for ${sql.identifier(database)}` : undefined; + + return sql`drop milestone ${milestone}${forSql}`; + } + + buildOptimizeTable({ table, arg, selection }: SingleStoreOptimizeTableConfig): SQL { + const argSql = arg ? sql` ${sql.raw(arg)}` : undefined; + + let warmBlobCacheForColumnSql = undefined; + if (selection) { + const selectionField = selection.length > 0 + ? selection.map((column) => { + return { path: [], field: column }; + }) + : [{ path: [], field: sql.raw('*') }]; + warmBlobCacheForColumnSql = sql` warm blob cache for column ${ + this.buildSelection(selectionField, { isSingleTable: true }) + }`; + } + + return sql`optimize table ${table}${argSql}${warmBlobCacheForColumnSql}`; + } + + buildUpdateSet(table: SingleStoreTable, set: UpdateSet): SQL { + const tableColumns = table[Table.Symbol.Columns]; + + const columnNames = Object.keys(tableColumns).filter((colName) => + set[colName] !== undefined || tableColumns[colName]?.onUpdateFn !== undefined + ); + + const setSize = columnNames.length; + return sql.join(columnNames.flatMap((colName, i) => { + const col = tableColumns[colName]!; + + const value = set[colName] ?? sql.param(col.onUpdateFn!(), col); + const res = sql`${sql.identifier(col.name)} = ${value}`; + + if (i < setSize - 1) { + return [res, sql.raw(', ')]; + } + return [res]; + })); + } + + buildUpdateQuery({ table, set, where, returning, withList }: SingleStoreUpdateConfig): SQL { + const withSql = this.buildWithCTE(withList); + + const setSql = this.buildUpdateSet(table, set); + + const returningSql = returning + ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` + : undefined; + + const whereSql = where ? sql` where ${where}` : undefined; + + return sql`${withSql}update ${table} set ${setSql}${whereSql}${returningSql}`; + } + + /** + * Builds selection SQL with provided fields/expressions + * + * Examples: + * + * `select from` + * + * `insert ... 
returning ` + * + * If `isSingleTable` is true, then columns won't be prefixed with table name + */ + private buildSelection( + fields: SelectedFieldsOrdered, + { isSingleTable = false }: { isSingleTable?: boolean } = {}, + ): SQL { + const columnsLen = fields.length; + + const chunks = fields + .flatMap(({ field }, i) => { + const chunk: SQLChunk[] = []; + + if (is(field, SQL.Aliased) && field.isSelectionField) { + chunk.push(sql.identifier(field.fieldAlias)); + } else if (is(field, SQL.Aliased) || is(field, SQL)) { + const query = is(field, SQL.Aliased) ? field.sql : field; + + if (isSingleTable) { + chunk.push( + new SQL( + query.queryChunks.map((c) => { + if (is(c, SingleStoreColumn)) { + return sql.identifier(c.name); + } + return c; + }), + ), + ); + } else { + chunk.push(query); + } + + if (is(field, SQL.Aliased)) { + chunk.push(sql` as ${sql.identifier(field.fieldAlias)}`); + } + } else if (is(field, Column)) { + if (isSingleTable) { + chunk.push(sql.identifier(field.name)); + } else { + chunk.push(field); + } + } + + if (i < columnsLen - 1) { + chunk.push(sql`, `); + } + + return chunk; + }); + + return sql.join(chunks); + } + + buildSelectQuery( + { + withList, + fields, + fieldsFlat, + where, + having, + table, + joins, + orderBy, + groupBy, + limit, + offset, + lockingClause, + distinct, + setOperators, + }: SingleStoreSelectConfig, + ): SQL { + const fieldsList = fieldsFlat ?? orderSelectedFields(fields); + for (const f of fieldsList) { + if ( + is(f.field, Column) + && getTableName(f.field.table) + !== (is(table, Subquery) + ? table._.alias + : is(table, SingleStoreViewBase) + ? table[ViewBaseConfig].name + : is(table, SQL) + ? undefined + : getTableName(table)) + && !((table) => + joins?.some(({ alias }) => + alias === (table[Table.Symbol.IsAlias] ? 
getTableName(table) : table[Table.Symbol.BaseName]) + ))(f.field.table) + ) { + const tableName = getTableName(f.field.table); + throw new Error( + `Your "${ + f.path.join('->') + }" field references a column "${tableName}"."${f.field.name}", but the table "${tableName}" is not part of the query! Did you forget to join it?`, + ); + } + } + + const isSingleTable = !joins || joins.length === 0; + + const withSql = this.buildWithCTE(withList); + + const distinctSql = distinct ? sql` distinct` : undefined; + + const selection = this.buildSelection(fieldsList, { isSingleTable }); + + const tableSql = (() => { + if (is(table, Table) && table[Table.Symbol.OriginalName] !== table[Table.Symbol.Name]) { + return sql`${sql.identifier(table[Table.Symbol.OriginalName])} ${sql.identifier(table[Table.Symbol.Name])}`; + } + + return table; + })(); + + const joinsArray: SQL[] = []; + + if (joins) { + for (const [index, joinMeta] of joins.entries()) { + if (index === 0) { + joinsArray.push(sql` `); + } + const table = joinMeta.table; + const lateralSql = joinMeta.lateral ? sql` lateral` : undefined; + + if (is(table, SingleStoreTable)) { + const tableName = table[SingleStoreTable.Symbol.Name]; + const tableSchema = table[SingleStoreTable.Symbol.Schema]; + const origTableName = table[SingleStoreTable.Symbol.OriginalName]; + const alias = tableName === origTableName ? undefined : joinMeta.alias; + joinsArray.push( + sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ + tableSchema ? sql`${sql.identifier(tableSchema)}.` : undefined + }${sql.identifier(origTableName)}${alias && sql` ${sql.identifier(alias)}`} on ${joinMeta.on}`, + ); + } else if (is(table, View)) { + const viewName = table[ViewBaseConfig].name; + const viewSchema = table[ViewBaseConfig].schema; + const origViewName = table[ViewBaseConfig].originalName; + const alias = viewName === origViewName ? undefined : joinMeta.alias; + joinsArray.push( + sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ + viewSchema ? 
sql`${sql.identifier(viewSchema)}.` : undefined + }${sql.identifier(origViewName)}${alias && sql` ${sql.identifier(alias)}`} on ${joinMeta.on}`, + ); + } else { + joinsArray.push( + sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${table} on ${joinMeta.on}`, + ); + } + if (index < joins.length - 1) { + joinsArray.push(sql` `); + } + } + } + + const joinsSql = sql.join(joinsArray); + + const whereSql = where ? sql` where ${where}` : undefined; + + const havingSql = having ? sql` having ${having}` : undefined; + + let orderBySql; + if (orderBy && orderBy.length > 0) { + orderBySql = sql` order by ${sql.join(orderBy, sql`, `)}`; + } + + let groupBySql; + if (groupBy && groupBy.length > 0) { + groupBySql = sql` group by ${sql.join(groupBy, sql`, `)}`; + } + + const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) + ? sql` limit ${limit}` + : undefined; + + const offsetSql = offset ? sql` offset ${offset}` : undefined; + + let lockingClausesSql; + if (lockingClause) { + const { config, strength } = lockingClause; + lockingClausesSql = sql` for ${sql.raw(strength)}`; + if (config.noWait) { + lockingClausesSql.append(sql` no wait`); + } else if (config.skipLocked) { + lockingClausesSql.append(sql` skip locked`); + } + } + + const finalQuery = + sql`${withSql}select${distinctSql} ${selection} from ${tableSql}${joinsSql}${whereSql}${groupBySql}${havingSql}${orderBySql}${limitSql}${offsetSql}${lockingClausesSql}`; + + if (setOperators.length > 0) { + return this.buildSetOperations(finalQuery, setOperators); + } + + return finalQuery; + } + + buildSetOperations(leftSelect: SQL, setOperators: SingleStoreSelectConfig['setOperators']): SQL { + const [setOperator, ...rest] = setOperators; + + if (!setOperator) { + throw new Error('Cannot pass undefined values to any set operator'); + } + + if (rest.length === 0) { + return this.buildSetOperationQuery({ leftSelect, setOperator }); + } + + // Some recursive magic here + return 
this.buildSetOperations( + this.buildSetOperationQuery({ leftSelect, setOperator }), + rest, + ); + } + + buildSetOperationQuery({ + leftSelect, + setOperator: { type, isAll, rightSelect, limit, orderBy, offset }, + }: { leftSelect: SQL; setOperator: SingleStoreSelectConfig['setOperators'][number] }): SQL { + const leftChunk = sql`(${leftSelect.getSQL()}) `; + const rightChunk = sql`(${rightSelect.getSQL()})`; + + let orderBySql; + if (orderBy && orderBy.length > 0) { + const orderByValues: (SQL | Name)[] = []; + + // The next bit is necessary because the sql operator replaces ${table.column} with `table`.`column` + // which is invalid SingleStore syntax, Table from one of the SELECTs cannot be used in global ORDER clause + for (const orderByUnit of orderBy) { + if (is(orderByUnit, SingleStoreColumn)) { + orderByValues.push(sql.identifier(orderByUnit.name)); + } else if (is(orderByUnit, SQL)) { + for (let i = 0; i < orderByUnit.queryChunks.length; i++) { + const chunk = orderByUnit.queryChunks[i]; + + if (is(chunk, SingleStoreColumn)) { + orderByUnit.queryChunks[i] = sql.identifier(chunk.name); + } + } + + orderByValues.push(sql`${orderByUnit}`); + } else { + orderByValues.push(sql`${orderByUnit}`); + } + } + + orderBySql = sql` order by ${sql.join(orderByValues, sql`, `)} `; + } + + const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) + ? sql` limit ${limit}` + : undefined; + + const operatorChunk = sql.raw(`${type} ${isAll ? 'all ' : ''}`); + + const offsetSql = offset ? 
sql` offset ${offset}` : undefined; + + return sql`${leftChunk}${operatorChunk}${rightChunk}${orderBySql}${limitSql}${offsetSql}`; + } + + buildInsertQuery( + { table, values, ignore, onConflict }: SingleStoreInsertConfig, + ): { sql: SQL; generatedIds: Record[] } { + // const isSingleValue = values.length === 1; + const valuesSqlList: ((SQLChunk | SQL)[] | SQL)[] = []; + const columns: Record = table[Table.Symbol.Columns]; + const colEntries: [string, SingleStoreColumn][] = Object.entries(columns).filter(([_, col]) => + !col.shouldDisableInsert() + ); + + const insertOrder = colEntries.map(([, column]) => sql.identifier(column.name)); + const generatedIdsResponse: Record[] = []; + + for (const [valueIndex, value] of values.entries()) { + const generatedIds: Record = {}; + + const valueList: (SQLChunk | SQL)[] = []; + for (const [fieldName, col] of colEntries) { + const colValue = value[fieldName]; + if (colValue === undefined || (is(colValue, Param) && colValue.value === undefined)) { + // eslint-disable-next-line unicorn/no-negated-condition + if (col.defaultFn !== undefined) { + const defaultFnResult = col.defaultFn(); + generatedIds[fieldName] = defaultFnResult; + const defaultValue = is(defaultFnResult, SQL) ? defaultFnResult : sql.param(defaultFnResult, col); + valueList.push(defaultValue); + // eslint-disable-next-line unicorn/no-negated-condition + } else if (!col.default && col.onUpdateFn !== undefined) { + const onUpdateFnResult = col.onUpdateFn(); + const newValue = is(onUpdateFnResult, SQL) ? 
onUpdateFnResult : sql.param(onUpdateFnResult, col); + valueList.push(newValue); + } else { + valueList.push(sql`default`); + } + } else { + if (col.defaultFn && is(colValue, Param)) { + generatedIds[fieldName] = colValue.value; + } + valueList.push(colValue); + } + } + + generatedIdsResponse.push(generatedIds); + valuesSqlList.push(valueList); + if (valueIndex < values.length - 1) { + valuesSqlList.push(sql`, `); + } + } + + const valuesSql = sql.join(valuesSqlList); + + const ignoreSql = ignore ? sql` ignore` : undefined; + + const onConflictSql = onConflict ? sql` on duplicate key ${onConflict}` : undefined; + + return { + sql: sql`insert${ignoreSql} into ${table} ${insertOrder} values ${valuesSql}${onConflictSql}`, + generatedIds: generatedIdsResponse, + }; + } + + sqlToQuery(sql: SQL, invokeSource?: 'indexes' | undefined): QueryWithTypings { + return sql.toQuery({ + escapeName: this.escapeName, + escapeParam: this.escapeParam, + escapeString: this.escapeString, + invokeSource, + }); + } + + buildRelationalQuery({ + fullSchema, + schema, + tableNamesMap, + table, + tableConfig, + queryConfig: config, + tableAlias, + nestedQueryRelation, + joinOn, + }: { + fullSchema: Record; + schema: TablesRelationalConfig; + tableNamesMap: Record; + table: SingleStoreTable; + tableConfig: TableRelationalConfig; + queryConfig: true | DBQueryConfig<'many', true>; + tableAlias: string; + nestedQueryRelation?: Relation; + joinOn?: SQL; + }): BuildRelationalQueryResult { + let selection: BuildRelationalQueryResult['selection'] = []; + let limit, offset, orderBy: SingleStoreSelectConfig['orderBy'], where; + const joins: SingleStoreSelectJoinConfig[] = []; + + if (config === true) { + const selectionEntries = Object.entries(tableConfig.columns); + selection = selectionEntries.map(( + [key, value], + ) => ({ + dbKey: value.name, + tsKey: key, + field: aliasedTableColumn(value as SingleStoreColumn, tableAlias), + relationTableTsKey: undefined, + isJson: false, + selection: [], + })); 
+ } else { + const aliasedColumns = Object.fromEntries( + Object.entries(tableConfig.columns).map(([key, value]) => [key, aliasedTableColumn(value, tableAlias)]), + ); + + if (config.where) { + const whereSql = typeof config.where === 'function' + ? config.where(aliasedColumns, getOperators()) + : config.where; + where = whereSql && mapColumnsInSQLToAlias(whereSql, tableAlias); + } + + const fieldsSelection: { tsKey: string; value: SingleStoreColumn | SQL.Aliased }[] = []; + let selectedColumns: string[] = []; + + // Figure out which columns to select + if (config.columns) { + let isIncludeMode = false; + + for (const [field, value] of Object.entries(config.columns)) { + if (value === undefined) { + continue; + } + + if (field in tableConfig.columns) { + if (!isIncludeMode && value === true) { + isIncludeMode = true; + } + selectedColumns.push(field); + } + } + + if (selectedColumns.length > 0) { + selectedColumns = isIncludeMode + ? selectedColumns.filter((c) => config.columns?.[c] === true) + : Object.keys(tableConfig.columns).filter((key) => !selectedColumns.includes(key)); + } + } else { + // Select all columns if selection is not specified + selectedColumns = Object.keys(tableConfig.columns); + } + + for (const field of selectedColumns) { + const column = tableConfig.columns[field]! as SingleStoreColumn; + fieldsSelection.push({ tsKey: field, value: column }); + } + + let selectedRelations: { + tsKey: string; + queryConfig: true | DBQueryConfig<'many', false>; + relation: Relation; + }[] = []; + + // Figure out which relations to select + if (config.with) { + selectedRelations = Object.entries(config.with) + .filter((entry): entry is [typeof entry[0], NonNullable] => !!entry[1]) + .map(([tsKey, queryConfig]) => ({ tsKey, queryConfig, relation: tableConfig.relations[tsKey]! })); + } + + let extras; + + // Figure out which extras to select + if (config.extras) { + extras = typeof config.extras === 'function' + ? 
config.extras(aliasedColumns, { sql }) + : config.extras; + for (const [tsKey, value] of Object.entries(extras)) { + fieldsSelection.push({ + tsKey, + value: mapColumnsInAliasedSQLToAlias(value, tableAlias), + }); + } + } + + // Transform `fieldsSelection` into `selection` + // `fieldsSelection` shouldn't be used after this point + for (const { tsKey, value } of fieldsSelection) { + selection.push({ + dbKey: is(value, SQL.Aliased) ? value.fieldAlias : tableConfig.columns[tsKey]!.name, + tsKey, + field: is(value, Column) ? aliasedTableColumn(value, tableAlias) : value, + relationTableTsKey: undefined, + isJson: false, + selection: [], + }); + } + + let orderByOrig = typeof config.orderBy === 'function' + ? config.orderBy(aliasedColumns, getOrderByOperators()) + : config.orderBy ?? []; + if (!Array.isArray(orderByOrig)) { + orderByOrig = [orderByOrig]; + } + orderBy = orderByOrig.map((orderByValue) => { + if (is(orderByValue, Column)) { + return aliasedTableColumn(orderByValue, tableAlias) as SingleStoreColumn; + } + return mapColumnsInSQLToAlias(orderByValue, tableAlias); + }); + + limit = config.limit; + offset = config.offset; + + // Process all relations + for ( + const { + tsKey: selectedRelationTsKey, + queryConfig: selectedRelationConfigValue, + relation, + } of selectedRelations + ) { + const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); + const relationTableName = getTableUniqueName(relation.referencedTable); + const relationTableTsName = tableNamesMap[relationTableName]!; + const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; + const joinOn = and( + ...normalizedRelation.fields.map((field, i) => + eq( + aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), + aliasedTableColumn(field, tableAlias), + ) + ), + ); + const builtRelation = this.buildRelationalQuery({ + fullSchema, + schema, + tableNamesMap, + table: fullSchema[relationTableTsName] as SingleStoreTable, + tableConfig: 
schema[relationTableTsName]!, + queryConfig: is(relation, One) + ? (selectedRelationConfigValue === true + ? { limit: 1 } + : { ...selectedRelationConfigValue, limit: 1 }) + : selectedRelationConfigValue, + tableAlias: relationTableAlias, + joinOn, + nestedQueryRelation: relation, + }); + const field = sql`coalesce(${sql.identifier(relationTableAlias)}.${sql.identifier('data')}, "[]")`.as( + selectedRelationTsKey, + ); + joins.push({ + on: sql`true`, + table: new Subquery(builtRelation.sql as SQL, {}, relationTableAlias), + alias: relationTableAlias, + joinType: 'left', + lateral: true, + }); + selection.push({ + dbKey: selectedRelationTsKey, + tsKey: selectedRelationTsKey, + field, + relationTableTsKey: relationTableTsName, + isJson: true, + selection: builtRelation.selection, + }); + } + } + + if (selection.length === 0) { + throw new DrizzleError({ message: `No fields selected for table "${tableConfig.tsName}" ("${tableAlias}")` }); + } + + let result; + + where = and(joinOn, where); + + if (nestedQueryRelation) { + let field = sql`JSON_BUILD_OBJECT(${ + sql.join( + selection.map(({ field, tsKey, isJson }, index) => + isJson + ? sql`${index}, ${sql.identifier(`${tableAlias}_${tsKey}`)}.${sql.identifier('data')}` + : is(field, SQL.Aliased) + ? sql`${index}, ${field.sql}` + : sql`${index}, ${field}` + ), + sql`, `, + ) + })`; + if (is(nestedQueryRelation, Many)) { + field = sql`json_agg(${field})`; + } + const nestedSelection = [{ + dbKey: 'data', + tsKey: 'data', + field: field.as('data'), + isJson: true, + relationTableTsKey: tableConfig.tsName, + selection, + }]; + + const needsSubquery = limit !== undefined || offset !== undefined || (orderBy?.length ?? 0) > 0; + + if (needsSubquery) { + result = this.buildSelectQuery({ + table: aliasedTable(table, tableAlias), + fields: {}, + fieldsFlat: [ + { + path: [], + field: sql.raw('*'), + }, + ...(((orderBy?.length ?? 0) > 0) + ? 
[{ + path: [], + field: sql`row_number() over (order by ${sql.join(orderBy!, sql`, `)})`, + }] + : []), + ], + where, + limit, + offset, + setOperators: [], + }); + + where = undefined; + limit = undefined; + offset = undefined; + orderBy = undefined; + } else { + result = aliasedTable(table, tableAlias); + } + + result = this.buildSelectQuery({ + table: is(result, SingleStoreTable) ? result : new Subquery(result, {}, tableAlias), + fields: {}, + fieldsFlat: nestedSelection.map(({ field }) => ({ + path: [], + field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, + })), + joins, + where, + limit, + offset, + orderBy, + setOperators: [], + }); + } else { + result = this.buildSelectQuery({ + table: aliasedTable(table, tableAlias), + fields: {}, + fieldsFlat: selection.map(({ field }) => ({ + path: [], + field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, + })), + joins, + where, + limit, + offset, + orderBy, + setOperators: [], + }); + } + + return { + tableTsKey: tableConfig.tsName, + sql: result, + selection, + }; + } +} diff --git a/drizzle-orm/src/singlestore-core/expressions.ts b/drizzle-orm/src/singlestore-core/expressions.ts new file mode 100644 index 000000000..6d4284d18 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/expressions.ts @@ -0,0 +1,25 @@ +import { bindIfParam } from '~/expressions.ts'; +import type { Placeholder, SQL, SQLChunk, SQLWrapper } from '~/sql/sql.ts'; +import { sql } from '~/sql/sql.ts'; +import type { SingleStoreColumn } from './columns/index.ts'; + +export * from '~/expressions.ts'; + +export function concat(column: SingleStoreColumn | SQL.Aliased, value: string | Placeholder | SQLWrapper): SQL { + return sql`${column} || ${bindIfParam(value, column)}`; +} + +export function substring( + column: SingleStoreColumn | SQL.Aliased, + { from, for: _for }: { from?: number | Placeholder | SQLWrapper; for?: number | Placeholder | SQLWrapper }, +): SQL { + const chunks: SQLChunk[] = 
[sql`substring(`, column]; + if (from !== undefined) { + chunks.push(sql` from `, bindIfParam(from, column)); + } + if (_for !== undefined) { + chunks.push(sql` for `, bindIfParam(_for, column)); + } + chunks.push(sql`)`); + return sql.join(chunks); +} diff --git a/drizzle-orm/src/singlestore-core/index.ts b/drizzle-orm/src/singlestore-core/index.ts new file mode 100644 index 000000000..4da014404 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/index.ts @@ -0,0 +1,16 @@ +export * from './alias.ts'; +export * from './columns/index.ts'; +export * from './db.ts'; +export * from './dialect.ts'; +export * from './indexes.ts'; +export * from './primary-keys.ts'; +export * from './query-builders/index.ts'; +export * from './schema.ts'; +export * from './session.ts'; +export * from './sql/index.ts'; +export * from './subquery.ts'; +export * from './table.ts'; +export * from './unique-constraint.ts'; +export * from './utils.ts'; +export * from './view-common.ts'; +export * from './view.ts'; diff --git a/drizzle-orm/src/singlestore-core/indexes.ts b/drizzle-orm/src/singlestore-core/indexes.ts new file mode 100644 index 000000000..172f524f5 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/indexes.ts @@ -0,0 +1,191 @@ +import { entityKind } from '~/entity.ts'; +import type { SQL } from '~/sql/sql.ts'; +import type { AnySingleStoreColumn, SingleStoreColumn } from './columns/index.ts'; +import type { SingleStoreTable } from './table.ts'; + +interface IndexConfig { + name: string; + + columns: IndexColumn[]; + + /** + * If true, the index will be created as `create unique index` instead of `create index`. + */ + unique?: boolean; + + /** + * If set, the index will be created as `create index ... using { 'btree' | 'hash' }`. + */ + using?: 'btree' | 'hash'; + + /** + * If set, the index will be created as `create index ... algorythm { 'default' | 'inplace' | 'copy' }`. + */ + algorythm?: 'default' | 'inplace' | 'copy'; + + /** + * If set, adds locks to the index creation. 
+ */ + lock?: 'default' | 'none' | 'shared' | 'exclusive'; +} + +export type IndexColumn = SingleStoreColumn | SQL; + +export class IndexBuilderOn { + static readonly [entityKind]: string = 'SingleStoreIndexBuilderOn'; + + constructor(private name: string, private unique: boolean) {} + + on(...columns: [IndexColumn, ...IndexColumn[]]): IndexBuilder { + return new IndexBuilder(this.name, columns, this.unique); + } +} + +export interface AnyIndexBuilder { + build(table: SingleStoreTable): Index; +} + +export interface AnyFullTextIndexBuilder { + build(table: SingleStoreTable): FullTextIndex; +} + +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface IndexBuilder extends AnyIndexBuilder {} + +export class IndexBuilder implements AnyIndexBuilder { + static readonly [entityKind]: string = 'SingleStoreIndexBuilder'; + + /** @internal */ + config: IndexConfig; + + constructor(name: string, columns: IndexColumn[], unique: boolean) { + this.config = { + name, + columns, + unique, + }; + } + + using(using: IndexConfig['using']): this { + this.config.using = using; + return this; + } + + algorythm(algorythm: IndexConfig['algorythm']): this { + this.config.algorythm = algorythm; + return this; + } + + lock(lock: IndexConfig['lock']): this { + this.config.lock = lock; + return this; + } + + /** @internal */ + build(table: SingleStoreTable): Index { + return new Index(this.config, table); + } +} + +export class Index { + static readonly [entityKind]: string = 'SingleStoreIndex'; + + readonly config: IndexConfig & { table: SingleStoreTable }; + + constructor(config: IndexConfig, table: SingleStoreTable) { + this.config = { ...config, table }; + } +} + +export type GetColumnsTableName = TColumns extends + AnySingleStoreColumn<{ tableName: infer TTableName extends string }> | AnySingleStoreColumn< + { tableName: infer TTableName extends string } + >[] ? 
TTableName + : never; + +export function index(name: string): IndexBuilderOn { + return new IndexBuilderOn(name, false); +} + +export function uniqueIndex(name: string): IndexBuilderOn { + return new IndexBuilderOn(name, true); +} + +interface FullTextIndexConfig { + version?: number; +} + +interface FullTextIndexFullConfig extends FullTextIndexConfig { + columns: IndexColumn[]; + + name: string; +} + +export class FullTextIndexBuilderOn { + static readonly [entityKind]: string = 'SingleStoreFullTextIndexBuilderOn'; + + constructor(private name: string, private config: FullTextIndexConfig) {} + + on(...columns: [IndexColumn, ...IndexColumn[]]): FullTextIndexBuilder { + return new FullTextIndexBuilder({ + name: this.name, + columns: columns, + ...this.config, + }); + } +} + +export interface FullTextIndexBuilder extends AnyFullTextIndexBuilder {} + +export class FullTextIndexBuilder implements AnyFullTextIndexBuilder { + static readonly [entityKind]: string = 'SingleStoreFullTextIndexBuilder'; + + /** @internal */ + config: FullTextIndexFullConfig; + + constructor(config: FullTextIndexFullConfig) { + this.config = config; + } + + /** @internal */ + build(table: SingleStoreTable): FullTextIndex { + return new FullTextIndex(this.config, table); + } +} + +export class FullTextIndex { + static readonly [entityKind]: string = 'SingleStoreFullTextIndex'; + + readonly config: FullTextIndexConfig & { table: SingleStoreTable }; + + constructor(config: FullTextIndexConfig, table: SingleStoreTable) { + this.config = { ...config, table }; + } +} + +export function fulltext(name: string, config: FullTextIndexConfig): FullTextIndexBuilderOn { + return new FullTextIndexBuilderOn(name, config); +} + +export type SortKeyColumn = SingleStoreColumn | SQL; + +export class SortKeyBuilder { + static readonly [entityKind]: string = 'SingleStoreSortKeyBuilder'; + + constructor(private columns: SortKeyColumn[]) {} + + /** @internal */ + build(table: SingleStoreTable): SortKey { + return new 
SortKey(this.columns, table); + } +} + +export class SortKey { + static readonly [entityKind]: string = 'SingleStoreSortKey'; + + constructor(public columns: SortKeyColumn[], public table: SingleStoreTable) {} +} + +export function sortKey(...columns: SortKeyColumn[]): SortKeyBuilder { + return new SortKeyBuilder(columns); +} diff --git a/drizzle-orm/src/singlestore-core/primary-keys.ts b/drizzle-orm/src/singlestore-core/primary-keys.ts new file mode 100644 index 000000000..47dc0a19c --- /dev/null +++ b/drizzle-orm/src/singlestore-core/primary-keys.ts @@ -0,0 +1,63 @@ +import { entityKind } from '~/entity.ts'; +import type { AnySingleStoreColumn, SingleStoreColumn } from './columns/index.ts'; +import { SingleStoreTable } from './table.ts'; + +export function primaryKey< + TTableName extends string, + TColumn extends AnySingleStoreColumn<{ tableName: TTableName }>, + TColumns extends AnySingleStoreColumn<{ tableName: TTableName }>[], +>(config: { name?: string; columns: [TColumn, ...TColumns] }): PrimaryKeyBuilder; +/** + * @deprecated: Please use primaryKey({ columns: [] }) instead of this function + * @param columns + */ +export function primaryKey< + TTableName extends string, + TColumns extends AnySingleStoreColumn<{ tableName: TTableName }>[], +>(...columns: TColumns): PrimaryKeyBuilder; +export function primaryKey(...config: any) { + if (config[0].columns) { + return new PrimaryKeyBuilder(config[0].columns, config[0].name); + } + return new PrimaryKeyBuilder(config); +} + +export class PrimaryKeyBuilder { + static readonly [entityKind]: string = 'SingleStorePrimaryKeyBuilder'; + + /** @internal */ + columns: SingleStoreColumn[]; + + /** @internal */ + name?: string; + + constructor( + columns: SingleStoreColumn[], + name?: string, + ) { + this.columns = columns; + this.name = name; + } + + /** @internal */ + build(table: SingleStoreTable): PrimaryKey { + return new PrimaryKey(table, this.columns, this.name); + } +} + +export class PrimaryKey { + static 
readonly [entityKind]: string = 'SingleStorePrimaryKey'; + + readonly columns: SingleStoreColumn[]; + readonly name?: string; + + constructor(readonly table: SingleStoreTable, columns: SingleStoreColumn[], name?: string) { + this.columns = columns; + this.name = name; + } + + getName(): string { + return this.name + ?? `${this.table[SingleStoreTable.Symbol.Name]}_${this.columns.map((column) => column.name).join('_')}_pk`; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/attach.ts b/drizzle-orm/src/singlestore-core/query-builders/attach.ts new file mode 100644 index 000000000..0b5bbac1a --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/attach.ts @@ -0,0 +1,198 @@ +import { entityKind } from '~/entity.ts'; +import { DrizzleError } from '~/errors.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + AnySingleStoreQueryResultHKT, + PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; + +export type SingleStoreAttachWithout< + T extends AnySingleStoreAttachBase, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? 
T + : Omit< + SingleStoreAttachBase< + T['_']['database'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K + >; + +export type SingleStoreAttach< + TDatabase extends string = string, + TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, +> = SingleStoreAttachBase; + +export interface SingleStoreAttachConfig { + milestone?: string | undefined; + time?: Date | undefined; + database: string; + databaseAlias?: string | undefined; + readOnly?: boolean | undefined; +} + +export type SingleStoreAttachPrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: SingleStoreQueryResultKind; + iterator: never; + }, + true +>; + +type SingleStoreAttachDynamic = SingleStoreAttach< + T['_']['database'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'] +>; + +type AnySingleStoreAttachBase = SingleStoreAttachBase; + +export interface SingleStoreAttachBase< + TDatabase extends string, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends QueryPromise> { + readonly _: { + readonly database: TDatabase; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + }; +} + +export class SingleStoreAttachBase< + TDatabase extends string, + TQueryResult extends SingleStoreQueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise> implements SQLWrapper { + static readonly [entityKind]: 
string = 'SingleStoreAttach'; + + private config: SingleStoreAttachConfig; + + constructor( + private database: TDatabase, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + ) { + super(); + this.config = { database }; + } + + as(dabataseAlias: string): SingleStoreAttachWithout { + if (this.config.readOnly) { + throw new DrizzleError({ message: 'Cannot set both databaseAlias and readOnly' }); + } + this.config.databaseAlias = dabataseAlias; + return this as any; + } + + /** + * Adds a `where` clause to the query. + * + * Calling this method will delete only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param where the `where` clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be deleted. + * + * ```ts + * // Attach all cars with green color + * db.delete(cars).where(eq(cars.color, 'green')); + * // or + * db.delete(cars).where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Attach all BMW cars with a green color + * db.delete(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Attach all cars with the green or blue color + * db.delete(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + // TODO(singlestore): docs + atMilestone(milestone: string): SingleStoreAttachWithout { + if (this.config.time) { + throw new DrizzleError({ message: 'Cannot set both time and milestone' }); + } + this.config.milestone = milestone; + return this as any; + } + + // TODO(singlestore): docs + atTime(time: Date): SingleStoreAttachWithout { + if (this.config.milestone) { + throw new DrizzleError({ message: 'Cannot set both time and milestone' }); + } + this.config.time = time; + return this as any; + } + + // TODO(singlestore): docs + readOnly(): SingleStoreAttachWithout { + if 
(this.config.databaseAlias) { + throw new DrizzleError({ message: 'Cannot set both databaseAlias and readOnly' }); + } + this.config.readOnly = true; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildAttachQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): SingleStoreAttachPrepare { + return this.session.prepareQuery( + this.dialect.sqlToQuery(this.getSQL()), + undefined, + ) as SingleStoreAttachPrepare; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return this.prepare().execute(placeholderValues); + }; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): SingleStoreAttachDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/branch.ts b/drizzle-orm/src/singlestore-core/query-builders/branch.ts new file mode 100644 index 000000000..5ed8b6831 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/branch.ts @@ -0,0 +1,186 @@ +import { entityKind } from '~/entity.ts'; +import { DrizzleError } from '~/errors.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + AnySingleStoreQueryResultHKT, + PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import type { SingleStoreAttachConfig } from './attach.ts'; + +export type SingleStoreBranchWithout< + T extends AnySingleStoreBranchBase, + TDynamic extends boolean, + K extends keyof T & string, +> = 
TDynamic extends true ? T + : Omit< + SingleStoreBranchBase< + T['_']['database'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K + >; + +export type SingleStoreBranch< + TDatabase extends string = string, + TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, +> = SingleStoreBranchBase; + +export interface SingleStoreBranchConfig extends SingleStoreAttachConfig { + databaseAlias: string; + fromWorkspaceGroup?: string | undefined; +} + +export type SingleStoreBranchPrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: SingleStoreQueryResultKind; + iterator: never; + }, + true +>; + +type SingleStoreBranchDynamic = SingleStoreBranch< + T['_']['database'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'] +>; + +type AnySingleStoreBranchBase = SingleStoreBranchBase; + +export interface SingleStoreBranchBase< + TDatabase extends string, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends QueryPromise> { + readonly _: { + readonly database: TDatabase; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + }; +} + +export class SingleStoreBranchBase< + TDatabase extends string, + TQueryResult extends SingleStoreQueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise> implements SQLWrapper { + static readonly [entityKind]: string = 'SingleStoreBranch'; 
+ + private config: SingleStoreBranchConfig; + + constructor( + private database: TDatabase, + private branchName: string, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + ) { + super(); + this.config = { database, databaseAlias: branchName }; + } + + /** + * Adds a `where` clause to the query. + * + * Calling this method will delete only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param where the `where` clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be deleted. + * + * ```ts + * // Attach all cars with green color + * db.delete(cars).where(eq(cars.color, 'green')); + * // or + * db.delete(cars).where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Attach all BMW cars with a green color + * db.delete(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Attach all cars with the green or blue color + * db.delete(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + // TODO(singlestore): docs + atMilestone(milestone: string): SingleStoreBranchWithout { + if (this.config.time) { + throw new DrizzleError({ message: 'Cannot set both time and milestone' }); + } + this.config.milestone = milestone; + return this as any; + } + + // TODO(singlestore): docs + atTime(time: Date): SingleStoreBranchWithout { + if (this.config.milestone) { + throw new DrizzleError({ message: 'Cannot set both time and milestone' }); + } + this.config.time = time; + return this as any; + } + + // TODO(singlestore): docs + fromWorkspaceGroup(groupID: string): SingleStoreBranchWithout { + this.config.fromWorkspaceGroup = groupID; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildAttachQuery(this.config); + } + + toSQL(): Query { + const { typings: 
_typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): SingleStoreBranchPrepare { + return this.session.prepareQuery( + this.dialect.sqlToQuery(this.getSQL()), + undefined, + ) as SingleStoreBranchPrepare; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return this.prepare().execute(placeholderValues); + }; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): SingleStoreBranchDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/createMilestone.ts b/drizzle-orm/src/singlestore-core/query-builders/createMilestone.ts new file mode 100644 index 000000000..7922e39a1 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/createMilestone.ts @@ -0,0 +1,136 @@ +import { entityKind } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + AnySingleStoreQueryResultHKT, + PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; + +export type SingleStoreCreateMilestoneWithout< + T extends AnySingleStoreCreateMilestoneBase, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? 
T + : Omit< + SingleStoreCreateMilestoneBase< + T['_']['milestone'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K + >; + +export type SingleStoreCreateMilestone< + TDatabase extends string = string, + TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, +> = SingleStoreCreateMilestoneBase; + +export interface SingleStoreCreateMilestoneConfig { + milestone: string; + database?: string | undefined; +} + +export type SingleStoreCreateMilestonePrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: SingleStoreQueryResultKind; + iterator: never; + }, + true +>; + +type SingleStoreCreateMilestoneDynamic = SingleStoreCreateMilestone< + T['_']['milestone'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'] +>; + +type AnySingleStoreCreateMilestoneBase = SingleStoreCreateMilestoneBase; + +export interface SingleStoreCreateMilestoneBase< + TMilestone extends string, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends QueryPromise> { + readonly _: { + readonly milestone: TMilestone; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + }; +} + +export class SingleStoreCreateMilestoneBase< + TMilestone extends string, + TQueryResult extends SingleStoreQueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise> implements SQLWrapper { + static readonly 
[entityKind]: string = 'SingleStoreCreateMilestone'; + + private config: SingleStoreCreateMilestoneConfig; + + constructor( + private milestone: TMilestone, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + ) { + super(); + this.config = { milestone }; + } + + // TODO(singlestore): docs + for(database: string): SingleStoreCreateMilestoneWithout { + this.config.database = database; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildCreateMilestoneQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): SingleStoreCreateMilestonePrepare { + return this.session.prepareQuery( + this.dialect.sqlToQuery(this.getSQL()), + undefined, + ) as SingleStoreCreateMilestonePrepare; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return this.prepare().execute(placeholderValues); + }; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): SingleStoreCreateMilestoneDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/delete.ts b/drizzle-orm/src/singlestore-core/query-builders/delete.ts new file mode 100644 index 000000000..e0a463784 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/delete.ts @@ -0,0 +1,170 @@ +import { entityKind } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + AnySingleStoreQueryResultHKT, + PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { 
SingleStoreTable } from '~/singlestore-core/table.ts'; +import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import type { Subquery } from '~/subquery.ts'; +import type { SelectedFieldsOrdered } from './select.types.ts'; + +export type SingleStoreDeleteWithout< + T extends AnySingleStoreDeleteBase, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? T + : Omit< + SingleStoreDeleteBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K + >; + +export type SingleStoreDelete< + TTable extends SingleStoreTable = SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, +> = SingleStoreDeleteBase; + +export interface SingleStoreDeleteConfig { + where?: SQL | undefined; + table: SingleStoreTable; + returning?: SelectedFieldsOrdered; + withList?: Subquery[]; +} + +export type SingleStoreDeletePrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: SingleStoreQueryResultKind; + iterator: never; + }, + true +>; + +type SingleStoreDeleteDynamic = SingleStoreDelete< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'] +>; + +type AnySingleStoreDeleteBase = SingleStoreDeleteBase; + +export interface SingleStoreDeleteBase< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends QueryPromise> { + readonly _: { + readonly table: TTable; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + }; +} + +export class SingleStoreDeleteBase< + TTable extends SingleStoreTable, + 
TQueryResult extends SingleStoreQueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise> implements SQLWrapper { + static readonly [entityKind]: string = 'SingleStoreDelete'; + + private config: SingleStoreDeleteConfig; + + constructor( + private table: TTable, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + withList?: Subquery[], + ) { + super(); + this.config = { table, withList }; + } + + /** + * Adds a `where` clause to the query. + * + * Calling this method will delete only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param where the `where` clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be deleted. 
+ * + * ```ts + * // Delete all cars with green color + * db.delete(cars).where(eq(cars.color, 'green')); + * // or + * db.delete(cars).where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Delete all BMW cars with a green color + * db.delete(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Delete all cars with the green or blue color + * db.delete(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + where(where: SQL | undefined): SingleStoreDeleteWithout { + this.config.where = where; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildDeleteQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): SingleStoreDeletePrepare { + return this.session.prepareQuery( + this.dialect.sqlToQuery(this.getSQL()), + this.config.returning, + ) as SingleStoreDeletePrepare; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return this.prepare().execute(placeholderValues); + }; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): SingleStoreDeleteDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/detach.ts b/drizzle-orm/src/singlestore-core/query-builders/detach.ts new file mode 100644 index 000000000..df3452074 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/detach.ts @@ -0,0 +1,172 @@ +import { entityKind } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + AnySingleStoreQueryResultHKT, + 
PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; + +export type SingleStoreDetachWithout< + T extends AnySingleStoreDetachBase, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? T + : Omit< + SingleStoreDetachBase< + T['_']['database'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K + >; + +export type SingleStoreDetach< + TDatabase extends string = string, + TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, +> = SingleStoreDetachBase; + +export interface SingleStoreDetachConfig { + milestone?: string | undefined; + database: string; + workspace?: string | undefined; +} + +export type SingleStoreDetachPrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: SingleStoreQueryResultKind; + iterator: never; + }, + true +>; + +type SingleStoreDetachDynamic = SingleStoreDetach< + T['_']['database'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'] +>; + +type AnySingleStoreDetachBase = SingleStoreDetachBase; + +export interface SingleStoreDetachBase< + TDatabase extends string, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends QueryPromise> { + readonly _: { + readonly database: TDatabase; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + }; +} + +export class SingleStoreDetachBase< + TDatabase extends string, + TQueryResult extends 
SingleStoreQueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise> implements SQLWrapper { + static readonly [entityKind]: string = 'SingleStoreDetach'; + + private config: SingleStoreDetachConfig; + + constructor( + private database: TDatabase, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + ) { + super(); + this.config = { database }; + } + + /** + * Adds a `where` clause to the query. + * + * Calling this method will delete only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param where the `where` clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be deleted. + * + * ```ts + * // Detach all cars with green color + * db.delete(cars).where(eq(cars.color, 'green')); + * // or + * db.delete(cars).where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Detach all BMW cars with a green color + * db.delete(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Detach all cars with the green or blue color + * db.delete(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + // TODO(singlestore): docs + atMilestone(milestone: string): SingleStoreDetachWithout { + this.config.milestone = milestone; + return this as any; + } + + // TODO(singlestore): docs + fromWorkspace(workspace: string): SingleStoreDetachWithout { + this.config.workspace = workspace; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildDetachQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = 
this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): SingleStoreDetachPrepare { + return this.session.prepareQuery( + this.dialect.sqlToQuery(this.getSQL()), + undefined, + ) as SingleStoreDetachPrepare; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return this.prepare().execute(placeholderValues); + }; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): SingleStoreDetachDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/dropMilestone.ts b/drizzle-orm/src/singlestore-core/query-builders/dropMilestone.ts new file mode 100644 index 000000000..7fba4f05e --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/dropMilestone.ts @@ -0,0 +1,136 @@ +import { entityKind } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + AnySingleStoreQueryResultHKT, + PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; + +export type SingleStoreDropMilestoneWithout< + T extends AnySingleStoreDropMilestoneBase, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? 
T + : Omit< + SingleStoreDropMilestoneBase< + T['_']['milestone'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K + >; + +export type SingleStoreDropMilestone< + TDatabase extends string = string, + TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, +> = SingleStoreDropMilestoneBase; + +export interface SingleStoreDropMilestoneConfig { + milestone: string; + database?: string | undefined; +} + +export type SingleStoreDropMilestonePrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: SingleStoreQueryResultKind; + iterator: never; + }, + true +>; + +type SingleStoreDropMilestoneDynamic = SingleStoreDropMilestone< + T['_']['milestone'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'] +>; + +type AnySingleStoreDropMilestoneBase = SingleStoreDropMilestoneBase; + +export interface SingleStoreDropMilestoneBase< + TMilestone extends string, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends QueryPromise> { + readonly _: { + readonly milestone: TMilestone; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + }; +} + +export class SingleStoreDropMilestoneBase< + TMilestone extends string, + TQueryResult extends SingleStoreQueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise> implements SQLWrapper { + static readonly [entityKind]: string = 
'SingleStoreDropMilestone'; + + private config: SingleStoreDropMilestoneConfig; + + constructor( + private milestone: TMilestone, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + ) { + super(); + this.config = { milestone }; + } + + // TODO(singlestore): docs + for(database: string): SingleStoreDropMilestoneWithout { + this.config.database = database; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildDropMilestoneQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): SingleStoreDropMilestonePrepare { + return this.session.prepareQuery( + this.dialect.sqlToQuery(this.getSQL()), + undefined, + ) as SingleStoreDropMilestonePrepare; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return this.prepare().execute(placeholderValues); + }; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): SingleStoreDropMilestoneDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/index.ts b/drizzle-orm/src/singlestore-core/query-builders/index.ts new file mode 100644 index 000000000..16f0e1d4d --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/index.ts @@ -0,0 +1,6 @@ +export * from './delete.ts'; +export * from './insert.ts'; +export * from './query-builder.ts'; +export * from './select.ts'; +export * from './select.types.ts'; +export * from './update.ts'; diff --git a/drizzle-orm/src/singlestore-core/query-builders/insert.ts b/drizzle-orm/src/singlestore-core/query-builders/insert.ts new file mode 100644 index 000000000..129bf2214 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/insert.ts @@ -0,0 +1,305 @@ 
+import { entityKind, is } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { RunnableQuery } from '~/runnable-query.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + AnySingleStoreQueryResultHKT, + PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { SingleStoreTable } from '~/singlestore-core/table.ts'; +import type { Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; +import { Param, SQL, sql } from '~/sql/sql.ts'; +import type { InferModelFromColumns } from '~/table.ts'; +import { Table } from '~/table.ts'; +import { mapUpdateSet, orderSelectedFields } from '~/utils.ts'; +import type { AnySingleStoreColumn, SingleStoreColumn } from '../columns/common.ts'; +import type { SelectedFieldsOrdered } from './select.types.ts'; +import type { SingleStoreUpdateSetSource } from './update.ts'; + +export interface SingleStoreInsertConfig { + table: TTable; + values: Record[]; + ignore: boolean; + onConflict?: SQL; + returning?: SelectedFieldsOrdered; +} + +export type AnySingleStoreInsertConfig = SingleStoreInsertConfig; + +export type SingleStoreInsertValue = + & { + [Key in keyof TTable['$inferInsert']]: TTable['$inferInsert'][Key] | SQL | Placeholder; + } + & {}; + +export class SingleStoreInsertBuilder< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, +> { + static readonly [entityKind]: string = 'SingleStoreInsertBuilder'; + + private shouldIgnore = false; + + constructor( + private table: TTable, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + ) {} + + ignore(): this { + this.shouldIgnore = true; + return this; + } + + values(value: SingleStoreInsertValue): SingleStoreInsertBase; + values(values: 
SingleStoreInsertValue[]): SingleStoreInsertBase; + values( + values: SingleStoreInsertValue | SingleStoreInsertValue[], + ): SingleStoreInsertBase { + values = Array.isArray(values) ? values : [values]; + if (values.length === 0) { + throw new Error('values() must be called with at least one value'); + } + const mappedValues = values.map((entry) => { + const result: Record = {}; + const cols = this.table[Table.Symbol.Columns]; + for (const colKey of Object.keys(entry)) { + const colValue = entry[colKey as keyof typeof entry]; + result[colKey] = is(colValue, SQL) ? colValue : new Param(colValue, cols[colKey]); + } + return result; + }); + + return new SingleStoreInsertBase(this.table, mappedValues, this.shouldIgnore, this.session, this.dialect); + } +} + +export type SingleStoreInsertWithout< + T extends AnySingleStoreInsert, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? T + : Omit< + SingleStoreInsertBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + T['_']['returning'], + TDynamic, + T['_']['excludedMethods'] | '$returning' + >, + T['_']['excludedMethods'] | K + >; + +export type SingleStoreInsertDynamic = SingleStoreInsert< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + T['_']['returning'] +>; + +export type SingleStoreInsertPrepare< + T extends AnySingleStoreInsert, + TReturning extends Record | undefined = undefined, +> = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: TReturning extends undefined ? 
SingleStoreQueryResultKind : TReturning[]; + iterator: never; + }, + true +>; + +export type SingleStoreInsertOnDuplicateKeyUpdateConfig = { + set: SingleStoreUpdateSetSource; +}; + +export type SingleStoreInsert< + TTable extends SingleStoreTable = SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TReturning extends Record | undefined = Record | undefined, +> = SingleStoreInsertBase; + +export type SingleStoreInsertReturning< + T extends AnySingleStoreInsert, + TDynamic extends boolean, +> = SingleStoreInsertBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + InferModelFromColumns>, + TDynamic, + T['_']['excludedMethods'] | '$returning' +>; + +export type AnySingleStoreInsert = SingleStoreInsertBase; + +export interface SingleStoreInsertBase< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TReturning extends Record | undefined = undefined, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends + QueryPromise : TReturning[]>, + RunnableQuery< + TReturning extends undefined ? SingleStoreQueryResultKind : TReturning[], + 'singlestore' + >, + SQLWrapper +{ + readonly _: { + readonly dialect: 'singlestore'; + readonly table: TTable; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly returning: TReturning; + readonly result: TReturning extends undefined ? SingleStoreQueryResultKind : TReturning[]; + }; +} + +export type PrimaryKeyKeys> = { + [K in keyof T]: T[K]['_']['isPrimaryKey'] extends true ? T[K]['_']['isAutoincrement'] extends true ? K + : T[K]['_']['hasRuntimeDefault'] extends true ? T[K]['_']['isPrimaryKey'] extends true ? 
K : never + : never + : T[K]['_']['hasRuntimeDefault'] extends true ? T[K]['_']['isPrimaryKey'] extends true ? K : never + : never; +}[keyof T]; + +export type GetPrimarySerialOrDefaultKeys> = { + [K in PrimaryKeyKeys]: T[K]; +}; + +export class SingleStoreInsertBase< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TReturning extends Record | undefined = undefined, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise : TReturning[]> + implements + RunnableQuery< + TReturning extends undefined ? SingleStoreQueryResultKind : TReturning[], + 'singlestore' + >, + SQLWrapper +{ + static readonly [entityKind]: string = 'SingleStoreInsert'; + + declare protected $table: TTable; + + private config: SingleStoreInsertConfig; + + constructor( + table: TTable, + values: SingleStoreInsertConfig['values'], + ignore: boolean, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + ) { + super(); + this.config = { table, values, ignore }; + } + + /** + * Adds an `on duplicate key update` clause to the query. + * + * Calling this method will update update the row if any unique index conflicts. SingleStore will automatically determine the conflict target based on the primary key and unique indexes. 
+ * + * See docs: {@link https://orm.drizzle.team/docs/insert#on-duplicate-key-update} + * + * @param config The `set` clause + * + * @example + * ```ts + * await db.insert(cars) + * .values({ id: 1, brand: 'BMW'}) + * .onDuplicateKeyUpdate({ set: { brand: 'Porsche' }}); + * ``` + * + * While SingleStore does not directly support doing nothing on conflict, you can perform a no-op by setting any column's value to itself and achieve the same effect: + * + * ```ts + * import { sql } from 'drizzle-orm'; + * + * await db.insert(cars) + * .values({ id: 1, brand: 'BMW' }) + * .onDuplicateKeyUpdate({ set: { id: sql`id` } }); + * ``` + */ + onDuplicateKeyUpdate( + config: SingleStoreInsertOnDuplicateKeyUpdateConfig, + ): SingleStoreInsertWithout { + const setSql = this.dialect.buildUpdateSet(this.config.table, mapUpdateSet(this.config.table, config.set)); + this.config.onConflict = sql`update ${setSql}`; + return this as any; + } + + $returningId(): SingleStoreInsertWithout< + SingleStoreInsertReturning, + TDynamic, + '$returningId' + > { + const returning: SelectedFieldsOrdered = []; + for (const [key, value] of Object.entries(this.config.table[Table.Symbol.Columns])) { + if (value.primary) { + returning.push({ field: value, path: [key] }); + } + } + this.config.returning = orderSelectedFields(this.config.table[Table.Symbol.Columns]); + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildInsertQuery(this.config).sql; + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): SingleStoreInsertPrepare { + const { sql, generatedIds } = this.dialect.buildInsertQuery(this.config); + return this.session.prepareQuery( + this.dialect.sqlToQuery(sql), + undefined, + undefined, + generatedIds, + this.config.returning, + ) as SingleStoreInsertPrepare; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return 
this.prepare().execute(placeholderValues); + }; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): SingleStoreInsertDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/optimizeTable.ts b/drizzle-orm/src/singlestore-core/query-builders/optimizeTable.ts new file mode 100644 index 000000000..daecb90e9 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/optimizeTable.ts @@ -0,0 +1,153 @@ +import { entityKind } from '~/entity.ts'; +import type { ColumnBaseConfig, ColumnDataType } from '~/index.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + AnySingleStoreQueryResultHKT, + PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import type { SingleStoreColumn } from '../columns/common.ts'; +import type { SingleStoreTable } from '../table.ts'; +import type { OptimizeTableArgument } from './optimizeTable.types.ts'; + +export type SingleStoreOptimizeTableWithout< + T extends AnySingleStoreOptimizeTableBase, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? 
T + : Omit< + SingleStoreOptimizeTableBase< + T['_']['table'], + T['_']['arg'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K + >; + +export type SingleStoreOptimizeTable< + TTable extends SingleStoreTable = SingleStoreTable, + TArg extends OptimizeTableArgument = OptimizeTableArgument, + TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, +> = SingleStoreOptimizeTableBase; + +export interface SingleStoreOptimizeTableConfig { + table: SingleStoreTable; + arg?: OptimizeTableArgument | undefined; + selection?: SingleStoreColumn, object>[] | undefined; +} + +export type SingleStoreOptimizeTablePrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: SingleStoreQueryResultKind; + iterator: never; + }, + true +>; + +type SingleStoreOptimizeTableDynamic = SingleStoreOptimizeTable< + T['_']['table'], + T['_']['arg'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'] +>; + +type AnySingleStoreOptimizeTableBase = SingleStoreOptimizeTableBase; + +export interface SingleStoreOptimizeTableBase< + TTable extends SingleStoreTable, + TArg extends OptimizeTableArgument, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends QueryPromise> { + readonly _: { + readonly table: TTable; + readonly arg: TArg | undefined; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + }; +} + +export class SingleStoreOptimizeTableBase< + TTable extends SingleStoreTable, + TArg extends OptimizeTableArgument, + TQueryResult extends SingleStoreQueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars 
+ TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise> implements SQLWrapper { + static readonly [entityKind]: string = 'SingleStoreOptimizeTable'; + + private config: SingleStoreOptimizeTableConfig; + + constructor( + private table: TTable, + private arg: TArg | undefined, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + ) { + super(); + this.config = { table, arg }; + } + + // TODO(singlestore): docs + warmBlobCacheForColumn( + ...selection: SingleStoreColumn, object>[] + ): SingleStoreOptimizeTableWithout { + if (this.config.arg) { + throw new Error('Cannot call warmBlobCacheForColumn with an argument'); + } + this.config.selection = selection; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildOptimizeTable(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): SingleStoreOptimizeTablePrepare { + return this.session.prepareQuery( + this.dialect.sqlToQuery(this.getSQL()), + undefined, + ) as SingleStoreOptimizeTablePrepare; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return this.prepare().execute(placeholderValues); + }; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): SingleStoreOptimizeTableDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/optimizeTable.types.ts b/drizzle-orm/src/singlestore-core/query-builders/optimizeTable.types.ts new file mode 100644 index 000000000..bb5993ab7 --- /dev/null +++ 
b/drizzle-orm/src/singlestore-core/query-builders/optimizeTable.types.ts @@ -0,0 +1,5 @@ +export type OptimizeTableArgument = + | 'FULL' + | 'FLUSH' + | 'FIX_ALTER' + | 'INDEX'; diff --git a/drizzle-orm/src/singlestore-core/query-builders/query-builder.ts b/drizzle-orm/src/singlestore-core/query-builders/query-builder.ts new file mode 100644 index 000000000..b76a0457e --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/query-builder.ts @@ -0,0 +1,107 @@ +import { entityKind } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { WithSubqueryWithSelection } from '~/singlestore-core/subquery.ts'; +import type { ColumnsSelection } from '~/sql/sql.ts'; +import { WithSubquery } from '~/subquery.ts'; +import { SingleStoreSelectBuilder } from './select.ts'; +import type { SelectedFields } from './select.types.ts'; + +export class QueryBuilder { + static readonly [entityKind]: string = 'SingleStoreQueryBuilder'; + + private dialect: SingleStoreDialect | undefined; + + $with(alias: TAlias) { + const queryBuilder = this; + + return { + as( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithSelection { + if (typeof qb === 'function') { + qb = qb(queryBuilder); + } + + return new Proxy( + new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as WithSubqueryWithSelection; + }, + }; + } + + with(...queries: WithSubquery[]) { + const self = this; + + function select(): SingleStoreSelectBuilder; + function select( + fields: TSelection, + ): SingleStoreSelectBuilder; + function select( + fields?: TSelection, + ): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? 
undefined, + session: undefined, + dialect: self.getDialect(), + withList: queries, + }); + } + + function selectDistinct(): SingleStoreSelectBuilder; + function selectDistinct( + fields: TSelection, + ): SingleStoreSelectBuilder; + function selectDistinct( + fields?: TSelection, + ): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: self.getDialect(), + withList: queries, + distinct: true, + }); + } + + return { select, selectDistinct }; + } + + select(): SingleStoreSelectBuilder; + select(fields: TSelection): SingleStoreSelectBuilder; + select( + fields?: TSelection, + ): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: this.getDialect(), + }); + } + + selectDistinct(): SingleStoreSelectBuilder; + selectDistinct( + fields: TSelection, + ): SingleStoreSelectBuilder; + selectDistinct( + fields?: TSelection, + ): SingleStoreSelectBuilder { + return new SingleStoreSelectBuilder({ + fields: fields ?? 
undefined, + session: undefined, + dialect: this.getDialect(), + distinct: true, + }); + } + + // Lazy load dialect to avoid circular dependency + private getDialect() { + if (!this.dialect) { + this.dialect = new SingleStoreDialect(); + } + + return this.dialect; + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/query.ts b/drizzle-orm/src/singlestore-core/query-builders/query.ts new file mode 100644 index 000000000..5744c7c9c --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/query.ts @@ -0,0 +1,141 @@ +import { entityKind } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import { + type BuildQueryResult, + type BuildRelationalQueryResult, + type DBQueryConfig, + mapRelationalRow, + type TableRelationalConfig, + type TablesRelationalConfig, +} from '~/relations.ts'; +import type { Query, QueryWithTypings, SQL } from '~/sql/sql.ts'; +import type { KnownKeysOnly } from '~/utils.ts'; +import type { SingleStoreDialect } from '../dialect.ts'; +import type { + PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreSession, +} from '../session.ts'; +import type { SingleStoreTable } from '../table.ts'; + +export class RelationalQueryBuilder< + TPreparedQueryHKT extends PreparedQueryHKTBase, + TSchema extends TablesRelationalConfig, + TFields extends TableRelationalConfig, +> { + static readonly [entityKind]: string = 'SingleStoreRelationalQueryBuilder'; + + constructor( + private fullSchema: Record, + private schema: TSchema, + private tableNamesMap: Record, + private table: SingleStoreTable, + private tableConfig: TableRelationalConfig, + private dialect: SingleStoreDialect, + private session: SingleStoreSession, + ) {} + + findMany>( + config?: KnownKeysOnly>, + ): SingleStoreRelationalQuery[]> { + return new SingleStoreRelationalQuery( + this.fullSchema, + this.schema, + this.tableNamesMap, + this.table, + this.tableConfig, + this.dialect, + this.session, + config ? 
(config as DBQueryConfig<'many', true>) : {}, + 'many', + ); + } + + findFirst, 'limit'>>( + config?: KnownKeysOnly, 'limit'>>, + ): SingleStoreRelationalQuery | undefined> { + return new SingleStoreRelationalQuery( + this.fullSchema, + this.schema, + this.tableNamesMap, + this.table, + this.tableConfig, + this.dialect, + this.session, + config ? { ...(config as DBQueryConfig<'many', true> | undefined), limit: 1 } : { limit: 1 }, + 'first', + ); + } +} + +export class SingleStoreRelationalQuery< + TPreparedQueryHKT extends PreparedQueryHKTBase, + TResult, +> extends QueryPromise { + static readonly [entityKind]: string = 'SingleStoreRelationalQuery'; + + declare protected $brand: 'SingleStoreRelationalQuery'; + + constructor( + private fullSchema: Record, + private schema: TablesRelationalConfig, + private tableNamesMap: Record, + private table: SingleStoreTable, + private tableConfig: TableRelationalConfig, + private dialect: SingleStoreDialect, + private session: SingleStoreSession, + private config: DBQueryConfig<'many', true> | true, + private queryMode: 'many' | 'first', + ) { + super(); + } + + prepare() { + const { query, builtQuery } = this._toSQL(); + return this.session.prepareQuery( + builtQuery, + undefined, + (rawRows) => { + const rows = rawRows.map((row) => mapRelationalRow(this.schema, this.tableConfig, row, query.selection)); + if (this.queryMode === 'first') { + return rows[0] as TResult; + } + return rows as TResult; + }, + ) as PreparedQueryKind; + } + + private _getQuery() { + return this.dialect.buildRelationalQuery({ + fullSchema: this.fullSchema, + schema: this.schema, + tableNamesMap: this.tableNamesMap, + table: this.table, + tableConfig: this.tableConfig, + queryConfig: this.config, + tableAlias: this.tableConfig.tsName, + }); + } + + private _toSQL(): { query: BuildRelationalQueryResult; builtQuery: QueryWithTypings } { + const query = this._getQuery(); + + const builtQuery = this.dialect.sqlToQuery(query.sql as SQL); + + return { 
builtQuery, query }; + } + + /** @internal */ + getSQL(): SQL { + return this._getQuery().sql as SQL; + } + + toSQL(): Query { + return this._toSQL().builtQuery; + } + + override execute(): Promise { + return this.prepare().execute(); + } +} diff --git a/drizzle-orm/src/singlestore-core/query-builders/select.ts b/drizzle-orm/src/singlestore-core/query-builders/select.ts new file mode 100644 index 000000000..78cba92e0 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/select.ts @@ -0,0 +1,1081 @@ +import { entityKind, is } from '~/entity.ts'; +import { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { + BuildSubquerySelection, + GetSelectTableName, + GetSelectTableSelection, + JoinNullability, + JoinType, + SelectMode, + SelectResult, + SetOperator, +} from '~/query-builders/select.types.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { SingleStoreColumn } from '~/singlestore-core/columns/index.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + PreparedQueryHKTBase, + SingleStorePreparedQueryConfig, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { SubqueryWithSelection } from '~/singlestore-core/subquery.ts'; +import type { SingleStoreTable } from '~/singlestore-core/table.ts'; +import type { ColumnsSelection, Query } from '~/sql/sql.ts'; +import { SQL, View } from '~/sql/sql.ts'; +import { Subquery } from '~/subquery.ts'; +import { Table } from '~/table.ts'; +import { applyMixins, getTableColumns, getTableLikeName, haveSameKeys, type ValueOrArray } from '~/utils.ts'; +import { orderSelectedFields } from '~/utils.ts'; +import { ViewBaseConfig } from '~/view-common.ts'; +import { SingleStoreViewBase } from '../view-base.ts'; +import type { + AnySingleStoreSelect, + CreateSingleStoreSelectFromBuilderMode, + GetSingleStoreSetOperators, + LockConfig, + LockStrength, + 
SelectedFields, + SetOperatorRightSelect, + SingleStoreCreateSetOperatorFn, + SingleStoreJoinFn, + SingleStoreSelectConfig, + SingleStoreSelectDynamic, + SingleStoreSelectHKT, + SingleStoreSelectHKTBase, + SingleStoreSelectPrepare, + SingleStoreSelectWithout, + SingleStoreSetOperatorExcludedMethods, + SingleStoreSetOperatorWithResult, +} from './select.types.ts'; + +export class SingleStoreSelectBuilder< + TSelection extends SelectedFields | undefined, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TBuilderMode extends 'db' | 'qb' = 'db', +> { + static readonly [entityKind]: string = 'SingleStoreSelectBuilder'; + + private fields: TSelection; + private session: SingleStoreSession | undefined; + private dialect: SingleStoreDialect; + private withList: Subquery[] = []; + private distinct: boolean | undefined; + + constructor( + config: { + fields: TSelection; + session: SingleStoreSession | undefined; + dialect: SingleStoreDialect; + withList?: Subquery[]; + distinct?: boolean; + }, + ) { + this.fields = config.fields; + this.session = config.session; + this.dialect = config.dialect; + if (config.withList) { + this.withList = config.withList; + } + this.distinct = config.distinct; + } + + from( + source: TFrom, + ): CreateSingleStoreSelectFromBuilderMode< + TBuilderMode, + GetSelectTableName, + TSelection extends undefined ? GetSelectTableSelection : TSelection, + TSelection extends undefined ? 
'single' : 'partial', + TPreparedQueryHKT + > { + const isPartialSelect = !!this.fields; + + let fields: SelectedFields; + if (this.fields) { + fields = this.fields; + } else if (is(source, Subquery)) { + // This is required to use the proxy handler to get the correct field values from the subquery + fields = Object.fromEntries( + Object.keys(source._.selectedFields).map(( + key, + ) => [key, source[key as unknown as keyof typeof source] as unknown as SelectedFields[string]]), + ); + } else if (is(source, SingleStoreViewBase)) { + fields = source[ViewBaseConfig].selectedFields as SelectedFields; + } else if (is(source, SQL)) { + fields = {}; + } else { + fields = getTableColumns(source); + } + + return new SingleStoreSelectBase( + { + table: source, + fields, + isPartialSelect, + session: this.session, + dialect: this.dialect, + withList: this.withList, + distinct: this.distinct, + }, + ) as any; + } +} + +export abstract class SingleStoreSelectQueryBuilderBase< + THKT extends SingleStoreSelectHKTBase, + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TNullabilityMap extends Record = TTableName extends string ? 
Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +> extends TypedQueryBuilder { + static readonly [entityKind]: string = 'SingleStoreSelectQueryBuilder'; + + override readonly _: { + readonly hkt: THKT; + readonly tableName: TTableName; + readonly selection: TSelection; + readonly selectMode: TSelectMode; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly nullabilityMap: TNullabilityMap; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly result: TResult; + readonly selectedFields: TSelectedFields; + }; + + protected config: SingleStoreSelectConfig; + protected joinsNotNullableMap: Record; + private tableName: string | undefined; + private isPartialSelect: boolean; + /** @internal */ + readonly session: SingleStoreSession | undefined; + protected dialect: SingleStoreDialect; + + constructor( + { table, fields, isPartialSelect, session, dialect, withList, distinct }: { + table: SingleStoreSelectConfig['table']; + fields: SingleStoreSelectConfig['fields']; + isPartialSelect: boolean; + session: SingleStoreSession | undefined; + dialect: SingleStoreDialect; + withList: Subquery[]; + distinct: boolean | undefined; + }, + ) { + super(); + this.config = { + withList, + table, + fields: { ...fields }, + distinct, + setOperators: [], + }; + this.isPartialSelect = isPartialSelect; + this.session = session; + this.dialect = dialect; + this._ = { + selectedFields: fields as TSelectedFields, + } as this['_']; + this.tableName = getTableLikeName(table); + this.joinsNotNullableMap = typeof this.tableName === 'string' ? 
{ [this.tableName]: true } : {}; + } + + private createJoin( + joinType: TJoinType, + ): SingleStoreJoinFn { + return ( + table: SingleStoreTable | Subquery | SingleStoreViewBase | SQL, + on: ((aliases: TSelection) => SQL | undefined) | SQL | undefined, + ) => { + const baseTableName = this.tableName; + const tableName = getTableLikeName(table); + + if (typeof tableName === 'string' && this.config.joins?.some((join) => join.alias === tableName)) { + throw new Error(`Alias "${tableName}" is already used in this query`); + } + + if (!this.isPartialSelect) { + // If this is the first join and this is not a partial select and we're not selecting from raw SQL, "move" the fields from the main table to the nested object + if (Object.keys(this.joinsNotNullableMap).length === 1 && typeof baseTableName === 'string') { + this.config.fields = { + [baseTableName]: this.config.fields, + }; + } + if (typeof tableName === 'string' && !is(table, SQL)) { + const selection = is(table, Subquery) + ? table._.selectedFields + : is(table, View) + ? 
table[ViewBaseConfig].selectedFields + : table[Table.Symbol.Columns]; + this.config.fields[tableName] = selection; + } + } + + if (typeof on === 'function') { + on = on( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as TSelection, + ); + } + + if (!this.config.joins) { + this.config.joins = []; + } + + this.config.joins.push({ on, table, joinType, alias: tableName }); + + if (typeof tableName === 'string') { + switch (joinType) { + case 'left': { + this.joinsNotNullableMap[tableName] = false; + break; + } + case 'right': { + this.joinsNotNullableMap = Object.fromEntries( + Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), + ); + this.joinsNotNullableMap[tableName] = true; + break; + } + case 'inner': { + this.joinsNotNullableMap[tableName] = true; + break; + } + case 'full': { + this.joinsNotNullableMap = Object.fromEntries( + Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), + ); + this.joinsNotNullableMap[tableName] = false; + break; + } + } + } + + return this as any; + }; + } + + /** + * Executes a `left join` operation by adding another table to the current query. + * + * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#left-join} + * + * @param table the table to join. + * @param on the `on` clause. 
+ * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User; pets: Pet | null }[] = await db.select() + * .from(users) + * .leftJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number; petId: number | null }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .leftJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + leftJoin = this.createJoin('left'); + + /** + * Executes a `right join` operation by adding another table to the current query. + * + * Calling this method associates each row of the joined table with the corresponding row from the main table, if a match is found. If no matching row exists, it sets all columns of the main table to null. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#right-join} + * + * @param table the table to join. + * @param on the `on` clause. + * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User | null; pets: Pet }[] = await db.select() + * .from(users) + * .rightJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number | null; petId: number }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .rightJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + rightJoin = this.createJoin('right'); + + /** + * Executes an `inner join` operation, creating a new table by combining rows from two tables that have matching values. + * + * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join} + * + * @param table the table to join. + * @param on the `on` clause. 
+ * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User; pets: Pet }[] = await db.select() + * .from(users) + * .innerJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number; petId: number }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .innerJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + innerJoin = this.createJoin('inner'); + + /** + * Executes a `full join` operation by combining rows from two tables into a new table. + * + * Calling this method retrieves all rows from both main and joined tables, merging rows with matching values and filling in `null` for non-matching columns. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#full-join} + * + * @param table the table to join. + * @param on the `on` clause. + * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User | null; pets: Pet | null }[] = await db.select() + * .from(users) + * .fullJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number | null; petId: number | null }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .fullJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + fullJoin = this.createJoin('full'); + + private createSetOperator( + type: SetOperator, + isAll: boolean, + ): >( + rightSelection: + | ((setOperators: GetSingleStoreSetOperators) => SetOperatorRightSelect) + | SetOperatorRightSelect, + ) => SingleStoreSelectWithout< + this, + TDynamic, + SingleStoreSetOperatorExcludedMethods, + true + > { + return (rightSelection) => { + const rightSelect = (typeof rightSelection === 'function' + ? 
rightSelection(getSingleStoreSetOperators()) + : rightSelection) as TypedQueryBuilder< + any, + TResult + >; + + if (!haveSameKeys(this.getSelectedFields(), rightSelect.getSelectedFields())) { + throw new Error( + 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', + ); + } + + this.config.setOperators.push({ type, isAll, rightSelect }); + return this as any; + }; + } + + /** + * Adds `union` set operator to the query. + * + * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} + * + * @example + * + * ```ts + * // Select all unique names from customers and users tables + * await db.select({ name: users.name }) + * .from(users) + * .union( + * db.select({ name: customers.name }).from(customers) + * ); + * // or + * import { union } from 'drizzle-orm/singlestore-core' + * + * await union( + * db.select({ name: users.name }).from(users), + * db.select({ name: customers.name }).from(customers) + * ); + * ``` + */ + union = this.createSetOperator('union', false); + + /** + * Adds `union all` set operator to the query. + * + * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. 
+ * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} + * + * @example + * + * ```ts + * // Select all transaction ids from both online and in-store sales + * await db.select({ transaction: onlineSales.transactionId }) + * .from(onlineSales) + * .unionAll( + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * // or + * import { unionAll } from 'drizzle-orm/singlestore-core' + * + * await unionAll( + * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * ``` + */ + unionAll = this.createSetOperator('union', true); + + /** + * Adds `intersect` set operator to the query. + * + * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} + * + * @example + * + * ```ts + * // Select course names that are offered in both departments A and B + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .intersect( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * import { intersect } from 'drizzle-orm/singlestore-core' + * + * await intersect( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ + intersect = this.createSetOperator('intersect', false); + + /** + * Adds `except` set operator to the query. + * + * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. 
+ * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} + * + * @example + * + * ```ts + * // Select all courses offered in department A but not in department B + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .except( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * import { except } from 'drizzle-orm/singlestore-core' + * + * await except( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ + except = this.createSetOperator('except', false); + + /** + * Adds `minus` set operator to the query. + * + * This is an alias of `except` supported by SingleStore. + * + * @example + * + * ```ts + * // Select all courses offered in department A but not in department B + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .minus( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * import { minus } from 'drizzle-orm/singlestore-core' + * + * await minus( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ + minus = this.createSetOperator('except', false); + + /** @internal */ + addSetOperators(setOperators: SingleStoreSelectConfig['setOperators']): SingleStoreSelectWithout< + this, + TDynamic, + SingleStoreSetOperatorExcludedMethods, + true + > { + this.config.setOperators.push(...setOperators); + return this as any; + } + + /** + * Adds a `where` clause to the query. + * + * Calling this method will select only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/select#filtering} + * + * @param where the `where` clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be selected. 
+ * + * ```ts + * // Select all cars with green color + * await db.select().from(cars).where(eq(cars.color, 'green')); + * // or + * await db.select().from(cars).where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Select all BMW cars with a green color + * await db.select().from(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Select all cars with the green or blue color + * await db.select().from(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + where( + where: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, + ): SingleStoreSelectWithout { + if (typeof where === 'function') { + where = where( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as TSelection, + ); + } + this.config.where = where; + return this as any; + } + + /** + * Adds a `having` clause to the query. + * + * Calling this method will select only those rows that fulfill a specified condition. It is typically used with aggregate functions to filter the aggregated data based on a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} + * + * @param having the `having` clause. 
+ * + * @example + * + * ```ts + * // Select all brands with more than one car + * await db.select({ + * brand: cars.brand, + * count: sql`cast(count(${cars.id}) as int)`, + * }) + * .from(cars) + * .groupBy(cars.brand) + * .having(({ count }) => gt(count, 1)); + * ``` + */ + having( + having: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, + ): SingleStoreSelectWithout { + if (typeof having === 'function') { + having = having( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as TSelection, + ); + } + this.config.having = having; + return this as any; + } + + /** + * Adds a `group by` clause to the query. + * + * Calling this method will group rows that have the same values into summary rows, often used for aggregation purposes. + * + * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} + * + * @example + * + * ```ts + * // Group and count people by their last names + * await db.select({ + * lastName: people.lastName, + * count: sql`cast(count(*) as int)` + * }) + * .from(people) + * .groupBy(people.lastName); + * ``` + */ + groupBy( + builder: (aliases: this['_']['selection']) => ValueOrArray, + ): SingleStoreSelectWithout; + groupBy(...columns: (SingleStoreColumn | SQL | SQL.Aliased)[]): SingleStoreSelectWithout; + groupBy( + ...columns: + | [(aliases: this['_']['selection']) => ValueOrArray] + | (SingleStoreColumn | SQL | SQL.Aliased)[] + ): SingleStoreSelectWithout { + if (typeof columns[0] === 'function') { + const groupBy = columns[0]( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), + ) as TSelection, + ); + this.config.groupBy = Array.isArray(groupBy) ? groupBy : [groupBy]; + } else { + this.config.groupBy = columns as (SingleStoreColumn | SQL | SQL.Aliased)[]; + } + return this as any; + } + + /** + * Adds an `order by` clause to the query. 
+ * + * Calling this method will sort the result-set in ascending or descending order. By default, the sort order is ascending. + * + * See docs: {@link https://orm.drizzle.team/docs/select#order-by} + * + * @example + * + * ``` + * // Select cars ordered by year + * await db.select().from(cars).orderBy(cars.year); + * ``` + * + * You can specify whether results are in ascending or descending order with the `asc()` and `desc()` operators. + * + * ```ts + * // Select cars ordered by year in descending order + * await db.select().from(cars).orderBy(desc(cars.year)); + * + * // Select cars ordered by year and price + * await db.select().from(cars).orderBy(asc(cars.year), desc(cars.price)); + * ``` + */ + orderBy( + builder: (aliases: this['_']['selection']) => ValueOrArray, + ): SingleStoreSelectWithout; + orderBy(...columns: (SingleStoreColumn | SQL | SQL.Aliased)[]): SingleStoreSelectWithout; + orderBy( + ...columns: + | [(aliases: this['_']['selection']) => ValueOrArray] + | (SingleStoreColumn | SQL | SQL.Aliased)[] + ): SingleStoreSelectWithout { + if (typeof columns[0] === 'function') { + const orderBy = columns[0]( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), + ) as TSelection, + ); + + const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; + + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.orderBy = orderByArray; + } else { + this.config.orderBy = orderByArray; + } + } else { + const orderByArray = columns as (SingleStoreColumn | SQL | SQL.Aliased)[]; + + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.orderBy = orderByArray; + } else { + this.config.orderBy = orderByArray; + } + } + return this as any; + } + + /** + * Adds a `limit` clause to the query. + * + * Calling this method will set the maximum number of rows that will be returned by this query. 
+ * + * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} + * + * @param limit the `limit` clause. + * + * @example + * + * ```ts + * // Get the first 10 people from this query. + * await db.select().from(people).limit(10); + * ``` + */ + limit(limit: number): SingleStoreSelectWithout { + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.limit = limit; + } else { + this.config.limit = limit; + } + return this as any; + } + + /** + * Adds an `offset` clause to the query. + * + * Calling this method will skip a number of rows when returning results from this query. + * + * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} + * + * @param offset the `offset` clause. + * + * @example + * + * ```ts + * // Get the 10th-20th people from this query. + * await db.select().from(people).offset(10).limit(10); + * ``` + */ + offset(offset: number): SingleStoreSelectWithout { + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.offset = offset; + } else { + this.config.offset = offset; + } + return this as any; + } + + /** + * Adds a `for` clause to the query. + * + * Calling this method will specify a lock strength for this query that controls how strictly it acquires exclusive access to the rows being queried. + * + * See docs: {@link https://dev.mysql.com/doc/refman/8.0/en/innodb-locking-reads.html} + * TODO(singlestore) + * + * @param strength the lock strength. + * @param config the lock configuration. 
+ */ + for(strength: LockStrength, config: LockConfig = {}): SingleStoreSelectWithout { + this.config.lockingClause = { strength, config }; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildSelectQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + as( + alias: TAlias, + ): SubqueryWithSelection { + return new Proxy( + new Subquery(this.getSQL(), this.config.fields, alias), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as SubqueryWithSelection; + } + + /** @internal */ + override getSelectedFields(): this['_']['selectedFields'] { + return new Proxy( + this.config.fields, + new SelectionProxyHandler({ alias: this.tableName, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as this['_']['selectedFields']; + } + + $dynamic(): SingleStoreSelectDynamic { + return this as any; + } +} + +export interface SingleStoreSelectBase< + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +> extends + SingleStoreSelectQueryBuilderBase< + SingleStoreSelectHKT, + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + QueryPromise +{} + +export class SingleStoreSelectBase< + TTableName extends string | undefined, + TSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TNullabilityMap extends Record = TTableName extends string ? 
Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult = SelectResult[], + TSelectedFields = BuildSubquerySelection, +> extends SingleStoreSelectQueryBuilderBase< + SingleStoreSelectHKT, + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields +> { + static readonly [entityKind]: string = 'SingleStoreSelect'; + + prepare(): SingleStoreSelectPrepare { + if (!this.session) { + throw new Error('Cannot execute a query on a query builder. Please use a database instance instead.'); + } + const fieldsList = orderSelectedFields(this.config.fields); + const query = this.session.prepareQuery< + SingleStorePreparedQueryConfig & { execute: SelectResult[] }, + TPreparedQueryHKT + >(this.dialect.sqlToQuery(this.getSQL()), fieldsList); + query.joinsNotNullableMap = this.joinsNotNullableMap; + return query as SingleStoreSelectPrepare; + } + + execute = ((placeholderValues) => { + return this.prepare().execute(placeholderValues); + }) as ReturnType['execute']; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); +} + +applyMixins(SingleStoreSelectBase, [QueryPromise]); + +function createSetOperator(type: SetOperator, isAll: boolean): SingleStoreCreateSetOperatorFn { + return (leftSelect, rightSelect, ...restSelects) => { + const setOperators = [rightSelect, ...restSelects].map((select) => ({ + type, + isAll, + rightSelect: select as AnySingleStoreSelect, + })); + + for (const setOperator of setOperators) { + if (!haveSameKeys((leftSelect as any).getSelectedFields(), setOperator.rightSelect.getSelectedFields())) { + throw new Error( + 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', + ); + } + } + + return 
(leftSelect as AnySingleStoreSelect).addSetOperators(setOperators) as any; + }; +} + +const getSingleStoreSetOperators = () => ({ + union, + unionAll, + intersect, + except, + minus, +}); + +/** + * Adds `union` set operator to the query. + * + * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} + * + * @example + * + * ```ts + * // Select all unique names from customers and users tables + * import { union } from 'drizzle-orm/singlestore-core' + * + * await union( + * db.select({ name: users.name }).from(users), + * db.select({ name: customers.name }).from(customers) + * ); + * // or + * await db.select({ name: users.name }) + * .from(users) + * .union( + * db.select({ name: customers.name }).from(customers) + * ); + * ``` + */ +export const union = createSetOperator('union', false); + +/** + * Adds `union all` set operator to the query. + * + * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} + * + * @example + * + * ```ts + * // Select all transaction ids from both online and in-store sales + * import { unionAll } from 'drizzle-orm/singlestore-core' + * + * await unionAll( + * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * // or + * await db.select({ transaction: onlineSales.transactionId }) + * .from(onlineSales) + * .unionAll( + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * ``` + */ +export const unionAll = createSetOperator('union', true); + +/** + * Adds `intersect` set operator to the query. 
+ * + * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} + * + * @example + * + * ```ts + * // Select course names that are offered in both departments A and B + * import { intersect } from 'drizzle-orm/singlestore-core' + * + * await intersect( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .intersect( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ +export const intersect = createSetOperator('intersect', false); + +/** + * Adds `except` set operator to the query. + * + * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} + * + * @example + * + * ```ts + * // Select all courses offered in department A but not in department B + * import { except } from 'drizzle-orm/singlestore-core' + * + * await except( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .except( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ +export const except = createSetOperator('except', false); + +/** + * Adds `minus` set operator to the query. + * + * This is an alias of `except` supported by SingleStore. 
+ * + * @example + * + * ```ts + * // Select all courses offered in department A but not in department B + * import { minus } from 'drizzle-orm/singlestore-core' + * + * await minus( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .minus( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ +export const minus = createSetOperator('except', false); diff --git a/drizzle-orm/src/singlestore-core/query-builders/select.types.ts b/drizzle-orm/src/singlestore-core/query-builders/select.types.ts new file mode 100644 index 000000000..6db1cc357 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/select.types.ts @@ -0,0 +1,457 @@ +import type { + SelectedFields as SelectedFieldsBase, + SelectedFieldsFlat as SelectedFieldsFlatBase, + SelectedFieldsOrdered as SelectedFieldsOrderedBase, +} from '~/operations.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { + AppendToNullabilityMap, + AppendToResult, + BuildSubquerySelection, + GetSelectTableName, + JoinNullability, + JoinType, + MapColumnsToTableAlias, + SelectMode, + SelectResult, + SetOperator, +} from '~/query-builders/select.types.ts'; +import type { SingleStoreColumn } from '~/singlestore-core/columns/index.ts'; +import type { SingleStoreTable, SingleStoreTableWithColumns } from '~/singlestore-core/table.ts'; +import type { ColumnsSelection, Placeholder, SQL, View } from '~/sql/sql.ts'; +import type { Subquery } from '~/subquery.ts'; +import type { Table, UpdateTableConfig } from '~/table.ts'; +import type { Assume, ValidateShape } from '~/utils.ts'; +import type { PreparedQueryHKTBase, PreparedQueryKind, SingleStorePreparedQueryConfig } from '../session.ts'; +import type { SingleStoreViewBase } from '../view-base.ts'; +import type { SingleStoreViewWithSelection } from 
'../view.ts'; +import type { SingleStoreSelectBase, SingleStoreSelectQueryBuilderBase } from './select.ts'; + +export interface SingleStoreSelectJoinConfig { + on: SQL | undefined; + table: SingleStoreTable | Subquery | SingleStoreViewBase | SQL; + alias: string | undefined; + joinType: JoinType; + lateral?: boolean; +} + +export type BuildAliasTable = TTable extends Table + ? SingleStoreTableWithColumns< + UpdateTableConfig; + }> + > + : TTable extends View ? SingleStoreViewWithSelection< + TAlias, + TTable['_']['existing'], + MapColumnsToTableAlias + > + : never; + +export interface SingleStoreSelectConfig { + withList?: Subquery[]; + fields: Record; + fieldsFlat?: SelectedFieldsOrdered; + where?: SQL; + having?: SQL; + table: SingleStoreTable | Subquery | SingleStoreViewBase | SQL; + limit?: number | Placeholder; + offset?: number | Placeholder; + joins?: SingleStoreSelectJoinConfig[]; + orderBy?: (SingleStoreColumn | SQL | SQL.Aliased)[]; + groupBy?: (SingleStoreColumn | SQL | SQL.Aliased)[]; + lockingClause?: { + strength: LockStrength; + config: LockConfig; + }; + distinct?: boolean; + setOperators: { + rightSelect: TypedQueryBuilder; + type: SetOperator; + isAll: boolean; + orderBy?: (SingleStoreColumn | SQL | SQL.Aliased)[]; + limit?: number | Placeholder; + offset?: number | Placeholder; + }[]; +} + +export type SingleStoreJoin< + T extends AnySingleStoreSelectQueryBuilder, + TDynamic extends boolean, + TJoinType extends JoinType, + TJoinedTable extends SingleStoreTable | Subquery | SingleStoreViewBase | SQL, + TJoinedName extends GetSelectTableName = GetSelectTableName, +> = T extends any ? SingleStoreSelectWithout< + SingleStoreSelectKind< + T['_']['hkt'], + T['_']['tableName'], + AppendToResult< + T['_']['tableName'], + T['_']['selection'], + TJoinedName, + TJoinedTable extends SingleStoreTable ? TJoinedTable['_']['columns'] + : TJoinedTable extends Subquery ? Assume + : never, + T['_']['selectMode'] + >, + T['_']['selectMode'] extends 'partial' ? 
T['_']['selectMode'] : 'multiple', + T['_']['preparedQueryHKT'], + AppendToNullabilityMap, + TDynamic, + T['_']['excludedMethods'] + >, + TDynamic, + T['_']['excludedMethods'] + > + : never; + +export type SingleStoreJoinFn< + T extends AnySingleStoreSelectQueryBuilder, + TDynamic extends boolean, + TJoinType extends JoinType, +> = < + TJoinedTable extends SingleStoreTable | Subquery | SingleStoreViewBase | SQL, + TJoinedName extends GetSelectTableName = GetSelectTableName, +>( + table: TJoinedTable, + on: ((aliases: T['_']['selection']) => SQL | undefined) | SQL | undefined, +) => SingleStoreJoin; + +export type SelectedFieldsFlat = SelectedFieldsFlatBase; + +export type SelectedFields = SelectedFieldsBase; + +export type SelectedFieldsOrdered = SelectedFieldsOrderedBase; + +export type LockStrength = 'update' | 'share'; + +export type LockConfig = { + noWait: true; + skipLocked?: undefined; +} | { + noWait?: undefined; + skipLocked: true; +} | { + noWait?: undefined; + skipLocked?: undefined; +}; + +export interface SingleStoreSelectHKTBase { + tableName: string | undefined; + selection: unknown; + selectMode: SelectMode; + preparedQueryHKT: unknown; + nullabilityMap: unknown; + dynamic: boolean; + excludedMethods: string; + result: unknown; + selectedFields: unknown; + _type: unknown; +} + +export type SingleStoreSelectKind< + T extends SingleStoreSelectHKTBase, + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TNullabilityMap extends Record, + TDynamic extends boolean, + TExcludedMethods extends string, + TResult = SelectResult[], + TSelectedFields = BuildSubquerySelection, +> = (T & { + tableName: TTableName; + selection: TSelection; + selectMode: TSelectMode; + preparedQueryHKT: TPreparedQueryHKT; + nullabilityMap: TNullabilityMap; + dynamic: TDynamic; + excludedMethods: TExcludedMethods; + result: TResult; + selectedFields: TSelectedFields; 
+})['_type']; + +export interface SingleStoreSelectQueryBuilderHKT extends SingleStoreSelectHKTBase { + _type: SingleStoreSelectQueryBuilderBase< + SingleStoreSelectQueryBuilderHKT, + this['tableName'], + Assume, + this['selectMode'], + Assume, + Assume>, + this['dynamic'], + this['excludedMethods'], + Assume, + Assume + >; +} + +export interface SingleStoreSelectHKT extends SingleStoreSelectHKTBase { + _type: SingleStoreSelectBase< + this['tableName'], + Assume, + this['selectMode'], + Assume, + Assume>, + this['dynamic'], + this['excludedMethods'], + Assume, + Assume + >; +} + +export type SingleStoreSetOperatorExcludedMethods = + | 'where' + | 'having' + | 'groupBy' + | 'session' + | 'leftJoin' + | 'rightJoin' + | 'innerJoin' + | 'fullJoin' + | 'for'; + +export type SingleStoreSelectWithout< + T extends AnySingleStoreSelectQueryBuilder, + TDynamic extends boolean, + K extends keyof T & string, + TResetExcluded extends boolean = false, +> = TDynamic extends true ? T : Omit< + SingleStoreSelectKind< + T['_']['hkt'], + T['_']['tableName'], + T['_']['selection'], + T['_']['selectMode'], + T['_']['preparedQueryHKT'], + T['_']['nullabilityMap'], + TDynamic, + TResetExcluded extends true ? K : T['_']['excludedMethods'] | K, + T['_']['result'], + T['_']['selectedFields'] + >, + TResetExcluded extends true ? 
K : T['_']['excludedMethods'] | K +>; + +export type SingleStoreSelectPrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: T['_']['result']; + iterator: T['_']['result'][number]; + }, + true +>; + +export type SingleStoreSelectDynamic = SingleStoreSelectKind< + T['_']['hkt'], + T['_']['tableName'], + T['_']['selection'], + T['_']['selectMode'], + T['_']['preparedQueryHKT'], + T['_']['nullabilityMap'], + true, + never, + T['_']['result'], + T['_']['selectedFields'] +>; + +export type CreateSingleStoreSelectFromBuilderMode< + TBuilderMode extends 'db' | 'qb', + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, +> = TBuilderMode extends 'db' ? SingleStoreSelectBase + : SingleStoreSelectQueryBuilderBase< + SingleStoreSelectQueryBuilderHKT, + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT + >; + +export type SingleStoreSelectQueryBuilder< + THKT extends SingleStoreSelectHKTBase = SingleStoreSelectQueryBuilderHKT, + TTableName extends string | undefined = string | undefined, + TSelection extends ColumnsSelection = ColumnsSelection, + TSelectMode extends SelectMode = SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TNullabilityMap extends Record = Record, + TResult extends any[] = unknown[], + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> = SingleStoreSelectQueryBuilderBase< + THKT, + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TNullabilityMap, + true, + never, + TResult, + TSelectedFields +>; + +export type AnySingleStoreSelectQueryBuilder = SingleStoreSelectQueryBuilderBase< + any, + any, + any, + any, + any, + any, + any, + any, + any +>; + +export type AnySingleStoreSetOperatorInterface = SingleStoreSetOperatorInterface< + any, + any, + any, + any, + any, + any, + any, + any, + any +>; + +export interface 
SingleStoreSetOperatorInterface< + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +> { + _: { + readonly hkt: SingleStoreSelectHKT; + readonly tableName: TTableName; + readonly selection: TSelection; + readonly selectMode: TSelectMode; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly nullabilityMap: TNullabilityMap; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly result: TResult; + readonly selectedFields: TSelectedFields; + }; +} + +export type SingleStoreSetOperatorWithResult = SingleStoreSetOperatorInterface< + any, + any, + any, + any, + any, + any, + any, + TResult, + any +>; + +export type SingleStoreSelect< + TTableName extends string | undefined = string | undefined, + TSelection extends ColumnsSelection = Record, + TSelectMode extends SelectMode = SelectMode, + TNullabilityMap extends Record = Record, +> = SingleStoreSelectBase; + +export type AnySingleStoreSelect = SingleStoreSelectBase; + +export type SingleStoreSetOperator< + TTableName extends string | undefined = string | undefined, + TSelection extends ColumnsSelection = Record, + TSelectMode extends SelectMode = SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TNullabilityMap extends Record = Record, +> = SingleStoreSelectBase< + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TNullabilityMap, + true, + SingleStoreSetOperatorExcludedMethods +>; + +export type SetOperatorRightSelect< + TValue extends SingleStoreSetOperatorWithResult, + TResult extends any[], +> = TValue extends 
SingleStoreSetOperatorInterface + ? ValidateShape< + TValueResult[number], + TResult[number], + TypedQueryBuilder + > + : TValue; + +export type SetOperatorRestSelect< + TValue extends readonly SingleStoreSetOperatorWithResult[], + TResult extends any[], +> = TValue extends [infer First, ...infer Rest] + ? First extends SingleStoreSetOperatorInterface + ? Rest extends AnySingleStoreSetOperatorInterface[] ? [ + ValidateShape>, + ...SetOperatorRestSelect, + ] + : ValidateShape[]> + : never + : TValue; + +export type SingleStoreCreateSetOperatorFn = < + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TValue extends SingleStoreSetOperatorWithResult, + TRest extends SingleStoreSetOperatorWithResult[], + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +>( + leftSelect: SingleStoreSetOperatorInterface< + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + rightSelect: SetOperatorRightSelect, + ...restSelects: SetOperatorRestSelect +) => SingleStoreSelectWithout< + SingleStoreSelectBase< + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + false, + SingleStoreSetOperatorExcludedMethods, + true +>; + +export type GetSingleStoreSetOperators = { + union: SingleStoreCreateSetOperatorFn; + intersect: SingleStoreCreateSetOperatorFn; + except: SingleStoreCreateSetOperatorFn; + unionAll: SingleStoreCreateSetOperatorFn; + minus: SingleStoreCreateSetOperatorFn; +}; diff --git a/drizzle-orm/src/singlestore-core/query-builders/update.ts 
b/drizzle-orm/src/singlestore-core/query-builders/update.ts new file mode 100644 index 000000000..d26394dfe --- /dev/null +++ b/drizzle-orm/src/singlestore-core/query-builders/update.ts @@ -0,0 +1,215 @@ +import type { GetColumnData } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { + AnySingleStoreQueryResultHKT, + PreparedQueryHKTBase, + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStoreQueryResultHKT, + SingleStoreQueryResultKind, + SingleStoreSession, +} from '~/singlestore-core/session.ts'; +import type { SingleStoreTable } from '~/singlestore-core/table.ts'; +import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import type { Subquery } from '~/subquery.ts'; +import { mapUpdateSet, type UpdateSet } from '~/utils.ts'; +import type { SelectedFieldsOrdered } from './select.types.ts'; + +export interface SingleStoreUpdateConfig { + where?: SQL | undefined; + set: UpdateSet; + table: SingleStoreTable; + returning?: SelectedFieldsOrdered; + withList?: Subquery[]; +} + +export type SingleStoreUpdateSetSource = + & { + [Key in keyof TTable['$inferInsert']]?: + | GetColumnData + | SQL; + } + & {}; + +export class SingleStoreUpdateBuilder< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, +> { + static readonly [entityKind]: string = 'SingleStoreUpdateBuilder'; + + declare readonly _: { + readonly table: TTable; + }; + + constructor( + private table: TTable, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + private withList?: Subquery[], + ) {} + + set(values: SingleStoreUpdateSetSource): SingleStoreUpdateBase { + return new SingleStoreUpdateBase( + this.table, + mapUpdateSet(this.table, values), + this.session, + this.dialect, + this.withList, + ); + } +} + +export type 
SingleStoreUpdateWithout< + T extends AnySingleStoreUpdateBase, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? T : Omit< + SingleStoreUpdateBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K +>; + +export type SingleStoreUpdatePrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + SingleStorePreparedQueryConfig & { + execute: SingleStoreQueryResultKind; + iterator: never; + }, + true +>; + +export type SingleStoreUpdateDynamic = SingleStoreUpdate< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'] +>; + +export type SingleStoreUpdate< + TTable extends SingleStoreTable = SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, +> = SingleStoreUpdateBase; + +export type AnySingleStoreUpdateBase = SingleStoreUpdateBase; + +export interface SingleStoreUpdateBase< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends QueryPromise>, SQLWrapper { + readonly _: { + readonly table: TTable; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + }; +} + +export class SingleStoreUpdateBase< + TTable extends SingleStoreTable, + TQueryResult extends SingleStoreQueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise> 
implements SQLWrapper { + static readonly [entityKind]: string = 'SingleStoreUpdate'; + + private config: SingleStoreUpdateConfig; + + constructor( + table: TTable, + set: UpdateSet, + private session: SingleStoreSession, + private dialect: SingleStoreDialect, + withList?: Subquery[], + ) { + super(); + this.config = { set, table, withList }; + } + + /** + * Adds a 'where' clause to the query. + * + * Calling this method will update only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/update} + * + * @param where the 'where' clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be updated. + * + * ```ts + * // Update all cars with green color + * db.update(cars).set({ color: 'red' }) + * .where(eq(cars.color, 'green')); + * // or + * db.update(cars).set({ color: 'red' }) + * .where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Update all BMW cars with a green color + * db.update(cars).set({ color: 'red' }) + * .where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Update all cars with the green or blue color + * db.update(cars).set({ color: 'red' }) + * .where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + where(where: SQL | undefined): SingleStoreUpdateWithout { + this.config.where = where; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildUpdateQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): SingleStoreUpdatePrepare { + return this.session.prepareQuery( + this.dialect.sqlToQuery(this.getSQL()), + this.config.returning, + ) as SingleStoreUpdatePrepare; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return 
this.prepare().execute(placeholderValues); + }; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): SingleStoreUpdateDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/singlestore-core/schema.ts b/drizzle-orm/src/singlestore-core/schema.ts new file mode 100644 index 000000000..82da44a49 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/schema.ts @@ -0,0 +1,41 @@ +import { entityKind, is } from '~/entity.ts'; +import { type SingleStoreTableFn, singlestoreTableWithSchema } from './table.ts'; +import { type singlestoreView, singlestoreViewWithSchema } from './view.ts'; + +export class SingleStoreSchema { + static readonly [entityKind]: string = 'SingleStoreSchema'; + + constructor( + public readonly schemaName: TName, + ) {} + + table: SingleStoreTableFn = (name, columns, extraConfig) => { + return singlestoreTableWithSchema(name, columns, extraConfig, this.schemaName); + }; + + view = ((name, columns) => { + return singlestoreViewWithSchema(name, columns, this.schemaName); + }) as typeof singlestoreView; +} + +/** @deprecated - use `instanceof SingleStoreSchema` */ +export function isSingleStoreSchema(obj: unknown): obj is SingleStoreSchema { + return is(obj, SingleStoreSchema); +} + +/** + * Create a SingleStore schema. 
+ * https://dev.mysql.com/doc/refman/8.0/en/create-database.html + * TODO(singlestore) + * + * @param name singlestore use schema name + * @returns SingleStore schema + */ +export function singlestoreDatabase(name: TName) { + return new SingleStoreSchema(name); +} + +/** + * @see singlestoreDatabase + */ +export const singlestoreSchema = singlestoreDatabase; diff --git a/drizzle-orm/src/singlestore-core/session.ts b/drizzle-orm/src/singlestore-core/session.ts new file mode 100644 index 000000000..96158c50f --- /dev/null +++ b/drizzle-orm/src/singlestore-core/session.ts @@ -0,0 +1,139 @@ +import { entityKind } from '~/entity.ts'; +import { TransactionRollbackError } from '~/errors.ts'; +import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; +import { type Query, type SQL, sql } from '~/sql/sql.ts'; +import type { Assume, Equal } from '~/utils.ts'; +import { SingleStoreDatabase } from './db.ts'; +import type { SingleStoreDialect } from './dialect.ts'; +import type { SelectedFieldsOrdered } from './query-builders/select.types.ts'; + +export interface SingleStoreQueryResultHKT { + readonly $brand: 'SingleStoreQueryResultHKT'; + readonly row: unknown; + readonly type: unknown; +} + +export interface AnySingleStoreQueryResultHKT extends SingleStoreQueryResultHKT { + readonly type: any; +} + +export type SingleStoreQueryResultKind = (TKind & { + readonly row: TRow; +})['type']; + +export interface SingleStorePreparedQueryConfig { + execute: unknown; + iterator: unknown; +} + +export interface SingleStorePreparedQueryHKT { + readonly $brand: 'SingleStorePreparedQueryHKT'; + readonly config: unknown; + readonly type: unknown; +} + +export type PreparedQueryKind< + TKind extends SingleStorePreparedQueryHKT, + TConfig extends SingleStorePreparedQueryConfig, + TAssume extends boolean = false, +> = Equal extends true + ? 
Assume<(TKind & { readonly config: TConfig })['type'], SingleStorePreparedQuery> + : (TKind & { readonly config: TConfig })['type']; + +export abstract class SingleStorePreparedQuery { + static readonly [entityKind]: string = 'SingleStorePreparedQuery'; + + /** @internal */ + joinsNotNullableMap?: Record; + + abstract execute(placeholderValues?: Record): Promise; + + abstract iterator(placeholderValues?: Record): AsyncGenerator; +} + +// In SingleStore, there is no need to explicitly set the isolation level, since READ COMMITTED is the only isolation level supported and is set by default for all transactions +export interface SingleStoreTransactionConfig { + withConsistentSnapshot?: boolean; + accessMode?: 'read only' | 'read write'; +} + +export abstract class SingleStoreSession< + TQueryResult extends SingleStoreQueryResultHKT = SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TFullSchema extends Record = Record, + TSchema extends TablesRelationalConfig = Record, +> { + static readonly [entityKind]: string = 'SingleStoreSession'; + + constructor(protected dialect: SingleStoreDialect) {} + + abstract prepareQuery< + T extends SingleStorePreparedQueryConfig, + TPreparedQueryHKT extends SingleStorePreparedQueryHKT, + >( + query: Query, + fields: SelectedFieldsOrdered | undefined, + customResultMapper?: (rows: unknown[][]) => T['execute'], + generatedIds?: Record[], + returningIds?: SelectedFieldsOrdered, + ): PreparedQueryKind; + + execute(query: SQL): Promise { + return this.prepareQuery( + this.dialect.sqlToQuery(query), + undefined, + ).execute(); + } + + abstract all(query: SQL): Promise; + + abstract transaction( + transaction: (tx: SingleStoreTransaction) => Promise, + config?: SingleStoreTransactionConfig, + ): Promise; + + protected getStartTransactionSQL(config: SingleStoreTransactionConfig): SQL | undefined { + const parts: string[] = []; + + if (config.withConsistentSnapshot) { + parts.push('with 
consistent snapshot'); + } + + if (config.accessMode) { + parts.push(config.accessMode); + } + + return parts.length ? sql.join(['start transaction ', parts.join(' ')]) : undefined; + } +} + +export abstract class SingleStoreTransaction< + TQueryResult extends SingleStoreQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TFullSchema extends Record = Record, + TSchema extends TablesRelationalConfig = Record, +> extends SingleStoreDatabase { + static readonly [entityKind]: string = 'SingleStoreTransaction'; + + constructor( + dialect: SingleStoreDialect, + session: SingleStoreSession, + protected schema: RelationalSchemaConfig | undefined, + protected readonly nestedIndex: number, + ) { + super(dialect, session, schema); + } + + rollback(): never { + throw new TransactionRollbackError(); + } + + /** Nested transactions (aka savepoints) only work with InnoDB engine. */ + abstract override transaction( + transaction: (tx: SingleStoreTransaction) => Promise, + ): Promise; +} + +export interface PreparedQueryHKTBase extends SingleStorePreparedQueryHKT { + type: SingleStorePreparedQuery>; +} diff --git a/drizzle-orm/src/singlestore-core/sql/expressions/conditions.ts b/drizzle-orm/src/singlestore-core/sql/expressions/conditions.ts new file mode 100644 index 000000000..95cffabdd --- /dev/null +++ b/drizzle-orm/src/singlestore-core/sql/expressions/conditions.ts @@ -0,0 +1,22 @@ +import { bindIfParam } from '~/sql/expressions/conditions.ts'; +import { type SQL, sql } from '~/sql/sql.ts'; +import type { Table } from '~/table'; + +/** + * Test that two values match. + * + * ## Examples + * + * ```ts + * // Select cars made by Ford + * db.select().from(cars) + * .where(match(cars.make, 'Ford')) + * ``` + * + * @see isNull for a way to test equality to NULL. 
+ */ +export function match< + TTable extends Table, +>(left: TTable, right: unknown): SQL { + return sql`MATCH (TABLE ${left}) AGAINST (${bindIfParam(right, left)})`; +} diff --git a/drizzle-orm/src/singlestore-core/sql/expressions/index.ts b/drizzle-orm/src/singlestore-core/sql/expressions/index.ts new file mode 100644 index 000000000..81cb13770 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/sql/expressions/index.ts @@ -0,0 +1 @@ +export * from './conditions.ts'; diff --git a/drizzle-orm/src/singlestore-core/sql/index.ts b/drizzle-orm/src/singlestore-core/sql/index.ts new file mode 100644 index 000000000..16ca76679 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/sql/index.ts @@ -0,0 +1 @@ +export * from './expressions/index.ts'; diff --git a/drizzle-orm/src/singlestore-core/subquery.ts b/drizzle-orm/src/singlestore-core/subquery.ts new file mode 100644 index 000000000..a4605c56d --- /dev/null +++ b/drizzle-orm/src/singlestore-core/subquery.ts @@ -0,0 +1,17 @@ +import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; +import type { ColumnsSelection } from '~/sql/sql.ts'; +import type { Subquery, WithSubquery } from '~/subquery.ts'; + +export type SubqueryWithSelection< + TSelection extends ColumnsSelection, + TAlias extends string, +> = + & Subquery> + & AddAliasToSelection; + +export type WithSubqueryWithSelection< + TSelection extends ColumnsSelection, + TAlias extends string, +> = + & WithSubquery> + & AddAliasToSelection; diff --git a/drizzle-orm/src/singlestore-core/table.ts b/drizzle-orm/src/singlestore-core/table.ts new file mode 100644 index 000000000..529a95fe9 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/table.ts @@ -0,0 +1,120 @@ +import type { BuildColumns, BuildExtraConfigColumns } from '~/column-builder.ts'; +import { entityKind } from '~/entity.ts'; +import { Table, type TableConfig as TableConfigBase, type UpdateTableConfig } from '~/table.ts'; +import type { SingleStoreColumn, SingleStoreColumnBuilder, 
SingleStoreColumnBuilderBase } from './columns/common.ts'; +import type { AnyIndexBuilder } from './indexes.ts'; +import type { PrimaryKeyBuilder } from './primary-keys.ts'; +import type { UniqueConstraintBuilder } from './unique-constraint.ts'; + +export type SingleStoreTableExtraConfig = Record< + string, + | AnyIndexBuilder + | PrimaryKeyBuilder + | UniqueConstraintBuilder +>; + +export type TableConfig = TableConfigBase; + +export class SingleStoreTable extends Table { + static readonly [entityKind]: string = 'SingleStoreTable'; + + declare protected $columns: T['columns']; + + /** @internal */ + static override readonly Symbol = Object.assign({}, Table.Symbol, {}); + + /** @internal */ + override [Table.Symbol.Columns]!: NonNullable; + + /** @internal */ + override [Table.Symbol.ExtraConfigBuilder]: + | ((self: Record) => SingleStoreTableExtraConfig) + | undefined = undefined; +} + +export type AnySingleStoreTable = {}> = SingleStoreTable< + UpdateTableConfig +>; + +export type SingleStoreTableWithColumns = + & SingleStoreTable + & { + [Key in keyof T['columns']]: T['columns'][Key]; + }; + +export function singlestoreTableWithSchema< + TTableName extends string, + TSchemaName extends string | undefined, + TColumnsMap extends Record, +>( + name: TTableName, + columns: TColumnsMap, + extraConfig: + | ((self: BuildColumns) => SingleStoreTableExtraConfig) + | undefined, + schema: TSchemaName, + baseName = name, +): SingleStoreTableWithColumns<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'singlestore'; +}> { + const rawTable = new SingleStoreTable<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'singlestore'; + }>(name, schema, baseName); + + const builtColumns = Object.fromEntries( + Object.entries(columns).map(([name, colBuilderBase]) => { + const colBuilder = colBuilderBase as SingleStoreColumnBuilder; + const column = colBuilder.build(rawTable); + return [name, column]; + }), + ) as unknown as 
BuildColumns; + + const table = Object.assign(rawTable, builtColumns); + + table[Table.Symbol.Columns] = builtColumns; + table[Table.Symbol.ExtraConfigColumns] = builtColumns as unknown as BuildExtraConfigColumns< + TTableName, + TColumnsMap, + 'singlestore' + >; + + if (extraConfig) { + table[SingleStoreTable.Symbol.ExtraConfigBuilder] = extraConfig as unknown as ( + self: Record, + ) => SingleStoreTableExtraConfig; + } + + return table; +} + +export interface SingleStoreTableFn { + < + TTableName extends string, + TColumnsMap extends Record, + >( + name: TTableName, + columns: TColumnsMap, + extraConfig?: (self: BuildColumns) => SingleStoreTableExtraConfig, + ): SingleStoreTableWithColumns<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'singlestore'; + }>; +} + +export const singlestoreTable: SingleStoreTableFn = (name, columns, extraConfig) => { + return singlestoreTableWithSchema(name, columns, extraConfig, undefined, name); +}; + +export function singlestoreTableCreator(customizeTableName: (name: string) => string): SingleStoreTableFn { + return (name, columns, extraConfig) => { + return singlestoreTableWithSchema(customizeTableName(name) as typeof name, columns, extraConfig, undefined, name); + }; +} diff --git a/drizzle-orm/src/singlestore-core/unique-constraint.ts b/drizzle-orm/src/singlestore-core/unique-constraint.ts new file mode 100644 index 000000000..faa4f3216 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/unique-constraint.ts @@ -0,0 +1,64 @@ +import { entityKind } from '~/entity.ts'; +import type { SingleStoreColumn } from './columns/index.ts'; +import { SingleStoreTable } from './table.ts'; + +export function unique(name?: string): UniqueOnConstraintBuilder { + return new UniqueOnConstraintBuilder(name); +} + +export function uniqueKeyName(table: SingleStoreTable, columns: string[]) { + return `${table[SingleStoreTable.Symbol.Name]}_${columns.join('_')}_unique`; +} + +export class UniqueConstraintBuilder { + 
static readonly [entityKind]: string = 'SingleStoreUniqueConstraintBuilder'; + + /** @internal */ + columns: SingleStoreColumn[]; + + constructor( + columns: SingleStoreColumn[], + private name?: string, + ) { + this.columns = columns; + } + + /** @internal */ + build(table: SingleStoreTable): UniqueConstraint { + return new UniqueConstraint(table, this.columns, this.name); + } +} + +export class UniqueOnConstraintBuilder { + static readonly [entityKind]: string = 'SingleStoreUniqueOnConstraintBuilder'; + + /** @internal */ + name?: string; + + constructor( + name?: string, + ) { + this.name = name; + } + + on(...columns: [SingleStoreColumn, ...SingleStoreColumn[]]) { + return new UniqueConstraintBuilder(columns, this.name); + } +} + +export class UniqueConstraint { + static readonly [entityKind]: string = 'SingleStoreUniqueConstraint'; + + readonly columns: SingleStoreColumn[]; + readonly name?: string; + readonly nullsNotDistinct: boolean = false; + + constructor(readonly table: SingleStoreTable, columns: SingleStoreColumn[], name?: string) { + this.columns = columns; + this.name = name ?? 
uniqueKeyName(this.table, this.columns.map((column) => column.name)); + } + + getName() { + return this.name; + } +} diff --git a/drizzle-orm/src/singlestore-core/utils.ts b/drizzle-orm/src/singlestore-core/utils.ts new file mode 100644 index 000000000..e6412161d --- /dev/null +++ b/drizzle-orm/src/singlestore-core/utils.ts @@ -0,0 +1,56 @@ +import { is } from '~/entity.ts'; +import { Table } from '~/table.ts'; +import { ViewBaseConfig } from '~/view-common.ts'; +import type { Index } from './indexes.ts'; +import { IndexBuilder } from './indexes.ts'; +import type { PrimaryKey } from './primary-keys.ts'; +import { PrimaryKeyBuilder } from './primary-keys.ts'; +import { SingleStoreTable } from './table.ts'; +import { type UniqueConstraint, UniqueConstraintBuilder } from './unique-constraint.ts'; +import { SingleStoreViewConfig } from './view-common.ts'; +import type { SingleStoreView } from './view.ts'; + +export function getTableConfig(table: SingleStoreTable) { + const columns = Object.values(table[SingleStoreTable.Symbol.Columns]); + const indexes: Index[] = []; + const primaryKeys: PrimaryKey[] = []; + const uniqueConstraints: UniqueConstraint[] = []; + const name = table[Table.Symbol.Name]; + const schema = table[Table.Symbol.Schema]; + const baseName = table[Table.Symbol.BaseName]; + + const extraConfigBuilder = table[SingleStoreTable.Symbol.ExtraConfigBuilder]; + + if (extraConfigBuilder !== undefined) { + const extraConfig = extraConfigBuilder(table[SingleStoreTable.Symbol.Columns]); + for (const builder of Object.values(extraConfig)) { + if (is(builder, IndexBuilder)) { + indexes.push(builder.build(table)); + } else if (is(builder, UniqueConstraintBuilder)) { + uniqueConstraints.push(builder.build(table)); + } else if (is(builder, PrimaryKeyBuilder)) { + primaryKeys.push(builder.build(table)); + } + } + } + + return { + columns, + indexes, + primaryKeys, + uniqueConstraints, + name, + schema, + baseName, + }; +} + +export function getViewConfig< + TName 
extends string = string, + TExisting extends boolean = boolean, +>(view: SingleStoreView) { + return { + ...view[ViewBaseConfig], + ...view[SingleStoreViewConfig], + }; +} diff --git a/drizzle-orm/src/singlestore-core/view-base.ts b/drizzle-orm/src/singlestore-core/view-base.ts new file mode 100644 index 000000000..f71536e28 --- /dev/null +++ b/drizzle-orm/src/singlestore-core/view-base.ts @@ -0,0 +1,15 @@ +import { entityKind } from '~/entity.ts'; +import type { ColumnsSelection } from '~/sql/sql.ts'; +import { View } from '~/sql/sql.ts'; + +export abstract class SingleStoreViewBase< + TName extends string = string, + TExisting extends boolean = boolean, + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> extends View { + static readonly [entityKind]: string = 'SingleStoreViewBase'; + + declare readonly _: View['_'] & { + readonly viewBrand: 'SingleStoreViewBase'; + }; +} diff --git a/drizzle-orm/src/singlestore-core/view-common.ts b/drizzle-orm/src/singlestore-core/view-common.ts new file mode 100644 index 000000000..d29c3d5ad --- /dev/null +++ b/drizzle-orm/src/singlestore-core/view-common.ts @@ -0,0 +1 @@ +export const SingleStoreViewConfig = Symbol.for('drizzle:SingleStoreViewConfig'); diff --git a/drizzle-orm/src/singlestore-core/view.ts b/drizzle-orm/src/singlestore-core/view.ts new file mode 100644 index 000000000..3c6c8d25c --- /dev/null +++ b/drizzle-orm/src/singlestore-core/view.ts @@ -0,0 +1,208 @@ +import type { BuildColumns } from '~/column-builder.ts'; +import { entityKind } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; +import { getTableColumns } from '~/utils.ts'; +import type { SingleStoreColumn, SingleStoreColumnBuilderBase } from './columns/index.ts'; +import { QueryBuilder } from 
'./query-builders/query-builder.ts'; +import type { SelectedFields } from './query-builders/select.types.ts'; +import { singlestoreTable } from './table.ts'; +import { SingleStoreViewBase } from './view-base.ts'; +import { SingleStoreViewConfig } from './view-common.ts'; + +export interface ViewBuilderConfig { + algorithm?: 'undefined' | 'merge' | 'temptable'; + definer?: string; + sqlSecurity?: 'definer' | 'invoker'; + withCheckOption?: 'cascaded' | 'local'; +} + +export class ViewBuilderCore { + static readonly [entityKind]: string = 'SingleStoreViewBuilder'; + + declare readonly _: { + readonly name: TConfig['name']; + readonly columns: TConfig['columns']; + }; + + constructor( + protected name: TConfig['name'], + protected schema: string | undefined, + ) {} + + protected config: ViewBuilderConfig = {}; + + algorithm( + algorithm: Exclude, + ): this { + this.config.algorithm = algorithm; + return this; + } + + definer( + definer: Exclude, + ): this { + this.config.definer = definer; + return this; + } + + sqlSecurity( + sqlSecurity: Exclude, + ): this { + this.config.sqlSecurity = sqlSecurity; + return this; + } + + withCheckOption( + withCheckOption?: Exclude, + ): this { + this.config.withCheckOption = withCheckOption ?? 
'cascaded'; + return this; + } +} + +export class ViewBuilder extends ViewBuilderCore<{ name: TName }> { + static readonly [entityKind]: string = 'SingleStoreViewBuilder'; + + as( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): SingleStoreViewWithSelection> { + if (typeof qb === 'function') { + qb = qb(new QueryBuilder()); + } + const selectionProxy = new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }); + const aliasedSelection = new Proxy(qb.getSelectedFields(), selectionProxy); + return new Proxy( + new SingleStoreView({ + singlestoreConfig: this.config, + config: { + name: this.name, + schema: this.schema, + selectedFields: aliasedSelection, + query: qb.getSQL().inlineParams(), + }, + }), + selectionProxy as any, + ) as SingleStoreViewWithSelection>; + } +} + +export class ManualViewBuilder< + TName extends string = string, + TColumns extends Record = Record, +> extends ViewBuilderCore<{ name: TName; columns: TColumns }> { + static readonly [entityKind]: string = 'SingleStoreManualViewBuilder'; + + private columns: Record; + + constructor( + name: TName, + columns: TColumns, + schema: string | undefined, + ) { + super(name, schema); + this.columns = getTableColumns(singlestoreTable(name, columns)) as BuildColumns; + } + + existing(): SingleStoreViewWithSelection> { + return new Proxy( + new SingleStoreView({ + singlestoreConfig: undefined, + config: { + name: this.name, + schema: this.schema, + selectedFields: this.columns, + query: undefined, + }, + }), + new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }), + ) as SingleStoreViewWithSelection>; + } + + as(query: SQL): SingleStoreViewWithSelection> { + return new Proxy( + new SingleStoreView({ + singlestoreConfig: this.config, + config: { + name: this.name, + schema: this.schema, + selectedFields: this.columns, + query: 
query.inlineParams(), + }, + }), + new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }), + ) as SingleStoreViewWithSelection>; + } +} + +export class SingleStoreView< + TName extends string = string, + TExisting extends boolean = boolean, + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> extends SingleStoreViewBase { + static readonly [entityKind]: string = 'SingleStoreView'; + + declare protected $SingleStoreViewBrand: 'SingleStoreView'; + + [SingleStoreViewConfig]: ViewBuilderConfig | undefined; + + constructor({ singlestoreConfig, config }: { + singlestoreConfig: ViewBuilderConfig | undefined; + config: { + name: TName; + schema: string | undefined; + selectedFields: SelectedFields; + query: SQL | undefined; + }; + }) { + super(config); + this[SingleStoreViewConfig] = singlestoreConfig; + } +} + +export type SingleStoreViewWithSelection< + TName extends string, + TExisting extends boolean, + TSelectedFields extends ColumnsSelection, +> = SingleStoreView & TSelectedFields; + +/** @internal */ +export function singlestoreViewWithSchema( + name: string, + selection: Record | undefined, + schema: string | undefined, +): ViewBuilder | ManualViewBuilder { + if (selection) { + return new ManualViewBuilder(name, selection, schema); + } + return new ViewBuilder(name, schema); +} + +export function singlestoreView(name: TName): ViewBuilder; +export function singlestoreView>( + name: TName, + columns: TColumns, +): ManualViewBuilder; +export function singlestoreView( + name: string, + selection?: Record, +): ViewBuilder | ManualViewBuilder { + return singlestoreViewWithSchema(name, selection, undefined); +} diff --git a/drizzle-orm/src/singlestore-proxy/driver.ts b/drizzle-orm/src/singlestore-proxy/driver.ts new file mode 100644 index 000000000..f54180c66 --- /dev/null +++ b/drizzle-orm/src/singlestore-proxy/driver.ts @@ -0,0 +1,54 @@ +import { DefaultLogger } from 
'~/logger.ts'; +import { + createTableRelationsHelpers, + extractTablesRelationalConfig, + type RelationalSchemaConfig, + type TablesRelationalConfig, +} from '~/relations.ts'; +import { SingleStoreDatabase } from '~/singlestore-core/db.ts'; +import { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { DrizzleConfig } from '~/utils.ts'; +import { + type SingleStoreRemotePreparedQueryHKT, + type SingleStoreRemoteQueryResultHKT, + SingleStoreRemoteSession, +} from './session.ts'; + +export type SingleStoreRemoteDatabase< + TSchema extends Record = Record, +> = SingleStoreDatabase; + +export type RemoteCallback = ( + sql: string, + params: any[], + method: 'all' | 'execute', +) => Promise<{ rows: any[]; insertId?: number; affectedRows?: number }>; + +export function drizzle = Record>( + callback: RemoteCallback, + config: DrizzleConfig = {}, +): SingleStoreRemoteDatabase { + const dialect = new SingleStoreDialect(); + let logger; + if (config.logger === true) { + logger = new DefaultLogger(); + } else if (config.logger !== false) { + logger = config.logger; + } + + let schema: RelationalSchemaConfig | undefined; + if (config.schema) { + const tablesConfig = extractTablesRelationalConfig( + config.schema, + createTableRelationsHelpers, + ); + schema = { + fullSchema: config.schema, + schema: tablesConfig.tables, + tableNamesMap: tablesConfig.tableNamesMap, + }; + } + + const session = new SingleStoreRemoteSession(callback, dialect, schema, { logger }); + return new SingleStoreDatabase(dialect, session, schema) as SingleStoreRemoteDatabase; +} diff --git a/drizzle-orm/src/singlestore-proxy/index.ts b/drizzle-orm/src/singlestore-proxy/index.ts new file mode 100644 index 000000000..b1b6a52e7 --- /dev/null +++ b/drizzle-orm/src/singlestore-proxy/index.ts @@ -0,0 +1,2 @@ +export * from './driver.ts'; +export * from './session.ts'; diff --git a/drizzle-orm/src/singlestore-proxy/migrator.ts b/drizzle-orm/src/singlestore-proxy/migrator.ts new file mode 
100644 index 000000000..2ed0172fb --- /dev/null +++ b/drizzle-orm/src/singlestore-proxy/migrator.ts @@ -0,0 +1,52 @@ +import type { MigrationConfig } from '~/migrator.ts'; +import { readMigrationFiles } from '~/migrator.ts'; +import { sql } from '~/sql/sql.ts'; +import type { SingleStoreRemoteDatabase } from './driver.ts'; + +export type ProxyMigrator = (migrationQueries: string[]) => Promise; + +export async function migrate>( + db: SingleStoreRemoteDatabase, + callback: ProxyMigrator, + config: MigrationConfig, +) { + const migrations = readMigrationFiles(config); + + const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; + const migrationTableCreate = sql` + create table if not exists ${sql.identifier(migrationsTable)} ( + id serial primary key, + hash text not null, + created_at bigint + ) + `; + await db.execute(migrationTableCreate); + + const dbMigrations = await db.select({ + id: sql.raw('id'), + hash: sql.raw('hash'), + created_at: sql.raw('created_at'), + }).from(sql.identifier(migrationsTable).getSQL()).orderBy( + sql.raw('created_at desc'), + ).limit(1); + + const lastDbMigration = dbMigrations[0]; + + const queriesToRun: string[] = []; + + for (const migration of migrations) { + if ( + !lastDbMigration + || Number(lastDbMigration.created_at) < migration.folderMillis + ) { + queriesToRun.push( + ...migration.sql, + `insert into ${ + sql.identifier(migrationsTable).value + } (\`hash\`, \`created_at\`) values('${migration.hash}', '${migration.folderMillis}')`, + ); + } + } + + await callback(queriesToRun); +} diff --git a/drizzle-orm/src/singlestore-proxy/session.ts b/drizzle-orm/src/singlestore-proxy/session.ts new file mode 100644 index 000000000..cf73f7c71 --- /dev/null +++ b/drizzle-orm/src/singlestore-proxy/session.ts @@ -0,0 +1,178 @@ +import type { FieldPacket, ResultSetHeader } from 'mysql2/promise'; +import { Column } from '~/column.ts'; +import { entityKind, is } from '~/entity.ts'; +import type { Logger } from '~/logger.ts'; 
+import { NoopLogger } from '~/logger.ts'; +import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import { SingleStoreTransaction } from '~/singlestore-core/index.ts'; +import type { SelectedFieldsOrdered } from '~/singlestore-core/query-builders/select.types.ts'; +import type { + PreparedQueryKind, + SingleStorePreparedQueryConfig, + SingleStorePreparedQueryHKT, + SingleStoreQueryResultHKT, + SingleStoreTransactionConfig, +} from '~/singlestore-core/session.ts'; +import { SingleStorePreparedQuery as PreparedQueryBase, SingleStoreSession } from '~/singlestore-core/session.ts'; +import { fillPlaceholders } from '~/sql/sql.ts'; +import type { Query, SQL } from '~/sql/sql.ts'; +import { type Assume, mapResultRow } from '~/utils.ts'; +import type { RemoteCallback } from './driver.ts'; + +export type SingleStoreRawQueryResult = [ResultSetHeader, FieldPacket[]]; + +export interface SingleStoreRemoteSessionOptions { + logger?: Logger; +} + +export class SingleStoreRemoteSession< + TFullSchema extends Record, + TSchema extends TablesRelationalConfig, +> extends SingleStoreSession { + static readonly [entityKind]: string = 'SingleStoreRemoteSession'; + + private logger: Logger; + + constructor( + private client: RemoteCallback, + dialect: SingleStoreDialect, + private schema: RelationalSchemaConfig | undefined, + options: SingleStoreRemoteSessionOptions, + ) { + super(dialect); + this.logger = options.logger ?? 
new NoopLogger(); + } + + prepareQuery( + query: Query, + fields: SelectedFieldsOrdered | undefined, + customResultMapper?: (rows: unknown[][]) => T['execute'], + generatedIds?: Record[], + returningIds?: SelectedFieldsOrdered, + ): PreparedQueryKind { + return new PreparedQuery( + this.client, + query.sql, + query.params, + this.logger, + fields, + customResultMapper, + generatedIds, + returningIds, + ) as PreparedQueryKind; + } + + override all(query: SQL): Promise { + const querySql = this.dialect.sqlToQuery(query); + this.logger.logQuery(querySql.sql, querySql.params); + return this.client(querySql.sql, querySql.params, 'all').then(({ rows }) => rows) as Promise; + } + + override async transaction( + _transaction: (tx: SingleStoreProxyTransaction) => Promise, + _config?: SingleStoreTransactionConfig, + ): Promise { + throw new Error('Transactions are not supported by the SingleStore Proxy driver'); + } +} + +export class SingleStoreProxyTransaction< + TFullSchema extends Record, + TSchema extends TablesRelationalConfig, +> extends SingleStoreTransaction< + SingleStoreRemoteQueryResultHKT, + SingleStoreRemotePreparedQueryHKT, + TFullSchema, + TSchema +> { + static readonly [entityKind]: string = 'SingleStoreProxyTransaction'; + + override async transaction( + _transaction: (tx: SingleStoreProxyTransaction) => Promise, + ): Promise { + throw new Error('Transactions are not supported by the SingleStore Proxy driver'); + } +} + +export class PreparedQuery extends PreparedQueryBase { + static readonly [entityKind]: string = 'SingleStoreProxyPreparedQuery'; + + constructor( + private client: RemoteCallback, + private queryString: string, + private params: unknown[], + private logger: Logger, + private fields: SelectedFieldsOrdered | undefined, + private customResultMapper?: (rows: unknown[][]) => T['execute'], + // Keys that were used in $default and the value that was generated for them + private generatedIds?: Record[], + // Keys that should be returned, it has the 
column with all properries + key from object + private returningIds?: SelectedFieldsOrdered, + ) { + super(); + } + + async execute(placeholderValues: Record | undefined = {}): Promise { + const params = fillPlaceholders(this.params, placeholderValues); + + const { fields, client, queryString, logger, joinsNotNullableMap, customResultMapper, returningIds, generatedIds } = + this; + + logger.logQuery(queryString, params); + + if (!fields && !customResultMapper) { + const { rows: data } = await client(queryString, params, 'execute'); + + const insertId = data[0].insertId as number; + const affectedRows = data[0].affectedRows; + + if (returningIds) { + const returningResponse = []; + let j = 0; + for (let i = insertId; i < insertId + affectedRows; i++) { + for (const column of returningIds) { + const key = returningIds[0]!.path[0]!; + if (is(column.field, Column)) { + // @ts-ignore + if (column.field.primary && column.field.autoIncrement) { + returningResponse.push({ [key]: i }); + } + if (column.field.defaultFn && generatedIds) { + // generatedIds[rowIdx][key] + returningResponse.push({ [key]: generatedIds[j]![key] }); + } + } + } + j++; + } + + return returningResponse; + } + + return data; + } + + const { rows } = await client(queryString, params, 'all'); + + if (customResultMapper) { + return customResultMapper(rows); + } + + return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); + } + + override iterator( + _placeholderValues: Record = {}, + ): AsyncGenerator { + throw new Error('Streaming is not supported by the SingleStore Proxy driver'); + } +} + +export interface SingleStoreRemoteQueryResultHKT extends SingleStoreQueryResultHKT { + type: SingleStoreRawQueryResult; +} + +export interface SingleStoreRemotePreparedQueryHKT extends SingleStorePreparedQueryHKT { + type: PreparedQuery>; +} diff --git a/drizzle-orm/src/singlestore/driver.ts b/drizzle-orm/src/singlestore/driver.ts new file mode 100644 index 000000000..ffc5c2795 --- /dev/null +++ 
b/drizzle-orm/src/singlestore/driver.ts @@ -0,0 +1,92 @@ +import type { Connection as CallbackConnection, Pool as CallbackPool } from 'mysql2'; +import { entityKind } from '~/entity.ts'; +import type { Logger } from '~/logger.ts'; +import { DefaultLogger } from '~/logger.ts'; +import { + createTableRelationsHelpers, + extractTablesRelationalConfig, + type RelationalSchemaConfig, + type TablesRelationalConfig, +} from '~/relations.ts'; +import { SingleStoreDatabase } from '~/singlestore-core/db.ts'; +import { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { DrizzleConfig } from '~/utils.ts'; +import type { + Mode, + SingleStoreDriverClient, + SingleStoreDriverPreparedQueryHKT, + SingleStoreDriverQueryResultHKT, +} from './session.ts'; +import { SingleStoreDriverSession } from './session.ts'; + +export interface SingleStoreDriverOptions { + logger?: Logger; +} + +export class SingleStoreDriver { + static readonly [entityKind]: string = 'SingleStoreDriver'; + + constructor( + private client: SingleStoreDriverClient, + private dialect: SingleStoreDialect, + private options: SingleStoreDriverOptions = {}, + ) { + } + + createSession( + schema: RelationalSchemaConfig | undefined, + ): SingleStoreDriverSession, TablesRelationalConfig> { + return new SingleStoreDriverSession(this.client, this.dialect, schema, { logger: this.options.logger }); + } +} + +export { SingleStoreDatabase } from '~/singlestore-core/db.ts'; + +export type SingleStoreDriverDatabase< + TSchema extends Record = Record, +> = SingleStoreDatabase; + +export type SingleStoreDriverDrizzleConfig = Record> = + & Omit, 'schema'> + & ({ schema: TSchema; mode: Mode } | { schema?: undefined; mode?: Mode }); + +export function drizzle = Record>( + client: SingleStoreDriverClient | CallbackConnection | CallbackPool, + config: DrizzleConfig = {}, +): SingleStoreDriverDatabase { + const dialect = new SingleStoreDialect(); + let logger; + if (config.logger === true) { + logger = new 
DefaultLogger(); + } else if (config.logger !== false) { + logger = config.logger; + } + if (isCallbackClient(client)) { + client = client.promise(); + } + + let schema: RelationalSchemaConfig | undefined; + if (config.schema) { + const tablesConfig = extractTablesRelationalConfig( + config.schema, + createTableRelationsHelpers, + ); + schema = { + fullSchema: config.schema, + schema: tablesConfig.tables, + tableNamesMap: tablesConfig.tableNamesMap, + }; + } + + const driver = new SingleStoreDriver(client as SingleStoreDriverClient, dialect, { logger }); + const session = driver.createSession(schema); + return new SingleStoreDatabase(dialect, session, schema) as SingleStoreDriverDatabase; +} + +interface CallbackClient { + promise(): SingleStoreDriverClient; +} + +function isCallbackClient(client: any): client is CallbackClient { + return typeof client.promise === 'function'; +} diff --git a/drizzle-orm/src/singlestore/index.ts b/drizzle-orm/src/singlestore/index.ts new file mode 100644 index 000000000..b1b6a52e7 --- /dev/null +++ b/drizzle-orm/src/singlestore/index.ts @@ -0,0 +1,2 @@ +export * from './driver.ts'; +export * from './session.ts'; diff --git a/drizzle-orm/src/singlestore/migrator.ts b/drizzle-orm/src/singlestore/migrator.ts new file mode 100644 index 000000000..6f342c0c5 --- /dev/null +++ b/drizzle-orm/src/singlestore/migrator.ts @@ -0,0 +1,11 @@ +import type { MigrationConfig } from '~/migrator.ts'; +import { readMigrationFiles } from '~/migrator.ts'; +import type { SingleStoreDriverDatabase } from './driver.ts'; + +export async function migrate>( + db: SingleStoreDriverDatabase, + config: MigrationConfig, +) { + const migrations = readMigrationFiles(config); + await db.dialect.migrate(migrations, db.session, config); +} diff --git a/drizzle-orm/src/singlestore/session.ts b/drizzle-orm/src/singlestore/session.ts new file mode 100644 index 000000000..0ea143971 --- /dev/null +++ b/drizzle-orm/src/singlestore/session.ts @@ -0,0 +1,339 @@ +import type { 
Connection as CallbackConnection } from 'mysql2'; +import type { + Connection, + FieldPacket, + OkPacket, + Pool, + PoolConnection, + QueryOptions, + ResultSetHeader, + RowDataPacket, +} from 'mysql2/promise'; +import { once } from 'node:events'; +import { Column } from '~/column.ts'; +import { entityKind, is } from '~/entity.ts'; +import type { Logger } from '~/logger.ts'; +import { NoopLogger } from '~/logger.ts'; +import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; +import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; +import type { SelectedFieldsOrdered } from '~/singlestore-core/query-builders/select.types.ts'; +import { + type PreparedQueryKind, + SingleStorePreparedQuery, + type SingleStorePreparedQueryConfig, + type SingleStorePreparedQueryHKT, + type SingleStoreQueryResultHKT, + SingleStoreSession, + SingleStoreTransaction, + type SingleStoreTransactionConfig, +} from '~/singlestore-core/session.ts'; +import { fillPlaceholders, sql } from '~/sql/sql.ts'; +import type { Query, SQL } from '~/sql/sql.ts'; +import { type Assume, mapResultRow } from '~/utils.ts'; + +// must keep this type here for compatibility with DrizzleConfig +export type Mode = 'default'; + +export type SingleStoreDriverClient = Pool | Connection; + +export type SingleStoreRawQueryResult = [ResultSetHeader, FieldPacket[]]; +export type SingleStoreQueryResultType = RowDataPacket[][] | RowDataPacket[] | OkPacket | OkPacket[] | ResultSetHeader; +export type SingleStoreQueryResult< + T = any, +> = [T extends ResultSetHeader ? 
T : T[], FieldPacket[]]; + +export class SingleStoreDriverPreparedQuery + extends SingleStorePreparedQuery +{ + static readonly [entityKind]: string = 'SingleStoreDriverPreparedQuery'; + + private rawQuery: QueryOptions; + private query: QueryOptions; + + constructor( + private client: SingleStoreDriverClient, + queryString: string, + private params: unknown[], + private logger: Logger, + private fields: SelectedFieldsOrdered | undefined, + private customResultMapper?: (rows: unknown[][]) => T['execute'], + // Keys that were used in $default and the value that was generated for them + private generatedIds?: Record[], + // Keys that should be returned, it has the column with all properries + key from object + private returningIds?: SelectedFieldsOrdered, + ) { + super(); + this.rawQuery = { + sql: queryString, + // rowsAsArray: true, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }; + this.query = { + sql: queryString, + rowsAsArray: true, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }; + } + + async execute(placeholderValues: Record = {}): Promise { + const params = fillPlaceholders(this.params, placeholderValues); + + this.logger.logQuery(this.rawQuery.sql, params); + + const { fields, client, rawQuery, query, joinsNotNullableMap, customResultMapper, returningIds, generatedIds } = + this; + if (!fields && !customResultMapper) { + const res = await client.query(rawQuery, params); + const insertId = res[0].insertId; + const affectedRows = res[0].affectedRows; + // for each row, I need to check keys from + if (returningIds) { + const returningResponse = []; + let j = 0; + for (let i = insertId; i < insertId + affectedRows; i++) { + for (const column of returningIds) { + const key = 
returningIds[0]!.path[0]!; + if (is(column.field, Column)) { + // @ts-ignore + if (column.field.primary && column.field.autoIncrement) { + returningResponse.push({ [key]: i }); + } + if (column.field.defaultFn && generatedIds) { + // generatedIds[rowIdx][key] + returningResponse.push({ [key]: generatedIds[j]![key] }); + } + } + } + j++; + } + + return returningResponse; + } + return res; + } + + const result = await client.query(query, params); + const rows = result[0]; + + if (customResultMapper) { + return customResultMapper(rows); + } + + return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); + } + + async *iterator( + placeholderValues: Record = {}, + ): AsyncGenerator { + const params = fillPlaceholders(this.params, placeholderValues); + const conn = ((isPool(this.client) ? await this.client.getConnection() : this.client) as {} as { + connection: CallbackConnection; + }).connection; + + const { fields, query, rawQuery, joinsNotNullableMap, client, customResultMapper } = this; + const hasRowsMapper = Boolean(fields || customResultMapper); + const driverQuery = hasRowsMapper ? conn.query(query, params) : conn.query(rawQuery, params); + + const stream = driverQuery.stream(); + + function dataListener() { + stream.pause(); + } + + stream.on('data', dataListener); + + try { + const onEnd = once(stream, 'end'); + const onError = once(stream, 'error'); + + while (true) { + stream.resume(); + const row = await Promise.race([onEnd, onError, new Promise((resolve) => stream.once('data', resolve))]); + if (row === undefined || (Array.isArray(row) && row.length === 0)) { + break; + } else if (row instanceof Error) { // eslint-disable-line no-instanceof/no-instanceof + throw row; + } else { + if (hasRowsMapper) { + if (customResultMapper) { + const mappedRow = customResultMapper([row as unknown[]]); + yield (Array.isArray(mappedRow) ? 
mappedRow[0] : mappedRow); + } else { + yield mapResultRow(fields!, row as unknown[], joinsNotNullableMap); + } + } else { + yield row as T['execute']; + } + } + } + } finally { + stream.off('data', dataListener); + if (isPool(client)) { + conn.end(); + } + } + } +} + +export interface SingleStoreDriverSessionOptions { + logger?: Logger; +} + +export class SingleStoreDriverSession< + TFullSchema extends Record, + TSchema extends TablesRelationalConfig, +> extends SingleStoreSession { + static readonly [entityKind]: string = 'SingleStoreDriverSession'; + + private logger: Logger; + + constructor( + private client: SingleStoreDriverClient, + dialect: SingleStoreDialect, + private schema: RelationalSchemaConfig | undefined, + private options: SingleStoreDriverSessionOptions, + ) { + super(dialect); + this.logger = options.logger ?? new NoopLogger(); + } + + prepareQuery( + query: Query, + fields: SelectedFieldsOrdered | undefined, + customResultMapper?: (rows: unknown[][]) => T['execute'], + generatedIds?: Record[], + returningIds?: SelectedFieldsOrdered, + ): PreparedQueryKind { + // Add returningId fields + // Each driver gets them from response from database + return new SingleStoreDriverPreparedQuery( + this.client, + query.sql, + query.params, + this.logger, + fields, + customResultMapper, + generatedIds, + returningIds, + ) as PreparedQueryKind; + } + + /** + * @internal + * What is its purpose? 
+ */ + async query(query: string, params: unknown[]): Promise { + this.logger.logQuery(query, params); + const result = await this.client.query({ + sql: query, + values: params, + rowsAsArray: true, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }); + return result; + } + + override all(query: SQL): Promise { + const querySql = this.dialect.sqlToQuery(query); + this.logger.logQuery(querySql.sql, querySql.params); + return this.client.execute(querySql.sql, querySql.params).then((result) => result[0]) as Promise; + } + + override async transaction( + transaction: (tx: SingleStoreDriverTransaction) => Promise, + config?: SingleStoreTransactionConfig, + ): Promise { + const session = isPool(this.client) + ? new SingleStoreDriverSession( + await this.client.getConnection(), + this.dialect, + this.schema, + this.options, + ) + : this; + const tx = new SingleStoreDriverTransaction( + this.dialect, + session as SingleStoreSession, + this.schema, + 0, + ); + if (config) { + const startTransactionSql = this.getStartTransactionSQL(config); + await (startTransactionSql ? 
tx.execute(startTransactionSql) : tx.execute(sql`begin`)); + } else { + await tx.execute(sql`begin`); + } + try { + const result = await transaction(tx); + await tx.execute(sql`commit`); + return result; + } catch (err) { + await tx.execute(sql`rollback`); + throw err; + } finally { + if (isPool(this.client)) { + (session.client as PoolConnection).release(); + } + } + } +} + +export class SingleStoreDriverTransaction< + TFullSchema extends Record, + TSchema extends TablesRelationalConfig, +> extends SingleStoreTransaction< + SingleStoreDriverQueryResultHKT, + SingleStoreDriverPreparedQueryHKT, + TFullSchema, + TSchema +> { + static readonly [entityKind]: string = 'SingleStoreDriverTransaction'; + + override async transaction( + transaction: (tx: SingleStoreDriverTransaction) => Promise, + ): Promise { + const savepointName = `sp${this.nestedIndex + 1}`; + const tx = new SingleStoreDriverTransaction( + this.dialect, + this.session, + this.schema, + this.nestedIndex + 1, + ); + await tx.execute(sql.raw(`savepoint ${savepointName}`)); + try { + const result = await transaction(tx); + await tx.execute(sql.raw(`release savepoint ${savepointName}`)); + return result; + } catch (err) { + await tx.execute(sql.raw(`rollback to savepoint ${savepointName}`)); + throw err; + } + } +} + +function isPool(client: SingleStoreDriverClient): client is Pool { + return 'getConnection' in client; +} + +export interface SingleStoreDriverQueryResultHKT extends SingleStoreQueryResultHKT { + type: SingleStoreRawQueryResult; +} + +export interface SingleStoreDriverPreparedQueryHKT extends SingleStorePreparedQueryHKT { + type: SingleStoreDriverPreparedQuery>; +} diff --git a/drizzle-orm/src/sql/expressions/conditions.ts b/drizzle-orm/src/sql/expressions/conditions.ts index 0a911e1ff..ba0e21fbc 100644 --- a/drizzle-orm/src/sql/expressions/conditions.ts +++ b/drizzle-orm/src/sql/expressions/conditions.ts @@ -256,11 +256,6 @@ export const lte: BinaryOperator = (left: SQLWrapper, right: 
unknown): SQL => { * Test whether the first parameter, a column or expression, * has a value from a list passed as the second argument. * - * ## Throws - * - * The argument passed in the second array can't be empty: - * if an empty is provided, this method will throw. - * * ## Examples * * ```ts @@ -302,11 +297,6 @@ export function inArray( * has a value that is not present in a list passed as the * second argument. * - * ## Throws - * - * The argument passed in the second array can't be empty: - * if an empty is provided, this method will throw. - * * ## Examples * * ```ts diff --git a/drizzle-orm/src/sql/sql.ts b/drizzle-orm/src/sql/sql.ts index f5b3f30b4..3814c9aaa 100644 --- a/drizzle-orm/src/sql/sql.ts +++ b/drizzle-orm/src/sql/sql.ts @@ -203,7 +203,11 @@ export class SQL implements SQLWrapper { } if (is(chunk, Param)) { - const mappedValue = (chunk.value === null) ? null : chunk.encoder.mapToDriverValue(chunk.value); + if (is(chunk.value, Placeholder)) { + return { sql: escapeParam(paramStartIndex.value++, chunk), params: [chunk], typings: ['none'] }; + } + + const mappedValue = chunk.value === null ? 
null : chunk.encoder.mapToDriverValue(chunk.value); if (is(mappedValue, SQL)) { return this.buildQueryFromSourceParams([mappedValue], config); @@ -213,7 +217,7 @@ export class SQL implements SQLWrapper { return { sql: this.mapInlineParam(mappedValue, config), params: [] }; } - let typings: QueryTypingsValue[] | undefined; + let typings: QueryTypingsValue[] = ['none']; if (prepareTyping) { typings = [prepareTyping(chunk.encoder)]; } @@ -263,7 +267,7 @@ export class SQL implements SQLWrapper { return { sql: this.mapInlineParam(chunk, config), params: [] }; } - return { sql: escapeParam(paramStartIndex.value++, chunk), params: [chunk] }; + return { sql: escapeParam(paramStartIndex.value++, chunk), params: [chunk], typings: ['none'] }; })); } @@ -583,9 +587,18 @@ export function fillPlaceholders(params: unknown[], values: Record | undefined = Record | undefined, > = SQLiteUpdateBase; -type AnySQLiteUpdate = SQLiteUpdateBase; +export type AnySQLiteUpdate = SQLiteUpdateBase; export interface SQLiteUpdateBase< TTable extends SQLiteTable = SQLiteTable, diff --git a/drizzle-orm/src/table.ts b/drizzle-orm/src/table.ts index 3db9d5559..8632dd35c 100644 --- a/drizzle-orm/src/table.ts +++ b/drizzle-orm/src/table.ts @@ -107,6 +107,9 @@ export class Table implements SQLWrapper { /** @internal */ [IsAlias] = false; + /** @internal */ + [IsDrizzleTable] = true; + /** @internal */ [ExtraConfigBuilder]: ((self: any) => Record) | undefined = undefined; diff --git a/drizzle-orm/type-tests/singlestore/1000columns.ts b/drizzle-orm/type-tests/singlestore/1000columns.ts new file mode 100644 index 000000000..f84640858 --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/1000columns.ts @@ -0,0 +1,904 @@ +import { bigint, double, singlestoreTable, varchar } from '~/singlestore-core/index.ts'; + +singlestoreTable('test', { + col0: double('col1').primaryKey().autoincrement().default(0), + col1: double('col1').primaryKey().autoincrement().default(0), + col2: 
double('col1').primaryKey().autoincrement().default(0), + col3: double('col1').primaryKey().autoincrement().default(0), + col4: double('col1').primaryKey().autoincrement().default(0), + col5: double('col1').primaryKey().autoincrement().default(0), + col6: double('col1').primaryKey().autoincrement().default(0), + col8: double('col1').primaryKey().autoincrement().default(0), + col9: double('col1').primaryKey().autoincrement().default(0), + col10: double('col1').primaryKey().autoincrement().default(0), + col11: double('col1').primaryKey().autoincrement().default(0), + col12: double('col1').primaryKey().autoincrement().default(0), + col13: double('col1').primaryKey().autoincrement().default(0), + col14: double('col1').primaryKey().autoincrement().default(0), + col15: double('col1').primaryKey().autoincrement().default(0), + col16: double('col1').primaryKey().autoincrement().default(0), + col18: double('col1').primaryKey().autoincrement().default(0), + col19: double('col1').primaryKey().autoincrement().default(0), + col20: double('col1').primaryKey().autoincrement().default(0), + col21: double('col1').primaryKey().autoincrement().default(0), + col22: double('col1').primaryKey().autoincrement().default(0), + col23: double('col1').primaryKey().autoincrement().default(0), + col24: double('col1').primaryKey().autoincrement().default(0), + col25: double('col1').primaryKey().autoincrement().default(0), + col26: double('col1').primaryKey().autoincrement().default(0), + col28: double('col1').primaryKey().autoincrement().default(0), + col29: double('col1').primaryKey().autoincrement().default(0), + col30: double('col1').primaryKey().autoincrement().default(0), + col31: double('col1').primaryKey().autoincrement().default(0), + col32: double('col1').primaryKey().autoincrement().default(0), + col33: double('col1').primaryKey().autoincrement().default(0), + col34: double('col1').primaryKey().autoincrement().default(0), + col35: double('col1').primaryKey().autoincrement().default(0), 
+ col36: double('col1').primaryKey().autoincrement().default(0), + col38: double('col1').primaryKey().autoincrement().default(0), + col39: double('col1').primaryKey().autoincrement().default(0), + col40: double('col1').primaryKey().autoincrement().default(0), + col41: double('col1').primaryKey().autoincrement().default(0), + col42: double('col1').primaryKey().autoincrement().default(0), + col43: double('col1').primaryKey().autoincrement().default(0), + col44: double('col1').primaryKey().autoincrement().default(0), + col45: double('col1').primaryKey().autoincrement().default(0), + col46: double('col1').primaryKey().autoincrement().default(0), + col48: double('col1').primaryKey().autoincrement().default(0), + col49: double('col1').primaryKey().autoincrement().default(0), + col50: double('col1').primaryKey().autoincrement().default(0), + col51: double('col1').primaryKey().autoincrement().default(0), + col52: double('col1').primaryKey().autoincrement().default(0), + col53: double('col1').primaryKey().autoincrement().default(0), + col54: double('col1').primaryKey().autoincrement().default(0), + col55: double('col1').primaryKey().autoincrement().default(0), + col56: double('col1').primaryKey().autoincrement().default(0), + col58: double('col1').primaryKey().autoincrement().default(0), + col59: double('col1').primaryKey().autoincrement().default(0), + col60: double('col1').primaryKey().autoincrement().default(0), + col61: double('col1').primaryKey().autoincrement().default(0), + col62: double('col1').primaryKey().autoincrement().default(0), + col63: double('col1').primaryKey().autoincrement().default(0), + col64: double('col1').primaryKey().autoincrement().default(0), + col65: double('col1').primaryKey().autoincrement().default(0), + col66: double('col1').primaryKey().autoincrement().default(0), + col68: double('col1').primaryKey().autoincrement().default(0), + col69: double('col1').primaryKey().autoincrement().default(0), + col70: 
double('col1').primaryKey().autoincrement().default(0), + col71: double('col1').primaryKey().autoincrement().default(0), + col72: double('col1').primaryKey().autoincrement().default(0), + col73: double('col1').primaryKey().autoincrement().default(0), + col74: double('col1').primaryKey().autoincrement().default(0), + col75: double('col1').primaryKey().autoincrement().default(0), + col76: double('col1').primaryKey().autoincrement().default(0), + col78: double('col1').primaryKey().autoincrement().default(0), + col79: double('col1').primaryKey().autoincrement().default(0), + col80: double('col1').primaryKey().autoincrement().default(0), + col81: double('col1').primaryKey().autoincrement().default(0), + col82: double('col1').primaryKey().autoincrement().default(0), + col83: double('col1').primaryKey().autoincrement().default(0), + col84: double('col1').primaryKey().autoincrement().default(0), + col85: double('col1').primaryKey().autoincrement().default(0), + col86: double('col1').primaryKey().autoincrement().default(0), + col88: double('col1').primaryKey().autoincrement().default(0), + col89: double('col1').primaryKey().autoincrement().default(0), + col90: double('col1').primaryKey().autoincrement().default(0), + col91: double('col1').primaryKey().autoincrement().default(0), + col92: double('col1').primaryKey().autoincrement().default(0), + col93: double('col1').primaryKey().autoincrement().default(0), + col94: double('col1').primaryKey().autoincrement().default(0), + col95: double('col1').primaryKey().autoincrement().default(0), + col96: double('col1').primaryKey().autoincrement().default(0), + col98: double('col1').primaryKey().autoincrement().default(0), + col99: double('col1').primaryKey().autoincrement().default(0), + col100: double('col1').primaryKey().autoincrement().default(0), + col101: double('col1').primaryKey().autoincrement().default(0), + col102: double('col1').primaryKey().autoincrement().default(0), + col103: 
double('col1').primaryKey().autoincrement().default(0), + col104: double('col1').primaryKey().autoincrement().default(0), + col105: double('col1').primaryKey().autoincrement().default(0), + col106: double('col1').primaryKey().autoincrement().default(0), + col108: double('col1').primaryKey().autoincrement().default(0), + col109: double('col1').primaryKey().autoincrement().default(0), + col110: double('col11').primaryKey().autoincrement().default(0), + col111: double('col11').primaryKey().autoincrement().default(0), + col112: double('col11').primaryKey().autoincrement().default(0), + col113: double('col11').primaryKey().autoincrement().default(0), + col114: double('col11').primaryKey().autoincrement().default(0), + col115: double('col11').primaryKey().autoincrement().default(0), + col116: double('col11').primaryKey().autoincrement().default(0), + col118: double('col11').primaryKey().autoincrement().default(0), + col119: double('col11').primaryKey().autoincrement().default(0), + col120: double('col11').primaryKey().autoincrement().default(0), + col121: double('col11').primaryKey().autoincrement().default(0), + col122: double('col11').primaryKey().autoincrement().default(0), + col123: double('col11').primaryKey().autoincrement().default(0), + col124: double('col11').primaryKey().autoincrement().default(0), + col125: double('col11').primaryKey().autoincrement().default(0), + col126: double('col11').primaryKey().autoincrement().default(0), + col128: double('col11').primaryKey().autoincrement().default(0), + col129: double('col11').primaryKey().autoincrement().default(0), + col130: double('col11').primaryKey().autoincrement().default(0), + col131: double('col11').primaryKey().autoincrement().default(0), + col132: double('col11').primaryKey().autoincrement().default(0), + col133: double('col11').primaryKey().autoincrement().default(0), + col134: double('col11').primaryKey().autoincrement().default(0), + col135: double('col11').primaryKey().autoincrement().default(0), + 
col136: double('col11').primaryKey().autoincrement().default(0), + col138: double('col11').primaryKey().autoincrement().default(0), + col139: double('col11').primaryKey().autoincrement().default(0), + col140: double('col11').primaryKey().autoincrement().default(0), + col141: double('col11').primaryKey().autoincrement().default(0), + col142: double('col11').primaryKey().autoincrement().default(0), + col143: double('col11').primaryKey().autoincrement().default(0), + col144: double('col11').primaryKey().autoincrement().default(0), + col145: double('col11').primaryKey().autoincrement().default(0), + col146: double('col11').primaryKey().autoincrement().default(0), + col148: double('col11').primaryKey().autoincrement().default(0), + col149: double('col11').primaryKey().autoincrement().default(0), + col150: double('col11').primaryKey().autoincrement().default(0), + col151: double('col11').primaryKey().autoincrement().default(0), + col152: double('col11').primaryKey().autoincrement().default(0), + col153: double('col11').primaryKey().autoincrement().default(0), + col154: double('col11').primaryKey().autoincrement().default(0), + col155: double('col11').primaryKey().autoincrement().default(0), + col156: double('col11').primaryKey().autoincrement().default(0), + col158: double('col11').primaryKey().autoincrement().default(0), + col159: double('col11').primaryKey().autoincrement().default(0), + col160: double('col11').primaryKey().autoincrement().default(0), + col161: double('col11').primaryKey().autoincrement().default(0), + col162: double('col11').primaryKey().autoincrement().default(0), + col163: double('col11').primaryKey().autoincrement().default(0), + col164: double('col11').primaryKey().autoincrement().default(0), + col165: double('col11').primaryKey().autoincrement().default(0), + col166: double('col11').primaryKey().autoincrement().default(0), + col168: double('col11').primaryKey().autoincrement().default(0), + col169: 
double('col11').primaryKey().autoincrement().default(0), + col170: double('col11').primaryKey().autoincrement().default(0), + col171: double('col11').primaryKey().autoincrement().default(0), + col172: double('col11').primaryKey().autoincrement().default(0), + col173: double('col11').primaryKey().autoincrement().default(0), + col174: double('col11').primaryKey().autoincrement().default(0), + col175: double('col11').primaryKey().autoincrement().default(0), + col176: double('col11').primaryKey().autoincrement().default(0), + col178: double('col11').primaryKey().autoincrement().default(0), + col179: double('col11').primaryKey().autoincrement().default(0), + col180: double('col11').primaryKey().autoincrement().default(0), + col181: double('col11').primaryKey().autoincrement().default(0), + col182: double('col11').primaryKey().autoincrement().default(0), + col183: double('col11').primaryKey().autoincrement().default(0), + col184: double('col11').primaryKey().autoincrement().default(0), + col185: double('col11').primaryKey().autoincrement().default(0), + col186: double('col11').primaryKey().autoincrement().default(0), + col188: double('col11').primaryKey().autoincrement().default(0), + col189: double('col11').primaryKey().autoincrement().default(0), + col190: double('col11').primaryKey().autoincrement().default(0), + col191: double('col11').primaryKey().autoincrement().default(0), + col192: double('col11').primaryKey().autoincrement().default(0), + col193: double('col11').primaryKey().autoincrement().default(0), + col194: double('col11').primaryKey().autoincrement().default(0), + col195: double('col11').primaryKey().autoincrement().default(0), + col196: double('col11').primaryKey().autoincrement().default(0), + col198: double('col11').primaryKey().autoincrement().default(0), + col199: double('col11').primaryKey().autoincrement().default(0), + col200: double('col2').primaryKey().autoincrement().default(0), + col201: double('col2').primaryKey().autoincrement().default(0), + 
col202: double('col2').primaryKey().autoincrement().default(0), + col203: double('col2').primaryKey().autoincrement().default(0), + col204: double('col2').primaryKey().autoincrement().default(0), + col205: double('col2').primaryKey().autoincrement().default(0), + col206: double('col2').primaryKey().autoincrement().default(0), + col208: double('col2').primaryKey().autoincrement().default(0), + col209: double('col2').primaryKey().autoincrement().default(0), + col210: double('col21').primaryKey().autoincrement().default(0), + col211: double('col21').primaryKey().autoincrement().default(0), + col212: double('col21').primaryKey().autoincrement().default(0), + col213: double('col21').primaryKey().autoincrement().default(0), + col214: double('col21').primaryKey().autoincrement().default(0), + col215: double('col21').primaryKey().autoincrement().default(0), + col216: double('col21').primaryKey().autoincrement().default(0), + col218: double('col21').primaryKey().autoincrement().default(0), + col219: double('col21').primaryKey().autoincrement().default(0), + col220: double('col21').primaryKey().autoincrement().default(0), + col221: double('col21').primaryKey().autoincrement().default(0), + col222: double('col21').primaryKey().autoincrement().default(0), + col223: double('col21').primaryKey().autoincrement().default(0), + col224: double('col21').primaryKey().autoincrement().default(0), + col225: double('col21').primaryKey().autoincrement().default(0), + col226: double('col21').primaryKey().autoincrement().default(0), + col228: double('col21').primaryKey().autoincrement().default(0), + col229: double('col21').primaryKey().autoincrement().default(0), + col230: double('col21').primaryKey().autoincrement().default(0), + col231: double('col21').primaryKey().autoincrement().default(0), + col232: double('col21').primaryKey().autoincrement().default(0), + col233: double('col21').primaryKey().autoincrement().default(0), + col234: 
double('col21').primaryKey().autoincrement().default(0), + col235: double('col21').primaryKey().autoincrement().default(0), + col236: double('col21').primaryKey().autoincrement().default(0), + col238: double('col21').primaryKey().autoincrement().default(0), + col239: double('col21').primaryKey().autoincrement().default(0), + col240: double('col21').primaryKey().autoincrement().default(0), + col241: double('col21').primaryKey().autoincrement().default(0), + col242: double('col21').primaryKey().autoincrement().default(0), + col243: double('col21').primaryKey().autoincrement().default(0), + col244: double('col21').primaryKey().autoincrement().default(0), + col245: double('col21').primaryKey().autoincrement().default(0), + col246: double('col21').primaryKey().autoincrement().default(0), + col248: double('col21').primaryKey().autoincrement().default(0), + col249: double('col21').primaryKey().autoincrement().default(0), + col250: double('col21').primaryKey().autoincrement().default(0), + col251: double('col21').primaryKey().autoincrement().default(0), + col252: double('col21').primaryKey().autoincrement().default(0), + col253: double('col21').primaryKey().autoincrement().default(0), + col254: double('col21').primaryKey().autoincrement().default(0), + col255: double('col21').primaryKey().autoincrement().default(0), + col256: double('col21').primaryKey().autoincrement().default(0), + col258: double('col21').primaryKey().autoincrement().default(0), + col259: double('col21').primaryKey().autoincrement().default(0), + col260: double('col21').primaryKey().autoincrement().default(0), + col261: double('col21').primaryKey().autoincrement().default(0), + col262: double('col21').primaryKey().autoincrement().default(0), + col263: double('col21').primaryKey().autoincrement().default(0), + col264: double('col21').primaryKey().autoincrement().default(0), + col265: double('col21').primaryKey().autoincrement().default(0), + col266: double('col21').primaryKey().autoincrement().default(0), 
+ col268: double('col21').primaryKey().autoincrement().default(0), + col269: double('col21').primaryKey().autoincrement().default(0), + col270: double('col21').primaryKey().autoincrement().default(0), + col271: double('col21').primaryKey().autoincrement().default(0), + col272: double('col21').primaryKey().autoincrement().default(0), + col273: double('col21').primaryKey().autoincrement().default(0), + col274: double('col21').primaryKey().autoincrement().default(0), + col275: double('col21').primaryKey().autoincrement().default(0), + col276: double('col21').primaryKey().autoincrement().default(0), + col278: double('col21').primaryKey().autoincrement().default(0), + col279: double('col21').primaryKey().autoincrement().default(0), + col280: double('col21').primaryKey().autoincrement().default(0), + col281: double('col21').primaryKey().autoincrement().default(0), + col282: double('col21').primaryKey().autoincrement().default(0), + col283: double('col21').primaryKey().autoincrement().default(0), + col284: double('col21').primaryKey().autoincrement().default(0), + col285: double('col21').primaryKey().autoincrement().default(0), + col286: double('col21').primaryKey().autoincrement().default(0), + col288: double('col21').primaryKey().autoincrement().default(0), + col289: double('col21').primaryKey().autoincrement().default(0), + col290: double('col21').primaryKey().autoincrement().default(0), + col291: double('col21').primaryKey().autoincrement().default(0), + col292: double('col21').primaryKey().autoincrement().default(0), + col293: double('col21').primaryKey().autoincrement().default(0), + col294: double('col21').primaryKey().autoincrement().default(0), + col295: double('col21').primaryKey().autoincrement().default(0), + col296: double('col21').primaryKey().autoincrement().default(0), + col298: double('col21').primaryKey().autoincrement().default(0), + col299: double('col21').primaryKey().autoincrement().default(0), + col300: 
double('col3').primaryKey().autoincrement().default(0), + col301: double('col3').primaryKey().autoincrement().default(0), + col302: double('col3').primaryKey().autoincrement().default(0), + col303: double('col3').primaryKey().autoincrement().default(0), + col304: double('col3').primaryKey().autoincrement().default(0), + col305: double('col3').primaryKey().autoincrement().default(0), + col306: double('col3').primaryKey().autoincrement().default(0), + col308: double('col3').primaryKey().autoincrement().default(0), + col309: double('col3').primaryKey().autoincrement().default(0), + col310: double('col31').primaryKey().autoincrement().default(0), + col311: double('col31').primaryKey().autoincrement().default(0), + col312: double('col31').primaryKey().autoincrement().default(0), + col313: double('col31').primaryKey().autoincrement().default(0), + col314: double('col31').primaryKey().autoincrement().default(0), + col315: double('col31').primaryKey().autoincrement().default(0), + col316: double('col31').primaryKey().autoincrement().default(0), + col318: double('col31').primaryKey().autoincrement().default(0), + col319: double('col31').primaryKey().autoincrement().default(0), + col320: double('col31').primaryKey().autoincrement().default(0), + col321: double('col31').primaryKey().autoincrement().default(0), + col322: double('col31').primaryKey().autoincrement().default(0), + col323: double('col31').primaryKey().autoincrement().default(0), + col324: double('col31').primaryKey().autoincrement().default(0), + col325: double('col31').primaryKey().autoincrement().default(0), + col326: double('col31').primaryKey().autoincrement().default(0), + col328: double('col31').primaryKey().autoincrement().default(0), + col329: double('col31').primaryKey().autoincrement().default(0), + col330: double('col31').primaryKey().autoincrement().default(0), + col331: double('col31').primaryKey().autoincrement().default(0), + col332: double('col31').primaryKey().autoincrement().default(0), + 
col333: double('col31').primaryKey().autoincrement().default(0), + col334: double('col31').primaryKey().autoincrement().default(0), + col335: double('col31').primaryKey().autoincrement().default(0), + col336: double('col31').primaryKey().autoincrement().default(0), + col338: double('col31').primaryKey().autoincrement().default(0), + col339: double('col31').primaryKey().autoincrement().default(0), + col340: double('col31').primaryKey().autoincrement().default(0), + col341: double('col31').primaryKey().autoincrement().default(0), + col342: double('col31').primaryKey().autoincrement().default(0), + col343: double('col31').primaryKey().autoincrement().default(0), + col344: double('col31').primaryKey().autoincrement().default(0), + col345: double('col31').primaryKey().autoincrement().default(0), + col346: double('col31').primaryKey().autoincrement().default(0), + col348: double('col31').primaryKey().autoincrement().default(0), + col349: double('col31').primaryKey().autoincrement().default(0), + col350: double('col31').primaryKey().autoincrement().default(0), + col351: double('col31').primaryKey().autoincrement().default(0), + col352: double('col31').primaryKey().autoincrement().default(0), + col353: double('col31').primaryKey().autoincrement().default(0), + col354: double('col31').primaryKey().autoincrement().default(0), + col355: double('col31').primaryKey().autoincrement().default(0), + col356: double('col31').primaryKey().autoincrement().default(0), + col358: double('col31').primaryKey().autoincrement().default(0), + col359: double('col31').primaryKey().autoincrement().default(0), + col360: double('col31').primaryKey().autoincrement().default(0), + col361: double('col31').primaryKey().autoincrement().default(0), + col362: double('col31').primaryKey().autoincrement().default(0), + col363: double('col31').primaryKey().autoincrement().default(0), + col364: double('col31').primaryKey().autoincrement().default(0), + col365: 
double('col31').primaryKey().autoincrement().default(0), + col366: double('col31').primaryKey().autoincrement().default(0), + col368: double('col31').primaryKey().autoincrement().default(0), + col369: double('col31').primaryKey().autoincrement().default(0), + col370: double('col31').primaryKey().autoincrement().default(0), + col371: double('col31').primaryKey().autoincrement().default(0), + col372: double('col31').primaryKey().autoincrement().default(0), + col373: double('col31').primaryKey().autoincrement().default(0), + col374: double('col31').primaryKey().autoincrement().default(0), + col375: double('col31').primaryKey().autoincrement().default(0), + col376: double('col31').primaryKey().autoincrement().default(0), + col378: double('col31').primaryKey().autoincrement().default(0), + col379: double('col31').primaryKey().autoincrement().default(0), + col380: double('col31').primaryKey().autoincrement().default(0), + col381: double('col31').primaryKey().autoincrement().default(0), + col382: double('col31').primaryKey().autoincrement().default(0), + col383: double('col31').primaryKey().autoincrement().default(0), + col384: double('col31').primaryKey().autoincrement().default(0), + col385: double('col31').primaryKey().autoincrement().default(0), + col386: double('col31').primaryKey().autoincrement().default(0), + col388: double('col31').primaryKey().autoincrement().default(0), + col389: double('col31').primaryKey().autoincrement().default(0), + col390: double('col31').primaryKey().autoincrement().default(0), + col391: double('col31').primaryKey().autoincrement().default(0), + col392: double('col31').primaryKey().autoincrement().default(0), + col393: double('col31').primaryKey().autoincrement().default(0), + col394: double('col31').primaryKey().autoincrement().default(0), + col395: double('col31').primaryKey().autoincrement().default(0), + col396: double('col31').primaryKey().autoincrement().default(0), + col398: double('col31').primaryKey().autoincrement().default(0), 
+ col399: double('col31').primaryKey().autoincrement().default(0), + col400: double('col4').primaryKey().autoincrement().default(0), + col401: double('col4').primaryKey().autoincrement().default(0), + col402: double('col4').primaryKey().autoincrement().default(0), + col403: double('col4').primaryKey().autoincrement().default(0), + col404: double('col4').primaryKey().autoincrement().default(0), + col405: double('col4').primaryKey().autoincrement().default(0), + col406: double('col4').primaryKey().autoincrement().default(0), + col408: double('col4').primaryKey().autoincrement().default(0), + col409: double('col4').primaryKey().autoincrement().default(0), + col410: double('col41').primaryKey().autoincrement().default(0), + col411: double('col41').primaryKey().autoincrement().default(0), + col412: double('col41').primaryKey().autoincrement().default(0), + col413: double('col41').primaryKey().autoincrement().default(0), + col414: double('col41').primaryKey().autoincrement().default(0), + col415: double('col41').primaryKey().autoincrement().default(0), + col416: double('col41').primaryKey().autoincrement().default(0), + col418: double('col41').primaryKey().autoincrement().default(0), + col419: double('col41').primaryKey().autoincrement().default(0), + col420: double('col41').primaryKey().autoincrement().default(0), + col421: double('col41').primaryKey().autoincrement().default(0), + col422: double('col41').primaryKey().autoincrement().default(0), + col423: double('col41').primaryKey().autoincrement().default(0), + col424: double('col41').primaryKey().autoincrement().default(0), + col425: double('col41').primaryKey().autoincrement().default(0), + col426: double('col41').primaryKey().autoincrement().default(0), + col428: double('col41').primaryKey().autoincrement().default(0), + col429: double('col41').primaryKey().autoincrement().default(0), + col430: double('col41').primaryKey().autoincrement().default(0), + col431: 
double('col41').primaryKey().autoincrement().default(0), + col432: double('col41').primaryKey().autoincrement().default(0), + col433: double('col41').primaryKey().autoincrement().default(0), + col434: double('col41').primaryKey().autoincrement().default(0), + col435: double('col41').primaryKey().autoincrement().default(0), + col436: double('col41').primaryKey().autoincrement().default(0), + col438: double('col41').primaryKey().autoincrement().default(0), + col439: double('col41').primaryKey().autoincrement().default(0), + col440: double('col41').primaryKey().autoincrement().default(0), + col441: double('col41').primaryKey().autoincrement().default(0), + col442: double('col41').primaryKey().autoincrement().default(0), + col443: double('col41').primaryKey().autoincrement().default(0), + col444: double('col41').primaryKey().autoincrement().default(0), + col445: double('col41').primaryKey().autoincrement().default(0), + col446: double('col41').primaryKey().autoincrement().default(0), + col448: double('col41').primaryKey().autoincrement().default(0), + col449: double('col41').primaryKey().autoincrement().default(0), + col450: double('col41').primaryKey().autoincrement().default(0), + col451: double('col41').primaryKey().autoincrement().default(0), + col452: double('col41').primaryKey().autoincrement().default(0), + col453: double('col41').primaryKey().autoincrement().default(0), + col454: double('col41').primaryKey().autoincrement().default(0), + col455: double('col41').primaryKey().autoincrement().default(0), + col456: double('col41').primaryKey().autoincrement().default(0), + col458: double('col41').primaryKey().autoincrement().default(0), + col459: double('col41').primaryKey().autoincrement().default(0), + col460: double('col41').primaryKey().autoincrement().default(0), + col461: double('col41').primaryKey().autoincrement().default(0), + col462: double('col41').primaryKey().autoincrement().default(0), + col463: double('col41').primaryKey().autoincrement().default(0), 
+ col464: double('col41').primaryKey().autoincrement().default(0), + col465: double('col41').primaryKey().autoincrement().default(0), + col466: double('col41').primaryKey().autoincrement().default(0), + col468: double('col41').primaryKey().autoincrement().default(0), + col469: double('col41').primaryKey().autoincrement().default(0), + col470: double('col41').primaryKey().autoincrement().default(0), + col471: double('col41').primaryKey().autoincrement().default(0), + col472: double('col41').primaryKey().autoincrement().default(0), + col473: double('col41').primaryKey().autoincrement().default(0), + col474: double('col41').primaryKey().autoincrement().default(0), + col475: double('col41').primaryKey().autoincrement().default(0), + col476: double('col41').primaryKey().autoincrement().default(0), + col478: double('col41').primaryKey().autoincrement().default(0), + col479: double('col41').primaryKey().autoincrement().default(0), + col480: double('col41').primaryKey().autoincrement().default(0), + col481: double('col41').primaryKey().autoincrement().default(0), + col482: double('col41').primaryKey().autoincrement().default(0), + col483: double('col41').primaryKey().autoincrement().default(0), + col484: double('col41').primaryKey().autoincrement().default(0), + col485: double('col41').primaryKey().autoincrement().default(0), + col486: double('col41').primaryKey().autoincrement().default(0), + col488: double('col41').primaryKey().autoincrement().default(0), + col489: double('col41').primaryKey().autoincrement().default(0), + col490: double('col41').primaryKey().autoincrement().default(0), + col491: double('col41').primaryKey().autoincrement().default(0), + col492: double('col41').primaryKey().autoincrement().default(0), + col493: double('col41').primaryKey().autoincrement().default(0), + col494: double('col41').primaryKey().autoincrement().default(0), + col495: double('col41').primaryKey().autoincrement().default(0), + col496: 
double('col41').primaryKey().autoincrement().default(0), + col498: double('col41').primaryKey().autoincrement().default(0), + col499: double('col41').primaryKey().autoincrement().default(0), + col500: double('col5').primaryKey().autoincrement().default(0), + col501: double('col5').primaryKey().autoincrement().default(0), + col502: double('col5').primaryKey().autoincrement().default(0), + col503: double('col5').primaryKey().autoincrement().default(0), + col504: double('col5').primaryKey().autoincrement().default(0), + col505: double('col5').primaryKey().autoincrement().default(0), + col506: double('col5').primaryKey().autoincrement().default(0), + col508: double('col5').primaryKey().autoincrement().default(0), + col509: double('col5').primaryKey().autoincrement().default(0), + col510: double('col51').primaryKey().autoincrement().default(0), + col511: double('col51').primaryKey().autoincrement().default(0), + col512: double('col51').primaryKey().autoincrement().default(0), + col513: double('col51').primaryKey().autoincrement().default(0), + col514: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col515: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col516: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col518: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col519: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col520: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col521: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col522: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col523: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col524: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col525: bigint('col51', { mode: 'number' 
}).primaryKey().autoincrement().default(0), + col526: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col528: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col529: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col530: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col531: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col532: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col533: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col534: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col535: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col536: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col538: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col539: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col540: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col541: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col542: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col543: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col544: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col545: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col546: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col548: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col549: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col550: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col551: bigint('col51', { mode: 
'number' }).primaryKey().autoincrement().default(0), + col552: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col553: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col554: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col555: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col556: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col558: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col559: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col560: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col561: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col562: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col563: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col564: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col565: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col566: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col568: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col569: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col570: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col571: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col572: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col573: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col574: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col575: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col576: bigint('col51', { 
mode: 'number' }).primaryKey().autoincrement().default(0), + col578: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col579: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col580: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col581: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col582: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col583: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col584: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col585: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col586: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col588: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col589: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col590: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col591: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col592: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col593: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col594: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col595: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col596: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col598: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col599: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), + col600: bigint('col6', { mode: 'number' }).primaryKey().autoincrement().default(0), + col601: double('col6').primaryKey().autoincrement().default(0), + col602: 
double('col6').primaryKey().autoincrement().default(0), + col603: double('col6').primaryKey().autoincrement().default(0), + col604: double('col6').primaryKey().autoincrement().default(0), + col605: double('col6').primaryKey().autoincrement().default(0), + col606: double('col6').primaryKey().autoincrement().default(0), + col608: double('col6').primaryKey().autoincrement().default(0), + col609: double('col6').primaryKey().autoincrement().default(0), + col610: double('col61').primaryKey().autoincrement().default(0), + col611: double('col61').primaryKey().autoincrement().default(0), + col612: double('col61').primaryKey().autoincrement().default(0), + col613: double('col61').primaryKey().autoincrement().default(0), + col614: double('col61').primaryKey().autoincrement().default(0), + col615: double('col61').primaryKey().autoincrement().default(0), + col616: double('col61').primaryKey().autoincrement().default(0), + col618: double('col61').primaryKey().autoincrement().default(0), + col619: double('col61').primaryKey().autoincrement().default(0), + col620: double('col61').primaryKey().autoincrement().default(0), + col621: double('col61').primaryKey().autoincrement().default(0), + col622: double('col61').primaryKey().autoincrement().default(0), + col623: double('col61').primaryKey().autoincrement().default(0), + col624: double('col61').primaryKey().autoincrement().default(0), + col625: double('col61').primaryKey().autoincrement().default(0), + col626: double('col61').primaryKey().autoincrement().default(0), + col628: double('col61').primaryKey().autoincrement().default(0), + col629: double('col61').primaryKey().autoincrement().default(0), + col630: double('col61').primaryKey().autoincrement().default(0), + col631: double('col61').primaryKey().autoincrement().default(0), + col632: double('col61').primaryKey().autoincrement().default(0), + col633: double('col61').primaryKey().autoincrement().default(0), + col634: double('col61').primaryKey().autoincrement().default(0), + 
col635: double('col61').primaryKey().autoincrement().default(0), + col636: double('col61').primaryKey().autoincrement().default(0), + col638: double('col61').primaryKey().autoincrement().default(0), + col639: double('col61').primaryKey().autoincrement().default(0), + col640: double('col61').primaryKey().autoincrement().default(0), + col641: double('col61').primaryKey().autoincrement().default(0), + col642: double('col61').primaryKey().autoincrement().default(0), + col643: double('col61').primaryKey().autoincrement().default(0), + col644: double('col61').primaryKey().autoincrement().default(0), + col645: double('col61').primaryKey().autoincrement().default(0), + col646: double('col61').primaryKey().autoincrement().default(0), + col648: double('col61').primaryKey().autoincrement().default(0), + col649: double('col61').primaryKey().autoincrement().default(0), + col650: double('col61').primaryKey().autoincrement().default(0), + col651: double('col61').primaryKey().autoincrement().default(0), + col652: double('col61').primaryKey().autoincrement().default(0), + col653: double('col61').primaryKey().autoincrement().default(0), + col654: double('col61').primaryKey().autoincrement().default(0), + col655: double('col61').primaryKey().autoincrement().default(0), + col656: double('col61').primaryKey().autoincrement().default(0), + col658: double('col61').primaryKey().autoincrement().default(0), + col659: double('col61').primaryKey().autoincrement().default(0), + col660: double('col61').primaryKey().autoincrement().default(0), + col661: double('col61').primaryKey().autoincrement().default(0), + col662: double('col61').primaryKey().autoincrement().default(0), + col663: double('col61').primaryKey().autoincrement().default(0), + col664: double('col61').primaryKey().autoincrement().default(0), + col665: double('col61').primaryKey().autoincrement().default(0), + col666: double('col61').primaryKey().autoincrement().default(0), + col668: 
double('col61').primaryKey().autoincrement().default(0), + col669: double('col61').primaryKey().autoincrement().default(0), + col670: double('col61').primaryKey().autoincrement().default(0), + col671: double('col61').primaryKey().autoincrement().default(0), + col672: double('col61').primaryKey().autoincrement().default(0), + col673: double('col61').primaryKey().autoincrement().default(0), + col674: double('col61').primaryKey().autoincrement().default(0), + col675: double('col61').primaryKey().autoincrement().default(0), + col676: double('col61').primaryKey().autoincrement().default(0), + col678: double('col61').primaryKey().autoincrement().default(0), + col679: double('col61').primaryKey().autoincrement().default(0), + col680: double('col61').primaryKey().autoincrement().default(0), + col681: double('col61').primaryKey().autoincrement().default(0), + col682: double('col61').primaryKey().autoincrement().default(0), + col683: double('col61').primaryKey().autoincrement().default(0), + col684: double('col61').primaryKey().autoincrement().default(0), + col685: double('col61').primaryKey().autoincrement().default(0), + col686: double('col61').primaryKey().autoincrement().default(0), + col688: double('col61').primaryKey().autoincrement().default(0), + col689: double('col61').primaryKey().autoincrement().default(0), + col690: double('col61').primaryKey().autoincrement().default(0), + col691: double('col61').primaryKey().autoincrement().default(0), + col692: double('col61').primaryKey().autoincrement().default(0), + col693: double('col61').primaryKey().autoincrement().default(0), + col694: double('col61').primaryKey().autoincrement().default(0), + col695: double('col61').primaryKey().autoincrement().default(0), + col696: double('col61').primaryKey().autoincrement().default(0), + col698: double('col61').primaryKey().autoincrement().default(0), + col699: double('col61').primaryKey().autoincrement().default(0), + col700: double('col7').primaryKey().autoincrement().default(0), 
+ col701: double('col7').primaryKey().autoincrement().default(0), + col702: double('col7').primaryKey().autoincrement().default(0), + col703: double('col7').primaryKey().autoincrement().default(0), + col704: double('col7').primaryKey().autoincrement().default(0), + col705: double('col7').primaryKey().autoincrement().default(0), + col706: double('col7').primaryKey().autoincrement().default(0), + col708: double('col7').primaryKey().autoincrement().default(0), + col709: double('col7').primaryKey().autoincrement().default(0), + col710: double('col71').primaryKey().autoincrement().default(0), + col711: double('col71').primaryKey().autoincrement().default(0), + col712: double('col71').primaryKey().autoincrement().default(0), + col713: double('col71').primaryKey().autoincrement().default(0), + col714: double('col71').primaryKey().autoincrement().default(0), + col715: double('col71').primaryKey().autoincrement().default(0), + col716: double('col71').primaryKey().autoincrement().default(0), + col718: double('col71').primaryKey().autoincrement().default(0), + col719: double('col71').primaryKey().autoincrement().default(0), + col720: double('col71').primaryKey().autoincrement().default(0), + col721: double('col71').primaryKey().autoincrement().default(0), + col722: double('col71').primaryKey().autoincrement().default(0), + col723: double('col71').primaryKey().autoincrement().default(0), + col724: double('col71').primaryKey().autoincrement().default(0), + col725: double('col71').primaryKey().autoincrement().default(0), + col726: double('col71').primaryKey().autoincrement().default(0), + col728: double('col71').primaryKey().autoincrement().default(0), + col729: double('col71').primaryKey().autoincrement().default(0), + col730: double('col71').primaryKey().autoincrement().default(0), + col731: double('col71').primaryKey().autoincrement().default(0), + col732: double('col71').primaryKey().autoincrement().default(0), + col733: 
double('col71').primaryKey().autoincrement().default(0), + col734: double('col71').primaryKey().autoincrement().default(0), + col735: double('col71').primaryKey().autoincrement().default(0), + col736: double('col71').primaryKey().autoincrement().default(0), + col738: double('col71').primaryKey().autoincrement().default(0), + col739: double('col71').primaryKey().autoincrement().default(0), + col740: double('col71').primaryKey().autoincrement().default(0), + col741: double('col71').primaryKey().autoincrement().default(0), + col742: double('col71').primaryKey().autoincrement().default(0), + col743: double('col71').primaryKey().autoincrement().default(0), + col744: double('col71').primaryKey().autoincrement().default(0), + col745: double('col71').primaryKey().autoincrement().default(0), + col746: double('col71').primaryKey().autoincrement().default(0), + col748: double('col71').primaryKey().autoincrement().default(0), + col749: double('col71').primaryKey().autoincrement().default(0), + col750: double('col71').primaryKey().autoincrement().default(0), + col751: double('col71').primaryKey().autoincrement().default(0), + col752: double('col71').primaryKey().autoincrement().default(0), + col753: double('col71').primaryKey().autoincrement().default(0), + col754: double('col71').primaryKey().autoincrement().default(0), + col755: double('col71').primaryKey().autoincrement().default(0), + col756: double('col71').primaryKey().autoincrement().default(0), + col758: double('col71').primaryKey().autoincrement().default(0), + col759: double('col71').primaryKey().autoincrement().default(0), + col760: double('col71').primaryKey().autoincrement().default(0), + col761: double('col71').primaryKey().autoincrement().default(0), + col762: double('col71').primaryKey().autoincrement().default(0), + col763: double('col71').primaryKey().autoincrement().default(0), + col764: double('col71').primaryKey().autoincrement().default(0), + col765: double('col71').primaryKey().autoincrement().default(0), 
+ col766: double('col71').primaryKey().autoincrement().default(0), + col768: double('col71').primaryKey().autoincrement().default(0), + col769: double('col71').primaryKey().autoincrement().default(0), + col770: double('col71').primaryKey().autoincrement().default(0), + col771: double('col71').primaryKey().autoincrement().default(0), + col772: double('col71').primaryKey().autoincrement().default(0), + col773: double('col71').primaryKey().autoincrement().default(0), + col774: double('col71').primaryKey().autoincrement().default(0), + col775: double('col71').primaryKey().autoincrement().default(0), + col776: double('col71').primaryKey().autoincrement().default(0), + col778: double('col71').primaryKey().autoincrement().default(0), + col779: double('col71').primaryKey().autoincrement().default(0), + col780: double('col71').primaryKey().autoincrement().default(0), + col781: double('col71').primaryKey().autoincrement().default(0), + col782: double('col71').primaryKey().autoincrement().default(0), + col783: double('col71').primaryKey().autoincrement().default(0), + col784: double('col71').primaryKey().autoincrement().default(0), + col785: double('col71').primaryKey().autoincrement().default(0), + col786: double('col71').primaryKey().autoincrement().default(0), + col788: double('col71').primaryKey().autoincrement().default(0), + col789: double('col71').primaryKey().autoincrement().default(0), + col790: double('col71').primaryKey().autoincrement().default(0), + col791: double('col71').primaryKey().autoincrement().default(0), + col792: double('col71').primaryKey().autoincrement().default(0), + col793: double('col71').primaryKey().autoincrement().default(0), + col794: double('col71').primaryKey().autoincrement().default(0), + col795: double('col71').primaryKey().autoincrement().default(0), + col796: double('col71').primaryKey().autoincrement().default(0), + col798: double('col71').primaryKey().autoincrement().default(0), + col799: 
double('col71').primaryKey().autoincrement().default(0), + col800: double('col8').primaryKey().autoincrement().default(0), + col801: double('col8').primaryKey().autoincrement().default(0), + col802: double('col8').primaryKey().autoincrement().default(0), + col803: double('col8').primaryKey().autoincrement().default(0), + col804: double('col8').primaryKey().autoincrement().default(0), + col805: double('col8').primaryKey().autoincrement().default(0), + col806: double('col8').primaryKey().autoincrement().default(0), + col808: double('col8').primaryKey().autoincrement().default(0), + col809: double('col8').primaryKey().autoincrement().default(0), + col810: double('col81').primaryKey().autoincrement().default(0), + col811: double('col81').primaryKey().autoincrement().default(0), + col812: double('col81').primaryKey().autoincrement().default(0), + col813: double('col81').primaryKey().autoincrement().default(0), + col814: double('col81').primaryKey().autoincrement().default(0), + col815: double('col81').primaryKey().autoincrement().default(0), + col816: double('col81').primaryKey().autoincrement().default(0), + col818: double('col81').primaryKey().autoincrement().default(0), + col819: double('col81').primaryKey().autoincrement().default(0), + col820: double('col81').primaryKey().autoincrement().default(0), + col821: double('col81').primaryKey().autoincrement().default(0), + col822: double('col81').primaryKey().autoincrement().default(0), + col823: double('col81').primaryKey().autoincrement().default(0), + col824: double('col81').primaryKey().autoincrement().default(0), + col825: double('col81').primaryKey().autoincrement().default(0), + col826: double('col81').primaryKey().autoincrement().default(0), + col828: double('col81').primaryKey().autoincrement().default(0), + col829: double('col81').primaryKey().autoincrement().default(0), + col830: double('col81').primaryKey().autoincrement().default(0), + col831: double('col81').primaryKey().autoincrement().default(0), + 
col832: double('col81').primaryKey().autoincrement().default(0), + col833: double('col81').primaryKey().autoincrement().default(0), + col834: double('col81').primaryKey().autoincrement().default(0), + col835: double('col81').primaryKey().autoincrement().default(0), + col836: double('col81').primaryKey().autoincrement().default(0), + col838: double('col81').primaryKey().autoincrement().default(0), + col839: double('col81').primaryKey().autoincrement().default(0), + col840: double('col81').primaryKey().autoincrement().default(0), + col841: double('col81').primaryKey().autoincrement().default(0), + col842: double('col81').primaryKey().autoincrement().default(0), + col843: double('col81').primaryKey().autoincrement().default(0), + col844: double('col81').primaryKey().autoincrement().default(0), + col845: double('col81').primaryKey().autoincrement().default(0), + col846: double('col81').primaryKey().autoincrement().default(0), + col848: double('col81').primaryKey().autoincrement().default(0), + col849: double('col81').primaryKey().autoincrement().default(0), + col850: double('col81').primaryKey().autoincrement().default(0), + col851: double('col81').primaryKey().autoincrement().default(0), + col852: double('col81').primaryKey().autoincrement().default(0), + col853: double('col81').primaryKey().autoincrement().default(0), + col854: double('col81').primaryKey().autoincrement().default(0), + col855: double('col81').primaryKey().autoincrement().default(0), + col856: double('col81').primaryKey().autoincrement().default(0), + col858: double('col81').primaryKey().autoincrement().default(0), + col859: double('col81').primaryKey().autoincrement().default(0), + col860: double('col81').primaryKey().autoincrement().default(0), + col861: double('col81').primaryKey().autoincrement().default(0), + col862: double('col81').primaryKey().autoincrement().default(0), + col863: double('col81').primaryKey().autoincrement().default(0), + col864: 
double('col81').primaryKey().autoincrement().default(0), + col865: double('col81').primaryKey().autoincrement().default(0), + col866: double('col81').primaryKey().autoincrement().default(0), + col868: double('col81').primaryKey().autoincrement().default(0), + col869: double('col81').primaryKey().autoincrement().default(0), + col870: double('col81').primaryKey().autoincrement().default(0), + col871: double('col81').primaryKey().autoincrement().default(0), + col872: double('col81').primaryKey().autoincrement().default(0), + col873: double('col81').primaryKey().autoincrement().default(0), + col874: double('col81').primaryKey().autoincrement().default(0), + col875: double('col81').primaryKey().autoincrement().default(0), + col876: double('col81').primaryKey().autoincrement().default(0), + col878: double('col81').primaryKey().autoincrement().default(0), + col879: double('col81').primaryKey().autoincrement().default(0), + col880: double('col81').primaryKey().autoincrement().default(0), + col881: double('col81').primaryKey().autoincrement().default(0), + col882: double('col81').primaryKey().autoincrement().default(0), + col883: double('col81').primaryKey().autoincrement().default(0), + col884: double('col81').primaryKey().autoincrement().default(0), + col885: double('col81').primaryKey().autoincrement().default(0), + col886: double('col81').primaryKey().autoincrement().default(0), + col888: double('col81').primaryKey().autoincrement().default(0), + col889: double('col81').primaryKey().autoincrement().default(0), + col890: double('col81').primaryKey().autoincrement().default(0), + col891: double('col81').primaryKey().autoincrement().default(0), + col892: double('col81').primaryKey().autoincrement().default(0), + col893: double('col81').primaryKey().autoincrement().default(0), + col894: double('col81').primaryKey().autoincrement().default(0), + col895: double('col81').primaryKey().autoincrement().default(0), + col896: double('col81').primaryKey().autoincrement().default(0), 
+ col898: double('col81').primaryKey().autoincrement().default(0), + col899: double('col81').primaryKey().autoincrement().default(0), + col900: double('col9').primaryKey().autoincrement().default(0), + col901: double('col9').primaryKey().autoincrement().default(0), + col902: double('col9').primaryKey().autoincrement().default(0), + col903: double('col9').primaryKey().autoincrement().default(0), + col904: double('col9').primaryKey().autoincrement().default(0), + col905: double('col9').primaryKey().autoincrement().default(0), + col906: double('col9').primaryKey().autoincrement().default(0), + col908: double('col9').primaryKey().autoincrement().default(0), + col909: double('col9').primaryKey().autoincrement().default(0), + col910: double('col91').primaryKey().autoincrement().default(0), + col911: double('col91').primaryKey().autoincrement().default(0), + col912: double('col91').primaryKey().autoincrement().default(0), + col913: double('col91').primaryKey().autoincrement().default(0), + col914: double('col91').primaryKey().autoincrement().default(0), + col915: double('col91').primaryKey().autoincrement().default(0), + col916: double('col91').primaryKey().autoincrement().default(0), + col918: double('col91').primaryKey().autoincrement().default(0), + col919: double('col91').primaryKey().autoincrement().default(0), + col920: double('col91').primaryKey().autoincrement().default(0), + col921: double('col91').primaryKey().autoincrement().default(0), + col922: double('col91').primaryKey().autoincrement().default(0), + col923: double('col91').primaryKey().autoincrement().default(0), + col924: double('col91').primaryKey().autoincrement().default(0), + col925: double('col91').primaryKey().autoincrement().default(0), + col926: double('col91').primaryKey().autoincrement().default(0), + col928: double('col91').primaryKey().autoincrement().default(0), + col929: double('col91').primaryKey().autoincrement().default(0), + col930: 
double('col91').primaryKey().autoincrement().default(0), + col931: double('col91').primaryKey().autoincrement().default(0), + col932: double('col91').primaryKey().autoincrement().default(0), + col933: double('col91').primaryKey().autoincrement().default(0), + col934: double('col91').primaryKey().autoincrement().default(0), + col935: double('col91').primaryKey().autoincrement().default(0), + col936: double('col91').primaryKey().autoincrement().default(0), + col938: double('col91').primaryKey().autoincrement().default(0), + col939: double('col91').primaryKey().autoincrement().default(0), + col940: double('col91').primaryKey().autoincrement().default(0), + col941: double('col91').primaryKey().autoincrement().default(0), + col942: double('col91').primaryKey().autoincrement().default(0), + col943: double('col91').primaryKey().autoincrement().default(0), + col944: varchar('col91', { length: 200 }).primaryKey().default('0'), + col945: varchar('col91', { length: 200 }).primaryKey().default('0'), + col946: varchar('col91', { length: 200 }).primaryKey().default('0'), + col948: varchar('col91', { length: 200 }).primaryKey().default('0'), + col949: varchar('col91', { length: 200 }).primaryKey().default('0'), + col950: varchar('col91', { length: 200 }).primaryKey().default('0'), + col951: varchar('col91', { length: 200 }).primaryKey().default('0'), + col952: varchar('col91', { length: 200 }).primaryKey().default('0'), + col953: varchar('col91', { length: 200 }).primaryKey().default('0'), + col954: varchar('col91', { length: 200 }).primaryKey().default('0'), + col955: varchar('col91', { length: 200 }).primaryKey().default('0'), + col956: varchar('col91', { length: 200 }).primaryKey().default('0'), + col958: varchar('col91', { length: 200 }).primaryKey().default('0'), + col959: varchar('col91', { length: 200 }).primaryKey().default('0'), + col960: varchar('col91', { length: 200 }).primaryKey().default('0'), + col961: varchar('col91', { length: 200 }).primaryKey().default('0'), + 
col962: varchar('col91', { length: 200 }).primaryKey().default('0'), + col963: varchar('col91', { length: 200 }).primaryKey().default('0'), + col964: varchar('col91', { length: 200 }).primaryKey().default('0'), + col965: varchar('col91', { length: 200 }).primaryKey().default('0'), + col966: varchar('col91', { length: 200 }).primaryKey().default('0'), + col968: varchar('col91', { length: 200 }).primaryKey().default('0'), + col969: varchar('col91', { length: 200 }).primaryKey().default('0'), + col970: varchar('col91', { length: 200 }).primaryKey().default('0'), + col971: varchar('col91', { length: 200 }).primaryKey().default('0'), + col972: varchar('col91', { length: 200 }).primaryKey().default('0'), + col973: varchar('col91', { length: 200 }).primaryKey().default('0'), + col974: varchar('col91', { length: 200 }).primaryKey().default('0'), + col975: varchar('col91', { length: 200 }).primaryKey().default('0'), + col976: varchar('col91', { length: 200 }).primaryKey().default('0'), + col978: varchar('col91', { length: 200 }).primaryKey().default('0'), + col979: varchar('col91', { length: 200 }).primaryKey().default('0'), + col980: varchar('col91', { length: 200 }).primaryKey().default('0'), + col981: varchar('col91', { length: 200 }).primaryKey().default('0'), + col982: varchar('col91', { length: 200 }).primaryKey().default('0'), + col983: varchar('col91', { length: 200 }).primaryKey().default('0'), + col984: varchar('col91', { length: 200 }).primaryKey().default('0'), + col985: varchar('col91', { length: 200 }).primaryKey().default('0'), + col986: varchar('col91', { length: 200 }).primaryKey().default('0'), + col988: varchar('col91', { length: 200 }).primaryKey().default('0'), + col989: varchar('col91', { length: 200 }).primaryKey().default('0'), + col990: varchar('col91', { length: 200 }).primaryKey().default('0'), + col991: varchar('col91', { length: 200 }).primaryKey().default('0'), + col992: varchar('col91', { length: 200 }).primaryKey().default('0'), + col993: 
varchar('col91', { length: 200 }).primaryKey().default('0'), + col994: varchar('col91', { length: 200 }).primaryKey().default('0'), + col995: varchar('col91', { length: 200 }).primaryKey().default('0'), + col996: varchar('col91', { length: 200 }).primaryKey().default('0'), + col998: varchar('col91', { length: 200 }).primaryKey().default('0'), + col999: varchar('col91', { length: 200 }).primaryKey().default('0'), +}); diff --git a/drizzle-orm/type-tests/singlestore/db.ts b/drizzle-orm/type-tests/singlestore/db.ts new file mode 100644 index 000000000..f9bc6ff5f --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/db.ts @@ -0,0 +1,12 @@ +import { createPool } from 'mysql2/promise'; +import { drizzle } from '~/singlestore/index.ts'; + +const pool = createPool({}); + +export const db = drizzle(pool); + +{ + drizzle(pool); + drizzle(pool, {}); + drizzle(pool, { schema: {} }); +} diff --git a/drizzle-orm/type-tests/singlestore/delete.ts b/drizzle-orm/type-tests/singlestore/delete.ts new file mode 100644 index 000000000..0fce8882e --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/delete.ts @@ -0,0 +1,61 @@ +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { eq } from '~/expressions.ts'; +import type { SingleStoreDelete } from '~/singlestore-core/index.ts'; +import type { SingleStoreRawQueryResult } from '~/singlestore/index.ts'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; +import { users } from './tables.ts'; + +const deleteAll = await db.delete(users); +Expect>; + +const deleteAllStmt = db.delete(users).prepare(); +const deleteAllPrepared = await deleteAllStmt.execute(); +Expect>; + +const deleteWhere = await db.delete(users).where(eq(users.id, 1)); +Expect>; + +const deleteWhereStmt = db.delete(users).where(eq(users.id, 1)).prepare(); +const deleteWherePrepared = await deleteWhereStmt.execute(); +Expect>; + +const deleteReturningAll = await db.delete(users); +Expect>; + +const 
deleteReturningAllStmt = db.delete(users).prepare(); +const deleteReturningAllPrepared = await deleteReturningAllStmt.execute(); +Expect>; + +const deleteReturningPartial = await db.delete(users); +Expect>; + +const deleteReturningPartialStmt = db.delete(users).prepare(); +const deleteReturningPartialPrepared = await deleteReturningPartialStmt.execute(); +Expect>; + +{ + function dynamic(qb: T) { + return qb.where(sql``); + } + + const qbBase = db.delete(users).$dynamic(); + const qb = dynamic(qbBase); + const result = await qb; + Expect>; +} + +{ + db + .delete(users) + .where(sql``) + // @ts-expect-error method was already called + .where(sql``); + + db + .delete(users) + .$dynamic() + .where(sql``) + .where(sql``); +} diff --git a/drizzle-orm/type-tests/singlestore/generated-columns.ts b/drizzle-orm/type-tests/singlestore/generated-columns.ts new file mode 100644 index 000000000..e5b17a9b1 --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/generated-columns.ts @@ -0,0 +1,158 @@ +import { type Equal, Expect } from 'type-tests/utils'; +import { type InferInsertModel, type InferSelectModel, sql } from '~/index'; +import { drizzle } from '~/singlestore'; +import { serial, singlestoreTable, text, varchar } from '~/singlestore-core'; +import { db } from './db'; + +const users = singlestoreTable( + 'users', + { + id: serial('id').primaryKey(), + firstName: varchar('first_name', { length: 255 }), + lastName: varchar('last_name', { length: 255 }), + email: text('email').notNull(), + fullName: text('full_name').generatedAlwaysAs(sql`concat_ws(first_name, ' ', last_name)`), + upperName: text('upper_name').generatedAlwaysAs( + sql` case when first_name is null then null else upper(first_name) end `, + ).$type(), // There is no way for drizzle to detect nullability in these cases. 
This is how the user can work around it + }, +); +{ + type User = typeof users.$inferSelect; + type NewUser = typeof users.$inferInsert; + + Expect< + Equal< + { + id: number; + firstName: string | null; + lastName: string | null; + email: string; + fullName: string | null; + upperName: string | null; + }, + User + > + >(); + + Expect< + Equal< + { + email: string; + id?: number | undefined; + firstName?: string | null | undefined; + lastName?: string | null | undefined; + }, + NewUser + > + >(); +} + +{ + type User = InferSelectModel; + type NewUser = InferInsertModel; + + Expect< + Equal< + { + id: number; + firstName: string | null; + lastName: string | null; + email: string; + fullName: string | null; + upperName: string | null; + }, + User + > + >(); + + Expect< + Equal< + { + email: string; + id?: number | undefined; + firstName?: string | null | undefined; + lastName?: string | null | undefined; + }, + NewUser + > + >(); +} + +{ + const dbUsers = await db.select().from(users); + + Expect< + Equal< + { + id: number; + firstName: string | null; + lastName: string | null; + email: string; + fullName: string | null; + upperName: string | null; + }[], + typeof dbUsers + > + >(); +} + +{ + const db = drizzle({} as any, { schema: { users } }); + + const dbUser = await db.query.users.findFirst(); + + Expect< + Equal< + { + id: number; + firstName: string | null; + lastName: string | null; + email: string; + fullName: string | null; + upperName: string | null; + } | undefined, + typeof dbUser + > + >(); +} + +{ + const db = drizzle({} as any, { schema: { users } }); + + const dbUser = await db.query.users.findMany(); + + Expect< + Equal< + { + id: number; + firstName: string | null; + lastName: string | null; + email: string; + fullName: string | null; + upperName: string | null; + }[], + typeof dbUser + > + >(); +} + +{ + // @ts-expect-error - Can't use the fullName because it's a generated column + await db.insert(users).values({ + firstName: 'test', + lastName: 
'test', + email: 'test', + fullName: 'test', + }); +} + +{ + await db.update(users).set({ + firstName: 'test', + lastName: 'test', + email: 'test', + // @ts-expect-error - Can't use the fullName because it's a generated column + fullName: 'test', + }); +} diff --git a/drizzle-orm/type-tests/singlestore/insert.ts b/drizzle-orm/type-tests/singlestore/insert.ts new file mode 100644 index 000000000..738bf669d --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/insert.ts @@ -0,0 +1,135 @@ +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { int, singlestoreTable, text } from '~/singlestore-core/index.ts'; +import type { SingleStoreInsert } from '~/singlestore-core/index.ts'; +import type { SingleStoreRawQueryResult } from '~/singlestore/index.ts'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; +import { users } from './tables.ts'; + +const singlestoreInsertReturning = await db.insert(users).values({ + // ^? + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).$returningId(); + +Expect>; + +const insert = await db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}); +Expect>; + +const insertStmt = db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).prepare(); +const insertPrepared = await insertStmt.execute(); +Expect>; + +const insertSql = await db.insert(users).values({ + homeCity: sql`123`, + class: 'A', + age1: 1, + enumCol: sql`foobar`, +}); +Expect>; + +const insertSqlStmt = db.insert(users).values({ + homeCity: sql`123`, + class: 'A', + age1: 1, + enumCol: sql`foobar`, +}).prepare(); +const insertSqlPrepared = await insertSqlStmt.execute(); +Expect>; + +const insertReturning = await db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}); +Expect>; + +const insertReturningStmt = db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).prepare(); +const 
insertReturningPrepared = await insertReturningStmt.execute(); +Expect>; + +const insertReturningPartial = await db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}); +Expect>; + +const insertReturningPartialStmt = db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).prepare(); +const insertReturningPartialPrepared = await insertReturningPartialStmt.execute(); +Expect>; + +const insertReturningSql = await db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: sql`2 + 2`, + enumCol: 'a', +}); +Expect>; + +const insertReturningSqlStmt = db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: sql`2 + 2`, + enumCol: 'a', +}).prepare(); +const insertReturningSqlPrepared = await insertReturningSqlStmt.execute(); +Expect>; + +{ + const users = singlestoreTable('users', { + id: int('id').autoincrement().primaryKey(), + name: text('name').notNull(), + age: int('age'), + occupation: text('occupation'), + }); + + await db.insert(users).values({ name: 'John Wick', age: 58, occupation: 'housekeeper' }); +} + +{ + function dynamic(qb: T) { + return qb.onDuplicateKeyUpdate({ set: {} }); + } + + const qbBase = db.insert(users).values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0 }).$dynamic(); + const qb = dynamic(qbBase); + const result = await qb; + + Expect>; +} + +{ + db + .insert(users) + .values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0 }) + .onDuplicateKeyUpdate({ set: {} }) + // @ts-expect-error method was already called + .onDuplicateKeyUpdate({ set: {} }); +} diff --git a/drizzle-orm/type-tests/singlestore/select.ts b/drizzle-orm/type-tests/singlestore/select.ts new file mode 100644 index 000000000..10a7551a7 --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/select.ts @@ -0,0 +1,606 @@ +import { + and, + between, + eq, + exists, + gt, + gte, + ilike, + inArray, + isNotNull, + isNull, + like, + lt, + lte, + ne, + not, + notBetween, + notExists, + notIlike, + notInArray, + 
notLike, + or, +} from '~/expressions.ts'; +import { alias } from '~/singlestore-core/alias.ts'; +import { param, sql } from '~/sql/sql.ts'; + +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { QueryBuilder, type SingleStoreSelect, type SingleStoreSelectQueryBuilder } from '~/singlestore-core/index.ts'; +import { db } from './db.ts'; +import { cities, classes, newYorkers, users } from './tables.ts'; + +const city = alias(cities, 'city'); +const city1 = alias(cities, 'city1'); + +const join = await db + .select({ + users, + cities, + city, + city1: { + id: city1.id, + }, + }) + .from(users) + .leftJoin(cities, eq(users.id, cities.id)) + .rightJoin(city, eq(city.id, users.id)) + .rightJoin(city1, eq(city1.id, users.id)); + +Expect< + Equal< + { + users: { + id: number; + text: string | null; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + } | null; + cities: { + id: number; + name: string; + population: number | null; + } | null; + city: { + id: number; + name: string; + population: number | null; + } | null; + city1: { + id: number; + }; + }[], + typeof join + > +>; + +const join2 = await db + .select({ + userId: users.id, + cityId: cities.id, + }) + .from(users) + .fullJoin(cities, eq(users.id, cities.id)); + +Expect< + Equal< + { + userId: number | null; + cityId: number | null; + }[], + typeof join2 + > +>; + +const join3 = await db + .select({ + userId: users.id, + cityId: cities.id, + classId: classes.id, + }) + .from(users) + .fullJoin(cities, eq(users.id, cities.id)) + .rightJoin(classes, eq(users.id, classes.id)); + +Expect< + Equal< + { + userId: number | null; + cityId: number | null; + classId: number; + }[], + typeof join3 + > +>; + +db + .select() + .from(users) + .where(exists(db.select().from(cities).where(eq(users.homeCity, 
cities.id)))); + +function mapFunkyFuncResult(valueFromDriver: unknown) { + return { + foo: (valueFromDriver as Record)['foo'], + }; +} + +const age = 1; + +const allOperators = await db + .select({ + col2: sql`5 - ${users.id} + 1`, // unknown + col3: sql`${users.id} + 1`, // number + col33: sql`${users.id} + 1`.mapWith(users.id), // number + col34: sql`${users.id} + 1`.mapWith(mapFunkyFuncResult), // number + col4: sql`one_or_another(${users.id}, ${users.class})`, // string | number + col5: sql`true`, // unknown + col6: sql`true`, // boolean + col7: sql`random()`, // number + col8: sql`some_funky_func(${users.id})`.mapWith(mapFunkyFuncResult), // { foo: string } + col9: sql`greatest(${users.createdAt}, ${param(new Date(), users.createdAt)})`, // unknown + col10: sql`date_or_false(${users.createdAt}, ${param(new Date(), users.createdAt)})`, // Date | boolean + col11: sql`${users.age1} + ${age}`, // unknown + col12: sql`${users.age1} + ${param(age, users.age1)}`, // unknown + col13: sql`lower(${users.class})`, // unknown + col14: sql`length(${users.class})`, // number + count: sql`count(*)::int`, // number + }) + .from(users) + .where(and( + eq(users.id, 1), + ne(users.id, 1), + or(eq(users.id, 1), ne(users.id, 1)), + not(eq(users.id, 1)), + gt(users.id, 1), + gte(users.id, 1), + lt(users.id, 1), + lte(users.id, 1), + inArray(users.id, [1, 2, 3]), + inArray(users.id, db.select({ id: users.id }).from(users)), + inArray(users.id, sql`select id from ${users}`), + notInArray(users.id, [1, 2, 3]), + notInArray(users.id, db.select({ id: users.id }).from(users)), + notInArray(users.id, sql`select id from ${users}`), + isNull(users.subClass), + isNotNull(users.id), + exists(db.select({ id: users.id }).from(users)), + exists(sql`select id from ${users}`), + notExists(db.select({ id: users.id }).from(users)), + notExists(sql`select id from ${users}`), + between(users.id, 1, 2), + notBetween(users.id, 1, 2), + like(users.id, '%1%'), + notLike(users.id, '%1%'), + 
ilike(users.id, '%1%'), + notIlike(users.id, '%1%'), + )); + +Expect< + Equal<{ + col2: unknown; + col3: number; + col33: number; + col34: { foo: any }; + col4: string | number; + col5: unknown; + col6: boolean; + col7: number; + col8: { + foo: any; + }; + col9: unknown; + col10: boolean | Date; + col11: unknown; + col12: unknown; + col13: unknown; + col14: number; + count: number; + }[], typeof allOperators> +>; + +const textSelect = await db + .select({ + t: users.text, + }) + .from(users); + +Expect>; + +const homeCity = alias(cities, 'homeCity'); +const c = alias(classes, 'c'); +const otherClass = alias(classes, 'otherClass'); +const anotherClass = alias(classes, 'anotherClass'); +const friend = alias(users, 'friend'); +const currentCity = alias(cities, 'currentCity'); +const subscriber = alias(users, 'subscriber'); +const closestCity = alias(cities, 'closestCity'); + +const megaJoin = await db + .select({ + user: { + id: users.id, + maxAge: sql`max(${users.age1})`, + }, + city: { + id: cities.id, + }, + homeCity, + c, + otherClass, + anotherClass, + friend, + currentCity, + subscriber, + closestCity, + }) + .from(users) + .innerJoin(cities, sql`${users.id} = ${cities.id}`) + .innerJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) + .innerJoin(c, eq(c.id, users.class)) + .innerJoin(otherClass, sql`${c.id} = ${otherClass.id}`) + .innerJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) + .innerJoin(friend, sql`${users.id} = ${friend.id}`) + .innerJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) + .innerJoin(subscriber, sql`${users.class} = ${subscriber.id}`) + .innerJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) + .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) + .limit(1) + .offset(1); + +Expect< + Equal< + { + user: { + id: number; + maxAge: unknown; + }; + city: { + id: number; + }; + homeCity: { + id: number; + name: string; + population: number | null; + }; + c: { + id: number; + class: 'A' | 'C' | null; + 
subClass: 'B' | 'D'; + }; + otherClass: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + anotherClass: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + friend: { + id: number; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + }; + currentCity: { + id: number; + name: string; + population: number | null; + }; + subscriber: { + id: number; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + }; + closestCity: { + id: number; + name: string; + population: number | null; + }; + }[], + typeof megaJoin + > +>; + +const friends = alias(users, 'friends'); + +const join4 = await db + .select({ + user: { + id: users.id, + }, + city: { + id: cities.id, + }, + class: classes, + friend: friends, + }) + .from(users) + .innerJoin(cities, sql`${users.id} = ${cities.id}`) + .innerJoin(classes, sql`${cities.id} = ${classes.id}`) + .innerJoin(friends, sql`${friends.id} = ${users.id}`) + .where(sql`${users.age1} > 0`); + +Expect< + Equal<{ + user: { + id: number; + }; + city: { + id: number; + }; + class: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + friend: { + id: number; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + }; + }[], typeof join4> +>; + +{ + const authenticated = false as boolean; + + const result = await db + .select({ + id: users.id, + ...(authenticated ? 
{ city: users.homeCity } : {}), + }) + .from(users); + + Expect< + Equal< + { + id: number; + city?: number; + }[], + typeof result + > + >; +} + +await db.select().from(users).for('update'); +await db.select().from(users).for('share', { skipLocked: true }); +await db.select().from(users).for('update', { noWait: true }); +await db + .select() + .from(users) + // @ts-expect-error - can't use both skipLocked and noWait + .for('share', { noWait: true, skipLocked: true }); + +{ + const result = await db.select().from(newYorkers); + Expect< + Equal< + { + userId: number; + cityId: number | null; + }[], + typeof result + > + >; +} + +{ + const result = await db.select({ userId: newYorkers.userId }).from(newYorkers); + Expect< + Equal< + { + userId: number; + }[], + typeof result + > + >; +} + +{ + const query = db.select().from(users).prepare().iterator(); + for await (const row of query) { + Expect>(); + } +} + +{ + db + .select() + .from(users) + .where(eq(users.id, 1)); + + db + .select() + .from(users) + .where(eq(users.id, 1)) + // @ts-expect-error - can't use where twice + .where(eq(users.id, 1)); + + db + .select() + .from(users) + .where(eq(users.id, 1)) + .limit(10) + // @ts-expect-error - can't use where twice + .where(eq(users.id, 1)); +} + +{ + function withFriends(qb: T) { + const friends = alias(users, 'friends'); + const friends2 = alias(users, 'friends2'); + const friends3 = alias(users, 'friends3'); + const friends4 = alias(users, 'friends4'); + const friends5 = alias(users, 'friends5'); + return qb + .leftJoin(friends, sql`true`) + .leftJoin(friends2, sql`true`) + .leftJoin(friends3, sql`true`) + .leftJoin(friends4, sql`true`) + .leftJoin(friends5, sql`true`); + } + + const qb = db.select().from(users).$dynamic(); + const result = await withFriends(qb); + Expect< + Equal + >; +} + +{ + function withFriends(qb: T) { + const friends = alias(users, 'friends'); + const friends2 = alias(users, 'friends2'); + const friends3 = alias(users, 'friends3'); + const 
friends4 = alias(users, 'friends4'); + const friends5 = alias(users, 'friends5'); + return qb + .leftJoin(friends, sql`true`) + .leftJoin(friends2, sql`true`) + .leftJoin(friends3, sql`true`) + .leftJoin(friends4, sql`true`) + .leftJoin(friends5, sql`true`); + } + + const qb = db.select().from(users).$dynamic(); + const result = await withFriends(qb); + Expect< + Equal + >; +} + +{ + function dynamic(qb: T) { + return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); + } + + const qb = db.select().from(users).$dynamic(); + const result = await dynamic(qb); + Expect>; +} + +{ + // TODO: add to docs + function dynamic(qb: T) { + return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); + } + + const query = new QueryBuilder().select().from(users).$dynamic(); + dynamic(query); +} + +{ + // TODO: add to docs + function paginated(qb: T, page: number) { + return qb.limit(10).offset((page - 1) * 10); + } + + const qb = db.select().from(users).$dynamic(); + const result = await paginated(qb, 1); + + Expect>; +} + +{ + db + .select() + .from(users) + .where(sql``) + .limit(10) + // @ts-expect-error method was already called + .where(sql``); + + db + .select() + .from(users) + .having(sql``) + .limit(10) + // @ts-expect-error method was already called + .having(sql``); + + db + .select() + .from(users) + .groupBy(sql``) + .limit(10) + // @ts-expect-error method was already called + .groupBy(sql``); + + db + .select() + .from(users) + .orderBy(sql``) + .limit(10) + // @ts-expect-error method was already called + .orderBy(sql``); + + db + .select() + .from(users) + .limit(10) + .where(sql``) + // @ts-expect-error method was already called + .limit(10); + + db + .select() + .from(users) + .offset(10) + .limit(10) + // @ts-expect-error method was already called + .offset(10); + + db + .select() + .from(users) + .for('update') + .limit(10) + // @ts-expect-error method was already called + 
.for('update'); +} diff --git a/drizzle-orm/type-tests/singlestore/set-operators.ts b/drizzle-orm/type-tests/singlestore/set-operators.ts new file mode 100644 index 000000000..aa4f21b9c --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/set-operators.ts @@ -0,0 +1,223 @@ +import { type Equal, Expect } from 'type-tests/utils.ts'; +import { eq } from '~/expressions.ts'; +import { except, intersect, type SingleStoreSetOperator, union, unionAll } from '~/singlestore-core/index.ts'; +import { sql } from '~/sql/index.ts'; +import { db } from './db.ts'; +import { cities, classes, newYorkers, users } from './tables.ts'; + +const unionTest = await db + .select({ id: users.id }) + .from(users) + .union( + db + .select({ id: users.id }) + .from(users), + ); + +Expect>; + +const unionAllTest = await db + .select({ id: users.id, age: users.age1 }) + .from(users) + .unionAll( + db.select({ id: users.id, age: users.age1 }) + .from(users) + .leftJoin(cities, eq(users.id, cities.id)), + ); + +Expect>; + +const intersectTest = await db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users) + .intersect(({ intersect }) => + intersect( + db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users), + db + .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) + .from(users), + ) + ); + +Expect>; + +const exceptTest = await db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users) + .except( + db + .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) + .from(users), + ); + +Expect>; + +const union2Test = await union(db.select().from(cities), db.select().from(cities), db.select().from(cities)); + +Expect>; + +const unionAll2Test = await unionAll( + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), + db.select().from(cities), +); + +Expect>; + +const intersect2Test = await intersect( + db.select({ + id: cities.id, + name: cities.name, + population: 
cities.population, + }).from(cities), + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), +); + +Expect>; + +const except2Test = await except( + db.select({ + userId: newYorkers.userId, + }) + .from(newYorkers), + db.select({ + userId: newYorkers.userId, + }).from(newYorkers), +); + +Expect>; + +const unionfull = await union(db.select().from(users), db.select().from(users)).orderBy(sql``).limit(1).offset(2); + +Expect< + Equal<{ + id: number; + text: string | null; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + }[], typeof unionfull> +>; + +union(db.select().from(users), db.select().from(users)) + .orderBy(sql``) + // @ts-expect-error - method was already called + .orderBy(sql``); + +union(db.select().from(users), db.select().from(users)) + .offset(1) + // @ts-expect-error - method was already called + .offset(2); + +union(db.select().from(users), db.select().from(users)) + .orderBy(sql``) + // @ts-expect-error - method was already called + .orderBy(sql``); + +{ + function dynamic(qb: T) { + return qb.orderBy(sql``).limit(1).offset(2); + } + + const qb = union(db.select().from(users), db.select().from(users)).$dynamic(); + const result = await dynamic(qb); + Expect>; +} + +await db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users) + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + .intersect(({ intersect }) => intersect(db.select().from(users), db.select().from(users))); + +// All queries in combining statements should return the same number of columns +// and the corresponding columns should have 
compatible data type +// @ts-expect-error +db.select().from(classes).union(db.select({ id: classes.id }).from(classes)); + +// All queries in combining statements should return the same number of columns +// and the corresponding columns should have compatible data type +// @ts-expect-error +db.select({ id: classes.id }).from(classes).union(db.select().from(classes).where(sql``)); + +// All queries in combining statements should return the same number of columns +// and the corresponding columns should have compatible data type +// @ts-expect-error +db.select({ id: classes.id }).from(classes).union(db.select().from(classes)); + +union( + db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), + db.select({ id: cities.id, name: cities.name }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select().from(cities), +); + +union( + db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities), + db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``).limit(3).$dynamic(), + db.select({ id: cities.id, name: cities.name }).from(cities), +); + +union( + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), +); + +union( + 
db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: newYorkers.userId }).from(newYorkers), + db.select({ id: cities.id }).from(cities), +); + +union( + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities).where(sql``), + db.select({ id: sql`${cities.id}` }).from(cities), + db.select({ id: cities.id }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities).where(sql``), +); diff --git a/drizzle-orm/type-tests/singlestore/subquery.ts b/drizzle-orm/type-tests/singlestore/subquery.ts new file mode 100644 index 000000000..e8ee4e80b --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/subquery.ts @@ -0,0 +1,97 @@ +import { Expect } from 'type-tests/utils.ts'; +import { and, eq } from '~/expressions.ts'; +import { alias, int, serial, singlestoreTable, text } from '~/singlestore-core/index.ts'; +import { sql } from '~/sql/sql.ts'; +import type { DrizzleTypeError, Equal } from '~/utils.ts'; +import { db } from './db.ts'; + +const names = singlestoreTable('names', { + id: serial('id').primaryKey(), + name: text('name'), + authorId: int('author_id'), +}); + +const n1 = db + .select({ + id: names.id, + name: names.name, + authorId: names.authorId, + count1: sql`count(1)::int`.as('count1'), + }) + .from(names) + .groupBy(names.id, names.name, names.authorId) + .as('n1'); + +const n2 = db + .select({ + id: names.id, + authorId: names.authorId, + 
totalCount: sql`count(1)::int`.as('totalCount'), + }) + .from(names) + .groupBy(names.id, names.authorId) + .as('n2'); + +const result = await db + .select({ + name: n1.name, + authorId: n1.authorId, + count1: n1.count1, + totalCount: n2.totalCount, + }) + .from(n1) + .innerJoin(n2, and(eq(n2.id, n1.id), eq(n2.authorId, n1.authorId))); + +Expect< + Equal< + { + name: string | null; + authorId: number | null; + count1: number; + totalCount: number; + }[], + typeof result + > +>; + +const names2 = alias(names, 'names2'); + +const sq1 = db + .select({ + id: names.id, + name: names.name, + id2: names2.id, + }) + .from(names) + .leftJoin(names2, eq(names.name, names2.name)) + .as('sq1'); + +const res = await db.select().from(sq1); + +Expect< + Equal< + { + id: number; + name: string | null; + id2: number | null; + }[], + typeof res + > +>; + +{ + const sq = db.select({ count: sql`count(1)::int` }).from(names).as('sq'); + Expect ? true : false>; +} + +const sqUnion = db.select().from(names).union(db.select().from(names2)).as('sqUnion'); + +const resUnion = await db.select().from(sqUnion); + +Expect< + Equal<{ + id: number; + name: string | null; + authorId: number | null; + }[], typeof resUnion> +>; diff --git a/drizzle-orm/type-tests/singlestore/tables.ts b/drizzle-orm/type-tests/singlestore/tables.ts new file mode 100644 index 000000000..18ed96a30 --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/tables.ts @@ -0,0 +1,751 @@ +import { type Equal, Expect } from 'type-tests/utils.ts'; +import { eq, gt } from '~/expressions.ts'; +import type { BuildColumn, InferSelectModel, Simplify } from '~/index.ts'; +import { + bigint, + char, + customType, + date, + datetime, + decimal, + index, + int, + json, + longtext, + mediumtext, + primaryKey, + serial, + type SingleStoreColumn, + singlestoreEnum, + singlestoreSchema, + singlestoreTable, + text, + timestamp, + tinytext, + unique, + uniqueIndex, + varchar, +} from '~/singlestore-core/index.ts'; + +import { singlestoreView, 
type SingleStoreViewWithSelection } from '~/singlestore-core/view.ts'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; + +export const users = singlestoreTable( + 'users_table', + { + id: serial('id').primaryKey(), + homeCity: int('home_city') + .notNull(), + currentCity: int('current_city'), + serialNullable: serial('serial1'), + serialNotNull: serial('serial2').notNull(), + class: text('class', { enum: ['A', 'C'] }).notNull(), + subClass: text('sub_class', { enum: ['B', 'D'] }), + text: text('text'), + age1: int('age1').notNull(), + createdAt: timestamp('created_at', { mode: 'date' }).notNull().defaultNow(), + enumCol: singlestoreEnum('enum_col', ['a', 'b', 'c']).notNull(), + }, + (users) => ({ + usersAge1Idx: uniqueIndex('usersAge1Idx').on(users.class), + usersAge2Idx: index('usersAge2Idx').on(users.class), + uniqueClass: uniqueIndex('uniqueClass') + .on(users.class, users.subClass) + .lock('default') + .algorythm('copy') + .using(`btree`), + pk: primaryKey(users.age1, users.class), + }), +); + +export const cities = singlestoreTable('cities_table', { + id: serial('id').primaryKey(), + name: text('name_db').notNull(), + population: int('population').default(0), +}, (cities) => ({ + citiesNameIdx: index('citiesNameIdx').on(cities.id), +})); + +Expect< + Equal< + { + id: SingleStoreColumn<{ + name: 'id'; + tableName: 'cities_table'; + dataType: 'number'; + columnType: 'SingleStoreSerial'; + data: number; + driverParam: number; + notNull: true; + hasDefault: true; + isPrimaryKey: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isAutoincrement: true; + hasRuntimeDefault: false; + }, object>; + name: SingleStoreColumn<{ + name: 'name_db'; + tableName: 'cities_table'; + dataType: 'string'; + columnType: 'SingleStoreText'; + data: string; + driverParam: string; + notNull: true; + hasDefault: false; + isPrimaryKey: false; + enumValues: [string, ...string[]]; + baseColumn: never; + generated: undefined; + isAutoincrement: 
false; + hasRuntimeDefault: false; + }, object>; + population: SingleStoreColumn<{ + name: 'population'; + tableName: 'cities_table'; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + notNull: false; + hasDefault: true; + isPrimaryKey: false; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isAutoincrement: false; + hasRuntimeDefault: false; + }, object>; + }, + typeof cities._.columns + > +>; + +Expect< + Equal<{ + id: number; + name_db: string; + population: number | null; + }, InferSelectModel> +>; + +Expect< + Equal<{ + id?: number; + name: string; + population?: number | null; + }, typeof cities.$inferInsert> +>; + +export const customSchema = singlestoreSchema('custom_schema'); + +export const citiesCustom = customSchema.table('cities_table', { + id: serial('id').primaryKey(), + name: text('name_db').notNull(), + population: int('population').default(0), +}, (cities) => ({ + citiesNameIdx: index('citiesNameIdx').on(cities.id), +})); + +Expect>; + +export const classes = singlestoreTable('classes_table', { + id: serial('id').primaryKey(), + class: text('class', { enum: ['A', 'C'] }), + subClass: text('sub_class', { enum: ['B', 'D'] }).notNull(), +}); + +/* export const classes2 = singlestoreTable('classes_table', { + id: serial().primaryKey(), + class: text({ enum: ['A', 'C'] }).$dbName('class_db'), + subClass: text({ enum: ['B', 'D'] }).notNull(), +}); */ + +export const newYorkers = singlestoreView('new_yorkers') + .algorithm('merge') + .definer('root@localhost') + .sqlSecurity('definer') + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + .from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + +Expect< + Equal< + SingleStoreViewWithSelection<'new_yorkers', false, { + userId: SingleStoreColumn<{ + 
name: 'id'; + dataType: 'number'; + columnType: 'SingleStoreSerial'; + data: number; + driverParam: number; + notNull: true; + hasDefault: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: true; + isAutoincrement: true; + hasRuntimeDefault: false; + }>; + cityId: SingleStoreColumn<{ + name: 'id'; + dataType: 'number'; + columnType: 'SingleStoreSerial'; + data: number; + driverParam: number; + notNull: false; + hasDefault: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: true; + isAutoincrement: true; + hasRuntimeDefault: false; + }>; + }>, + typeof newYorkers + > +>; + +{ + const newYorkers = customSchema.view('new_yorkers') + .algorithm('merge') + .definer('root@localhost') + .sqlSecurity('definer') + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + .from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + + Expect< + Equal< + SingleStoreViewWithSelection<'new_yorkers', false, { + userId: SingleStoreColumn<{ + name: 'id'; + dataType: 'number'; + columnType: 'SingleStoreSerial'; + data: number; + driverParam: number; + notNull: true; + hasDefault: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: true; + isAutoincrement: true; + hasRuntimeDefault: false; + }>; + cityId: SingleStoreColumn<{ + name: 'id'; + dataType: 'number'; + columnType: 'SingleStoreSerial'; + data: number; + driverParam: number; + notNull: false; + hasDefault: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: true; + isAutoincrement: true; + hasRuntimeDefault: false; + }>; + }>, + typeof newYorkers + > + >; +} + +{ + const newYorkers = 
singlestoreView('new_yorkers', { + userId: int('user_id').notNull(), + cityId: int('city_id'), + }) + .algorithm('merge') + .definer('root@localhost') + .sqlSecurity('definer') + .as( + sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ + eq(cities.id, users.homeCity) + } where ${gt(users.age1, 18)}`, + ); + + Expect< + Equal< + SingleStoreViewWithSelection<'new_yorkers', false, { + userId: SingleStoreColumn<{ + name: 'user_id'; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: SingleStoreColumn<{ + name: 'city_id'; + notNull: false; + hasDefault: false; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + typeof newYorkers + > + >; +} + +{ + const newYorkers = customSchema.view('new_yorkers', { + userId: int('user_id').notNull(), + cityId: int('city_id'), + }) + .algorithm('merge') + .definer('root@localhost') + .sqlSecurity('definer') + .as( + sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ + eq(cities.id, users.homeCity) + } where ${gt(users.age1, 18)}`, + ); + + Expect< + Equal< + SingleStoreViewWithSelection<'new_yorkers', false, { + userId: SingleStoreColumn<{ + name: 'user_id'; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: 
false; + hasRuntimeDefault: false; + }>; + cityId: SingleStoreColumn<{ + name: 'city_id'; + notNull: false; + hasDefault: false; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + typeof newYorkers + > + >; +} + +{ + const newYorkers = singlestoreView('new_yorkers', { + userId: int('user_id').notNull(), + cityId: int('city_id'), + }).existing(); + + Expect< + Equal< + SingleStoreViewWithSelection<'new_yorkers', true, { + userId: SingleStoreColumn<{ + name: 'user_id'; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: SingleStoreColumn<{ + name: 'city_id'; + notNull: false; + hasDefault: false; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + typeof newYorkers + > + >; +} + +{ + const newYorkers = customSchema.view('new_yorkers', { + userId: int('user_id').notNull(), + cityId: int('city_id'), + }).existing(); + + Expect< + Equal< + SingleStoreViewWithSelection<'new_yorkers', true, { + userId: SingleStoreColumn<{ + name: 'user_id'; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + 
hasRuntimeDefault: false; + }>; + cityId: SingleStoreColumn<{ + name: 'city_id'; + notNull: false; + hasDefault: false; + dataType: 'number'; + columnType: 'SingleStoreInt'; + data: number; + driverParam: string | number; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + typeof newYorkers + > + >; +} + +{ + const customText = customType<{ data: string }>({ + dataType() { + return 'text'; + }, + }); + + const t = customText('name').notNull(); + Expect< + Equal< + { + brand: 'Column'; + name: 'name'; + tableName: 'table'; + dataType: 'custom'; + columnType: 'SingleStoreCustomColumn'; + data: string; + driverParam: unknown; + notNull: true; + hasDefault: false; + enumValues: undefined; + baseColumn: never; + dialect: 'singlestore'; + generated: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }, + Simplify['_']> + > + >; +} + +{ + singlestoreTable('test', { + bigint: bigint('bigint', { mode: 'bigint' }), + number: bigint('number', { mode: 'number' }), + date: date('date').default(new Date()), + date2: date('date2', { mode: 'date' }).default(new Date()), + date3: date('date3', { mode: 'string' }).default('2020-01-01'), + date4: date('date4', { mode: undefined }).default(new Date()), + datetime: datetime('datetime').default(new Date()), + datetime2: datetime('datetime2', { mode: 'date' }).default(new Date()), + datetime3: datetime('datetime3', { mode: 'string' }).default('2020-01-01'), + datetime4: datetime('datetime4', { mode: undefined }).default(new Date()), + timestamp: timestamp('timestamp').default(new Date()), + timestamp2: timestamp('timestamp2', { mode: 'date' }).default(new Date()), + timestamp3: timestamp('timestamp3', { mode: 'string' }).default('2020-01-01'), + timestamp4: timestamp('timestamp4', { mode: undefined }).default(new Date()), + }); +} + +{ + singlestoreTable('test', { 
+ col1: decimal('col1').default('1'), + }); +} + +{ + const test = singlestoreTable('test', { + test1: singlestoreEnum('test', ['a', 'b', 'c'] as const).notNull(), + test2: singlestoreEnum('test', ['a', 'b', 'c']).notNull(), + test3: varchar('test', { length: 255, enum: ['a', 'b', 'c'] as const }).notNull(), + test4: varchar('test', { length: 255, enum: ['a', 'b', 'c'] }).notNull(), + test5: text('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test6: text('test', { enum: ['a', 'b', 'c'] }).notNull(), + test7: tinytext('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test8: tinytext('test', { enum: ['a', 'b', 'c'] }).notNull(), + test9: mediumtext('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test10: mediumtext('test', { enum: ['a', 'b', 'c'] }).notNull(), + test11: longtext('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test12: longtext('test', { enum: ['a', 'b', 'c'] }).notNull(), + test13: char('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test14: char('test', { enum: ['a', 'b', 'c'] }).notNull(), + test15: text('test').notNull(), + }); + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; +} + +{ // All types with generated columns + const test = singlestoreTable('test', { + test1: singlestoreEnum('test', ['a', 'b', 'c'] as const).generatedAlwaysAs(sql``), + test2: singlestoreEnum('test', ['a', 'b', 'c']).generatedAlwaysAs(sql``), + test3: varchar('test', { length: 255, enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + test4: varchar('test', { length: 255, enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + test5: text('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + test6: text('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + test7: tinytext('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + test8: tinytext('test', { enum: ['a', 'b', 
'c'] }).generatedAlwaysAs(sql``), + test9: mediumtext('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + test10: mediumtext('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + test11: longtext('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + test12: longtext('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + test13: char('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + test14: char('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + test15: text('test').generatedAlwaysAs(sql``), + }); + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; +} + +{ + const getUsersTable = (schemaName: TSchema) => { + return singlestoreSchema(schemaName).table('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + }; + + const users1 = getUsersTable('id1'); + Expect>; + + const users2 = getUsersTable('id2'); + Expect>; +} + +{ + const internalStaff = singlestoreTable('internal_staff', { + userId: int('user_id').notNull(), + }); + + const customUser = singlestoreTable('custom_user', { + id: int('id').notNull(), + }); + + const ticket = singlestoreTable('ticket', { + staffId: int('staff_id').notNull(), + }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin( + customUser, + eq(internalStaff.userId, customUser.id), + ).as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); + + Expect< + Equal<{ + internal_staff: { + internal_staff: { + userId: number; + }; + custom_user: { + id: number | null; + }; + } | null; + ticket: { + staffId: number; + }; + }[], typeof mainQuery> + >; +} + +{ + const newYorkers = singlestoreView('new_yorkers') + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + 
.from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + + await db.select().from(newYorkers).leftJoin(newYorkers, eq(newYorkers.userId, newYorkers.userId)); +} + +{ + const test = singlestoreTable('test', { + id: text('id').$defaultFn(() => crypto.randomUUID()).primaryKey(), + }); + + Expect< + Equal<{ + id?: string; + }, typeof test.$inferInsert> + >; +} + +{ + singlestoreTable('test', { + id: int('id').$default(() => 1), + id2: int('id').$defaultFn(() => 1), + // @ts-expect-error - should be number + id3: int('id').$default(() => '1'), + // @ts-expect-error - should be number + id4: int('id').$defaultFn(() => '1'), + }); +} +{ + const emailLog = singlestoreTable( + 'email_log', + { + id: int('id', { unsigned: true }).autoincrement().notNull(), + clientId: int('id_client', { unsigned: true }), + receiverEmail: varchar('receiver_email', { length: 255 }).notNull(), + messageId: varchar('message_id', { length: 255 }), + contextId: int('context_id', { unsigned: true }), + contextType: singlestoreEnum('context_type', ['test']).$type<['test']>(), + action: varchar('action', { length: 80 }).$type<['test']>(), + events: json('events').$type<{ t: 'test' }[]>(), + createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().onUpdateNow(), + }, + (table) => { + return { + emailLogId: primaryKey({ columns: [table.id], name: 'email_log_id' }), + emailLogMessageIdUnique: unique('email_log_message_id_unique').on(table.messageId), + }; + }, + ); + + Expect< + Equal<{ + receiverEmail: string; + id?: number | undefined; + createdAt?: string | undefined; + clientId?: number | null | undefined; + messageId?: string | null | undefined; + contextId?: number | null | undefined; + contextType?: ['test'] | null | undefined; + action?: ['test'] | null | undefined; + events?: 
+ | { + t: 'test'; + }[] + | null + | undefined; + updatedAt?: string | null | undefined; + }, typeof emailLog.$inferInsert> + >; +} diff --git a/drizzle-orm/type-tests/singlestore/update.ts b/drizzle-orm/type-tests/singlestore/update.ts new file mode 100644 index 000000000..3f10ae2e4 --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/update.ts @@ -0,0 +1,26 @@ +import { type Equal, Expect } from 'type-tests/utils.ts'; +import type { SingleStoreUpdate } from '~/singlestore-core/index.ts'; +import type { SingleStoreRawQueryResult } from '~/singlestore/session.ts'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; +import { users } from './tables.ts'; + +{ + function dynamic(qb: T) { + return qb.where(sql``); + } + + const qbBase = db.update(users).set({}).$dynamic(); + const qb = dynamic(qbBase); + const result = await qb; + Expect>; +} + +{ + db + .update(users) + .set({}) + .where(sql``) + // @ts-expect-error method was already called + .where(sql``); +} diff --git a/drizzle-orm/type-tests/singlestore/with.ts b/drizzle-orm/type-tests/singlestore/with.ts new file mode 100644 index 000000000..77309e32a --- /dev/null +++ b/drizzle-orm/type-tests/singlestore/with.ts @@ -0,0 +1,80 @@ +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { gt, inArray } from '~/expressions.ts'; +import { int, serial, singlestoreTable, text } from '~/singlestore-core/index.ts'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; + +const orders = singlestoreTable('orders', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull(), + amount: int('amount').notNull(), + quantity: int('quantity').notNull(), + generated: text('generatedText').generatedAlwaysAs(sql``), +}); + +{ + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + 
.from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: orders.region, + totalSales: orders.amount, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`sum(${orders.quantity})`, + productSales: sql`sum(${orders.amount})`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))); + + Expect< + Equal<{ + region: string; + product: string; + productUnits: number; + productSales: number; + }[], typeof result> + >; + + const allOrdersWith = db.$with('all_orders_with').as(db.select().from(orders)); + const allFromWith = await db.with(allOrdersWith).select().from(allOrdersWith); + + Expect< + Equal<{ + id: number; + region: string; + product: string; + amount: number; + quantity: number; + generated: string | null; + }[], typeof allFromWith> + >; +} diff --git a/integration-tests/.env.example b/integration-tests/.env.example index 50b3c95f2..70952e4f7 100644 --- a/integration-tests/.env.example +++ b/integration-tests/.env.example @@ -1,5 +1,6 @@ PG_CONNECTION_STRING="postgres://postgres:postgres@localhost:55432/postgres" MYSQL_CONNECTION_STRING="mysql://root:mysql@127.0.0.1:33306/drizzle" +SINGLESTORE_CONNECTION_STRING="singlestore://root:singlestore@localhost:3306/drizzle" PLANETSCALE_CONNECTION_STRING= TIDB_CONNECTION_STRING= NEON_CONNECTION_STRING= diff --git a/integration-tests/drizzle2/singlestore/0000_nostalgic_carnage.sql b/integration-tests/drizzle2/singlestore/0000_nostalgic_carnage.sql new file mode 100644 index 000000000..50efe47da --- /dev/null +++ b/integration-tests/drizzle2/singlestore/0000_nostalgic_carnage.sql @@ -0,0 +1,20 @@ +CREATE TABLE `cities_migration` ( + 
`id` int, + `fullname_name` text, + `state` text +); +--> statement-breakpoint +CREATE TABLE `users_migration` ( + `id` int PRIMARY KEY NOT NULL, + `full_name` text, + `phone` int, + `invited_by` int, + `city_id` int, + `date` timestamp DEFAULT now() +); +--> statement-breakpoint +CREATE TABLE `users12` ( + `id` serial AUTO_INCREMENT PRIMARY KEY NOT NULL, + `name` text NOT NULL, + `email` text NOT NULL +); diff --git a/integration-tests/drizzle2/singlestore/meta/0000_snapshot.json b/integration-tests/drizzle2/singlestore/meta/0000_snapshot.json new file mode 100644 index 000000000..63d5ad187 --- /dev/null +++ b/integration-tests/drizzle2/singlestore/meta/0000_snapshot.json @@ -0,0 +1,132 @@ +{ + "version": "1", + "dialect": "singlestore", + "id": "8e8c8378-0496-40f6-88e3-98aab8282b1f", + "prevId": "00000000-0000-0000-0000-000000000000", + "tables": { + "cities_migration": { + "name": "cities_migration", + "columns": { + "id": { + "name": "id", + "type": "int", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "fullname_name": { + "name": "fullname_name", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "state": { + "name": "state", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {} + }, + "users_migration": { + "name": "users_migration", + "columns": { + "id": { + "name": "id", + "type": "int", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "full_name": { + "name": "full_name", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "phone": { + "name": "phone", + "type": "int", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "invited_by": { + "name": "invited_by", + "type": "int", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "city_id": { + "name": "city_id", + "type": 
"int", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "date": { + "name": "date", + "type": "timestamp", + "primaryKey": false, + "notNull": false, + "autoincrement": false, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {} + }, + "users12": { + "name": "users12", + "columns": { + "id": { + "name": "id", + "type": "serial", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "email": { + "name": "email", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "my_unique_index": { + "name": "my_unique_index", + "columns": [ + "name" + ], + "isUnique": true, + "using": "btree" + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {} + } + }, + "schemas": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + } +} diff --git a/integration-tests/drizzle2/singlestore/meta/_journal.json b/integration-tests/drizzle2/singlestore/meta/_journal.json new file mode 100644 index 000000000..49e74f169 --- /dev/null +++ b/integration-tests/drizzle2/singlestore/meta/_journal.json @@ -0,0 +1,13 @@ +{ + "version": "1", + "dialect": "singlestore", + "entries": [ + { + "idx": 0, + "version": "1", + "when": 1680270921944, + "tag": "0000_nostalgic_carnage", + "breakpoints": true + } + ] +} diff --git a/integration-tests/package.json b/integration-tests/package.json index 20c2d1fc3..a4fcab0b2 100644 --- a/integration-tests/package.json +++ b/integration-tests/package.json @@ -8,7 +8,7 @@ "test": "pnpm test:vitest", "test:vitest": "vitest run", "test:esm": "node tests/imports.test.mjs && node tests/imports.test.cjs", - "test:data-api": "sst shell vitest run tests/awsdatapi.test.ts" + "test:data-api": "sst shell vitest run tests/pg/awsdatapi.test.ts" }, "keywords": [], "author": "Drizzle Team", diff --git 
a/integration-tests/tests/mysql/mysql-common.ts b/integration-tests/tests/mysql/mysql-common.ts index c96ae9319..58f7a1e2c 100644 --- a/integration-tests/tests/mysql/mysql-common.ts +++ b/integration-tests/tests/mysql/mysql-common.ts @@ -1155,6 +1155,30 @@ export function tests(driver?: string) { expect(result).toEqual([{ id: 1, name: 'John' }]); }); + test('insert: placeholders on columns with encoder', async (ctx) => { + const { db } = ctx.mysql; + + const date = new Date('2024-08-07T15:30:00Z'); + + const statement = db.insert(usersTable).values({ + name: 'John', + createdAt: sql.placeholder('createdAt'), + }).prepare(); + + await statement.execute({ createdAt: date }); + + const result = await db + .select({ + id: usersTable.id, + createdAt: usersTable.createdAt, + }) + .from(usersTable); + + expect(result).toEqual([ + { id: 1, createdAt: date }, + ]); + }); + test('prepared statement reuse', async (ctx) => { const { db } = ctx.mysql; @@ -2059,6 +2083,45 @@ export function tests(driver?: string) { await db.execute(sql`drop table ${products}`); }); + test('transaction with options (set isolationLevel)', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users_transactions', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + const products = mysqlTable('products_transactions', { + id: serial('id').primaryKey(), + price: int('price').notNull(), + stock: int('stock').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop table if exists ${products}`); + + await db.execute(sql`create table users_transactions (id serial not null primary key, balance int not null)`); + await db.execute( + sql`create table products_transactions (id serial not null primary key, price int not null, stock int not null)`, + ); + + const [{ insertId: userId }] = await db.insert(users).values({ balance: 100 }); + const user = await db.select().from(users).where(eq(users.id, 
userId)).then((rows) => rows[0]!); + const [{ insertId: productId }] = await db.insert(products).values({ price: 10, stock: 10 }); + const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + }, { isolationLevel: 'serializable' }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 90 }]); + + await db.execute(sql`drop table ${users}`); + await db.execute(sql`drop table ${products}`); + }); + test('transaction rollback', async (ctx) => { const { db } = ctx.mysql; diff --git a/integration-tests/tests/mysql/mysql-planetscale.test.ts b/integration-tests/tests/mysql/mysql-planetscale.test.ts index 8c7e74543..763b9c8e6 100644 --- a/integration-tests/tests/mysql/mysql-planetscale.test.ts +++ b/integration-tests/tests/mysql/mysql-planetscale.test.ts @@ -59,6 +59,7 @@ skipTests([ // to redefine in this file 'utc config for datetime', 'transaction', + 'transaction with options (set isolationLevel)', 'having', 'select count()', 'insert via db.execute w/ query builder', diff --git a/integration-tests/tests/mysql/mysql-proxy.test.ts b/integration-tests/tests/mysql/mysql-proxy.test.ts index cb8e4b758..1cf8345c4 100644 --- a/integration-tests/tests/mysql/mysql-proxy.test.ts +++ b/integration-tests/tests/mysql/mysql-proxy.test.ts @@ -129,6 +129,7 @@ skipTests([ 'nested transaction', 'transaction rollback', 'transaction', + 'transaction with options (set isolationLevel)', 'migrator', ]); diff --git a/integration-tests/tests/mysql/tidb-serverless.test.ts b/integration-tests/tests/mysql/tidb-serverless.test.ts index 8187882af..9121c31de 100644 --- a/integration-tests/tests/mysql/tidb-serverless.test.ts +++ 
b/integration-tests/tests/mysql/tidb-serverless.test.ts @@ -66,6 +66,7 @@ skipTests([ 'select iterator w/ prepared statement', 'select iterator', 'transaction', + 'transaction with options (set isolationLevel)', 'Insert all defaults in multiple rows', 'Insert all defaults in 1 row', '$default with empty array', diff --git a/integration-tests/tests/pg/awsdatapi.test.ts b/integration-tests/tests/pg/awsdatapi.test.ts index 22ad8e770..8ee39cf12 100644 --- a/integration-tests/tests/pg/awsdatapi.test.ts +++ b/integration-tests/tests/pg/awsdatapi.test.ts @@ -2,7 +2,7 @@ import 'dotenv/config'; import { RDSDataClient } from '@aws-sdk/client-rds-data'; import * as dotenv from 'dotenv'; -import { asc, eq, inArray, notInArray, sql, TransactionRollbackError } from 'drizzle-orm'; +import { asc, eq, inArray, notInArray, relations, sql, TransactionRollbackError } from 'drizzle-orm'; import type { AwsDataApiPgDatabase } from 'drizzle-orm/aws-data-api/pg'; import { drizzle } from 'drizzle-orm/aws-data-api/pg'; import { migrate } from 'drizzle-orm/aws-data-api/pg/migrator'; @@ -18,6 +18,7 @@ import { text, time, timestamp, + uuid, } from 'drizzle-orm/pg-core'; import { Resource } from 'sst'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; @@ -34,8 +35,13 @@ const usersTable = pgTable('users', { name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: jsonb('jsonb').$type(), - bestTexts: text('best_texts').array().default(sql`'{}'`).notNull(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + bestTexts: text('best_texts') + .array() + .default(sql`'{}'`) + .notNull(), + createdAt: timestamp('created_at', { withTimezone: true }) + .notNull() + .defaultNow(), }); const usersMigratorTable = pgTable('users12', { @@ -44,7 +50,51 @@ const usersMigratorTable = pgTable('users12', { email: text('email').notNull(), }); -let db: AwsDataApiPgDatabase; +const todo = pgTable('todo', { + 
id: uuid('id').primaryKey(), + title: text('title').notNull(), + description: text('description'), +}); + +const todoRelations = relations(todo, (ctx) => ({ + user: ctx.many(todoUser), +})); + +const user = pgTable('user', { + id: uuid('id').primaryKey(), + email: text('email').notNull(), +}); + +const userRelations = relations(user, (ctx) => ({ + todos: ctx.many(todoUser), +})); + +const todoUser = pgTable('todo_user', { + todoId: uuid('todo_id').references(() => todo.id), + userId: uuid('user_id').references(() => user.id), +}); + +const todoToGroupRelations = relations(todoUser, (ctx) => ({ + todo: ctx.one(todo, { + fields: [todoUser.todoId], + references: [todo.id], + }), + user: ctx.one(user, { + fields: [todoUser.userId], + references: [user.id], + }), +})); + +const schema = { + todo, + todoRelations, + user, + userRelations, + todoUser, + todoToGroupRelations, +}; + +let db: AwsDataApiPgDatabase; beforeAll(async () => { const rdsClient = new RDSDataClient(); @@ -57,6 +107,7 @@ beforeAll(async () => { // @ts-ignore resourceArn: Resource.Postgres.clusterArn, logger: ENABLE_LOGGING, + schema, }); }); @@ -75,6 +126,35 @@ beforeEach(async () => { ) `, ); + + await db.execute( + sql` + create table todo ( + id uuid primary key, + title text not null, + description text + ) + `, + ); + + await db.execute( + sql` + create table "user" ( + id uuid primary key, + email text not null + ) + + `, + ); + + await db.execute( + sql` + create table todo_user ( + todo_id uuid references todo(id), + user_id uuid references "user"(id) + ) + `, + ); }); test('select all fields', async () => { @@ -86,56 +166,68 @@ test('select all fields', async () => { expect(result[0]!.createdAt).toBeInstanceOf(Date); // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 100); - expect(result).toEqual([{ - bestTexts: [], - id: 1, - name: 'John', - verified: false, - jsonb: null, - createdAt: result[0]!.createdAt, - }]); + expect(result).toEqual([ + { + bestTexts: [], + id: 1, + name: 
'John', + verified: false, + jsonb: null, + createdAt: result[0]!.createdAt, + }, + ]); }); test('select sql', async () => { await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); + const users = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + .from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select with empty array in inArray', async () => { - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Jane' }, - { name: 'Jane' }, - ]); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable).where(inArray(usersTable.id, [])); + await db + .insert(usersTable) + .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + const users = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + .from(usersTable) + .where(inArray(usersTable.id, [])); expect(users).toEqual([]); }); test('select with empty array in notInArray', async () => { - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Jane' }, - { name: 'Jane' }, - ]); - const result = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable).where(notInArray(usersTable.id, [])); + await db + .insert(usersTable) + .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + const result = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + .from(usersTable) + .where(notInArray(usersTable.id, [])); - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + expect(result).toEqual([ + { name: 'JOHN' }, + { name: 'JANE' }, + { name: 'JANE' }, + ]); }); test('select typed sql', async () => { await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); + const users = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + 
.from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); @@ -147,7 +239,9 @@ test('select distinct', async () => { }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id integer, name text)`); + await db.execute( + sql`create table ${usersDistinctTable} (id integer, name text)`, + ); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John' }, @@ -155,20 +249,28 @@ test('select distinct', async () => { { id: 2, name: 'John' }, { id: 1, name: 'Jane' }, ]); - const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( - usersDistinctTable.id, - ); - const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( - usersDistinctTable, - ).orderBy(usersDistinctTable.name); + const users1 = await db + .selectDistinct() + .from(usersDistinctTable) + .orderBy(usersDistinctTable.id, usersDistinctTable.name); + const users2 = await db + .selectDistinctOn([usersDistinctTable.id]) + .from(usersDistinctTable) + .orderBy(usersDistinctTable.id); + const users3 = await db + .selectDistinctOn([usersDistinctTable.name], { + name: usersDistinctTable.name, + }) + .from(usersDistinctTable) + .orderBy(usersDistinctTable.name); await db.execute(sql`drop table ${usersDistinctTable}`); - expect(users1).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + expect(users1).toEqual([ + { id: 1, name: 'Jane' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + ]); expect(users2.length).toEqual(2); expect(users2[0]?.id).toEqual(1); @@ -180,79 +282,107 @@ test('select distinct', async () => { }); test('insert returning sql', async () => { - const users = await db.insert(usersTable).values({ name: 'John' }).returning({ - name: 
sql`upper(${usersTable.name})`, - }); + const users = await db + .insert(usersTable) + .values({ name: 'John' }) + .returning({ + name: sql`upper(${usersTable.name})`, + }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('delete returning sql', async () => { await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }); + const users = await db + .delete(usersTable) + .where(eq(usersTable.name, 'John')) + .returning({ + name: sql`upper(${usersTable.name})`, + }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('update returning sql', async () => { await db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - name: sql`upper(${usersTable.name})`, - }); + const users = await db + .update(usersTable) + .set({ name: 'Jane' }) + .where(eq(usersTable.name, 'John')) + .returning({ + name: sql`upper(${usersTable.name})`, + }); expect(users).toEqual([{ name: 'JANE' }]); }); test('update with returning all fields', async () => { await db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning(); + const users = await db + .update(usersTable) + .set({ name: 'Jane' }) + .where(eq(usersTable.name, 'John')) + .returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - expect(users).toEqual([{ - id: 1, - bestTexts: [], - name: 'Jane', - verified: false, - jsonb: null, - createdAt: users[0]!.createdAt, - }]); + expect(users).toEqual([ + { + id: 1, + bestTexts: [], + name: 'Jane', + verified: false, + jsonb: null, + createdAt: users[0]!.createdAt, + }, + ]); }); test('update with returning partial', async () => { await db.insert(usersTable).values({ name: 'John' }); - const 
users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }); + const users = await db + .update(usersTable) + .set({ name: 'Jane' }) + .where(eq(usersTable.name, 'John')) + .returning({ + id: usersTable.id, + name: usersTable.name, + }); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('delete with returning all fields', async () => { await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); + const users = await db + .delete(usersTable) + .where(eq(usersTable.name, 'John')) + .returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - expect(users).toEqual([{ - bestTexts: [], - id: 1, - name: 'John', - verified: false, - jsonb: null, - createdAt: users[0]!.createdAt, - }]); + expect(users).toEqual([ + { + bestTexts: [], + id: 1, + name: 'John', + verified: false, + jsonb: null, + createdAt: users[0]!.createdAt, + }, + ]); }); test('delete with returning partial', async () => { await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ - id: usersTable.id, - name: usersTable.name, - }); + const users = await db + .delete(usersTable) + .where(eq(usersTable.name, 'John')) + .returning({ + id: usersTable.id, + name: usersTable.name, + }); expect(users).toEqual([{ id: 1, name: 'John' }]); }); @@ -260,30 +390,48 @@ test('delete with returning partial', async () => { test('insert + select', async () => { await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); - expect(result).toEqual([{ - bestTexts: [], - id: 1, - name: 'John', - verified: false, - jsonb: null, - createdAt: result[0]!.createdAt, - }]); + expect(result).toEqual([ + { + bestTexts: [], + id: 1, + name: 'John', + 
verified: false, + jsonb: null, + createdAt: result[0]!.createdAt, + }, + ]); await db.insert(usersTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersTable); expect(result2).toEqual([ - { bestTexts: [], id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { bestTexts: [], id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + { + bestTexts: [], + id: 1, + name: 'John', + verified: false, + jsonb: null, + createdAt: result2[0]!.createdAt, + }, + { + bestTexts: [], + id: 2, + name: 'Jane', + verified: false, + jsonb: null, + createdAt: result2[1]!.createdAt, + }, ]); }); test('json insert', async () => { await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }).from(usersTable); + const result = await db + .select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + }) + .from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); @@ -292,29 +440,35 @@ test('insert with overridden default values', async () => { await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); - expect(result).toEqual([{ - bestTexts: [], - id: 1, - name: 'John', - verified: true, - jsonb: null, - createdAt: result[0]!.createdAt, - }]); + expect(result).toEqual([ + { + bestTexts: [], + id: 1, + name: 'John', + verified: true, + jsonb: null, + createdAt: result[0]!.createdAt, + }, + ]); }); test('insert many', async () => { - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }).from(usersTable); + 
await db + .insert(usersTable) + .values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db + .select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }) + .from(usersTable); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, @@ -325,12 +479,14 @@ test('insert many', async () => { }); test('insert many with returning', async () => { - const result = await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) + const result = await db + .insert(usersTable) + .values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]) .returning({ id: usersTable.id, name: usersTable.name, @@ -347,45 +503,73 @@ test('insert many with returning', async () => { }); test('select with group by as field', async () => { - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + await db + .insert(usersTable) + .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - const result = await db.select({ name: usersTable.name }).from(usersTable) + const result = await db + .select({ name: usersTable.name }) + .from(usersTable) .groupBy(usersTable.name); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with group by as sql', async () => { - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + await db + .insert(usersTable) + .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - const result = await db.select({ name: usersTable.name }).from(usersTable) + const result = await db + .select({ name: usersTable.name }) + .from(usersTable) .groupBy(sql`${usersTable.name}`); expect(result).toEqual([{ name: 
'Jane' }, { name: 'John' }]); }); test('select with group by as sql + column', async () => { - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + await db + .insert(usersTable) + .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - const result = await db.select({ name: usersTable.name }).from(usersTable) + const result = await db + .select({ name: usersTable.name }) + .from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id); - expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); + expect(result).toEqual([ + { name: 'Jane' }, + { name: 'Jane' }, + { name: 'John' }, + ]); }); test('select with group by as column + sql', async () => { - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + await db + .insert(usersTable) + .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - const result = await db.select({ name: usersTable.name }).from(usersTable) + const result = await db + .select({ name: usersTable.name }) + .from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`); - expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); + expect(result).toEqual([ + { name: 'Jane' }, + { name: 'Jane' }, + { name: 'John' }, + ]); }); test('select with group by complex query', async () => { - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + await db + .insert(usersTable) + .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - const result = await db.select({ name: usersTable.name }).from(usersTable) + const result = await db + .select({ name: usersTable.name }) + .from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1); @@ -394,7 +578,9 @@ test('select with group by complex query', async () => { }); test('build query', async () => { - const query = db.select({ id: usersTable.id, name: 
usersTable.name }).from(usersTable) + const query = db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); @@ -407,14 +593,19 @@ test('build query', async () => { test('insert sql', async () => { await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + const result = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('partial join with alias', async () => { const customerAlias = alias(usersTable, 'customer'); - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + await db.insert(usersTable).values([ + { id: 10, name: 'Ivan' }, + { id: 11, name: 'Hans' }, + ]); const result = await db .select({ user: { @@ -425,44 +616,53 @@ test('partial join with alias', async () => { id: customerAlias.id, name: customerAlias.name, }, - }).from(usersTable) + }) + .from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); - expect(result).toEqual([{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); + expect(result).toEqual([ + { + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }, + ]); }); test('full join with alias', async () => { const customerAlias = alias(usersTable, 'customer'); - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + await db.insert(usersTable).values([ + { id: 10, name: 'Ivan' }, + { id: 11, name: 'Hans' }, + ]); const result = await db - .select().from(usersTable) + .select() + .from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); - expect(result).toEqual([{ - users: { - id: 10, - bestTexts: [], - name: 'Ivan', - verified: false, - jsonb: null, - createdAt: 
result[0]!.users.createdAt, - }, - customer: { - bestTexts: [], - id: 11, - name: 'Hans', - verified: false, - jsonb: null, - createdAt: result[0]!.customer!.createdAt, + expect(result).toEqual([ + { + users: { + id: 10, + bestTexts: [], + name: 'Ivan', + verified: false, + jsonb: null, + createdAt: result[0]!.users.createdAt, + }, + customer: { + bestTexts: [], + id: 11, + name: 'Hans', + verified: false, + jsonb: null, + createdAt: result[0]!.customer!.createdAt, + }, }, - }]); + ]); }); test('select from alias', async () => { @@ -474,35 +674,44 @@ test('select from alias', async () => { }); await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + await db.execute( + sql`create table ${users} (id serial primary key, name text not null)`, + ); const user = alias(users, 'user'); const customers = alias(users, 'customer'); - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + await db.insert(users).values([ + { id: 10, name: 'Ivan' }, + { id: 11, name: 'Hans' }, + ]); const result = await db .select() .from(user) .leftJoin(customers, eq(customers.id, 11)) .where(eq(user.id, 10)); - expect(result).toEqual([{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', + expect(result).toEqual([ + { + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, }, - }]); + ]); await db.execute(sql`drop table ${users}`); }); test('insert with spaces', async () => { await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + const result = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); @@ -522,20 +731,25 @@ test('prepared statement', async () => { }); test('prepared statement reuse', async () => { - const stmt = 
db.insert(usersTable).values({ - verified: true, - name: sql.placeholder('name'), - }).prepare('stmt2'); + const stmt = db + .insert(usersTable) + .values({ + verified: true, + name: sql.placeholder('name'), + }) + .prepare('stmt2'); for (let i = 0; i < 10; i++) { await stmt.execute({ name: `John ${i}` }); } - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable); + const result = await db + .select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }) + .from(usersTable); expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, @@ -590,10 +804,15 @@ test('migrator : migrate with custom schema', async () => { await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); + await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsSchema: customSchema, + }); // test if the custom migrations table was created - const { rows } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); + const { rows } = await db.execute( + sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`, + ); expect(rows).toBeTruthy(); expect(rows!.length).toBeGreaterThan(0); @@ -604,7 +823,9 @@ test('migrator : migrate with custom schema', async () => { await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); + await db.execute( + sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`, + ); }); test('migrator : migrate with custom table', async () => { @@ -613,10 +834,15 @@ test('migrator : migrate with custom table', async () => { await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop 
table if exists "drizzle"."__drizzle_migrations"`); - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); + await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable: customTable, + }); // test if the custom migrations table was created - const { rows } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); + const { rows } = await db.execute( + sql`select * from "drizzle".${sql.identifier(customTable)};`, + ); expect(rows).toBeTruthy(); expect(rows!.length).toBeGreaterThan(0); @@ -645,7 +871,11 @@ test('migrator : migrate with custom table and custom schema', async () => { // test if the custom migrations table was created const { rows } = await db.execute( - sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, + sql`select * from ${sql.identifier(customSchema)}.${ + sql.identifier( + customTable, + ) + };`, ); expect(rows).toBeTruthy(); expect(rows!.length).toBeGreaterThan(0); @@ -657,13 +887,27 @@ test('migrator : migrate with custom table and custom schema', async () => { await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); + await db.execute( + sql`drop table ${sql.identifier(customSchema)}.${ + sql.identifier( + customTable, + ) + }`, + ); }); test('insert via db.execute + select via db.execute', async () => { - await db.execute(sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`); + await db.execute( + sql`insert into ${usersTable} (${ + sql.identifier( + usersTable.name.name, + ) + }) values (${'John'})`, + ); - const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); + const result = await db.execute<{ id: number; name: string }>( + sql`select id, name from "users"`, + ); expectTypeOf(result.rows).toEqualTypeOf<{ id: number; name: 
string }[]>(); expect(result.rows).toEqual([{ id: 1, name: 'John' }]); }); @@ -671,7 +915,9 @@ test('insert via db.execute + select via db.execute', async () => { test('insert via db.execute + returning', async () => { const inserted = await db.execute( sql`insert into ${usersTable} (${ - sql.identifier(usersTable.name.name) + sql.identifier( + usersTable.name.name, + ) }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); @@ -679,13 +925,17 @@ test('insert via db.execute + returning', async () => { test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute( - db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), + db + .insert(usersTable) + .values({ name: 'John' }) + .returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); test('build query insert with onConflict do update', async () => { - const query = db.insert(usersTable) + const query = db + .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) .toSQL(); @@ -699,9 +949,13 @@ test('build query insert with onConflict do update', async () => { }); test('build query insert with onConflict do update / multiple columns', async () => { - const query = db.insert(usersTable) + const query = db + .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) + .onConflictDoUpdate({ + target: [usersTable.id, usersTable.name], + set: { name: 'John1' }, + }) .toSQL(); expect(query).toEqual({ @@ -713,7 +967,8 @@ test('build query insert with onConflict do update / multiple columns', async () }); test('build query insert with onConflict do nothing', async () => { - const query = db.insert(usersTable) + const query = 
db + .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing() .toSQL(); @@ -727,7 +982,8 @@ test('build query insert with onConflict do nothing', async () => { }); test('build query insert with onConflict do nothing + target', async () => { - const query = db.insert(usersTable) + const query = db + .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing({ target: usersTable.id }) .toSQL(); @@ -741,46 +997,49 @@ test('build query insert with onConflict do nothing + target', async () => { }); test('insert with onConflict do update', async () => { - await db.insert(usersTable) - .values({ name: 'John' }); + await db.insert(usersTable).values({ name: 'John' }); - await db.insert(usersTable) + await db + .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)); expect(res).toEqual([{ id: 1, name: 'John1' }]); }); test('insert with onConflict do nothing', async () => { - await db.insert(usersTable) - .values({ name: 'John' }); + await db.insert(usersTable).values({ name: 'John' }); - await db.insert(usersTable) + await db + .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoNothing(); - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('insert with onConflict do nothing + target', async () => { - await db.insert(usersTable) - .values({ name: 'John' }); + await db.insert(usersTable).values({ name: 'John' }); 
- await db.insert(usersTable) + await db + .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoNothing({ target: usersTable.id }); - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)); expect(res).toEqual([{ id: 1, name: 'John' }]); }); @@ -799,17 +1058,33 @@ test('transaction', async () => { await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop table if exists ${products}`); - await db.execute(sql`create table users_transactions (id serial not null primary key, balance integer not null)`); + await db.execute( + sql`create table users_transactions (id serial not null primary key, balance integer not null)`, + ); await db.execute( sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, ); - const user = await db.insert(users).values({ balance: 100 }).returning().then((rows) => rows[0]!); - const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().then((rows) => rows[0]!); + const user = await db + .insert(users) + .values({ balance: 100 }) + .returning() + .then((rows) => rows[0]!); + const product = await db + .insert(products) + .values({ price: 10, stock: 10 }) + .returning() + .then((rows) => rows[0]!); await db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + await tx + .update(users) + .set({ balance: user.balance - product.price }) + .where(eq(users.id, user.id)); + await tx + .update(products) + .set({ stock: product.stock - 1 }) + .where(eq(products.id, product.id)); }); const result = await db.select().from(users); @@ -906,81 +1181,109 @@ 
test('nested transaction rollback', async () => { test('select from raw sql', async () => { const result = await db.execute(sql`select 1 as id, 'John' as name`); - expect(result.rows).toEqual([ - { id: 1, name: 'John' }, - ]); + expect(result.rows).toEqual([{ id: 1, name: 'John' }]); }); test('select from raw sql with mapped values', async () => { - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`); + const result = await db + .select({ + id: sql`id`, + name: sql`name`, + }) + .from(sql`(select 1 as id, 'John' as name) as users`); - expect(result).toEqual([ - { id: 1, name: 'John' }, - ]); + expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('insert with array values works', async () => { const bestTexts = ['text1', 'text2', 'text3']; - const [insertResult] = await db.insert(usersTable).values({ - name: 'John', - bestTexts, - }).returning(); + const [insertResult] = await db + .insert(usersTable) + .values({ + name: 'John', + bestTexts, + }) + .returning(); expect(insertResult?.bestTexts).toEqual(bestTexts); }); test('update with array values works', async () => { - const [newUser] = await db.insert(usersTable).values({ name: 'John' }).returning(); + const [newUser] = await db + .insert(usersTable) + .values({ name: 'John' }) + .returning(); const bestTexts = ['text4', 'text5', 'text6']; - const [insertResult] = await db.update(usersTable).set({ - bestTexts, - }).where(eq(usersTable.id, newUser!.id)).returning(); + const [insertResult] = await db + .update(usersTable) + .set({ + bestTexts, + }) + .where(eq(usersTable.id, newUser!.id)) + .returning(); expect(insertResult?.bestTexts).toEqual(bestTexts); }); test('insert with array values works', async () => { const bestTexts = ['text1', 'text2', 'text3']; - const [insertResult] = await db.insert(usersTable).values({ - name: 'John', - bestTexts, - }).returning(); + const [insertResult] = await db + .insert(usersTable) + .values({ + 
name: 'John', + bestTexts, + }) + .returning(); expect(insertResult?.bestTexts).toEqual(bestTexts); }); test('update with array values works', async () => { - const [newUser] = await db.insert(usersTable).values({ name: 'John' }).returning(); + const [newUser] = await db + .insert(usersTable) + .values({ name: 'John' }) + .returning(); const bestTexts = ['text4', 'text5', 'text6']; - const [insertResult] = await db.update(usersTable).set({ - bestTexts, - }).where(eq(usersTable.id, newUser!.id)).returning(); + const [insertResult] = await db + .update(usersTable) + .set({ + bestTexts, + }) + .where(eq(usersTable.id, newUser!.id)) + .returning(); expect(insertResult?.bestTexts).toEqual(bestTexts); }); test('insert with array values works', async () => { const bestTexts = ['text1', 'text2', 'text3']; - const [insertResult] = await db.insert(usersTable).values({ - name: 'John', - bestTexts, - }).returning(); + const [insertResult] = await db + .insert(usersTable) + .values({ + name: 'John', + bestTexts, + }) + .returning(); expect(insertResult?.bestTexts).toEqual(bestTexts); }); test('update with array values works', async () => { - const [newUser] = await db.insert(usersTable).values({ name: 'John' }).returning(); + const [newUser] = await db + .insert(usersTable) + .values({ name: 'John' }) + .returning(); const bestTexts = ['text4', 'text5', 'text6']; - const [insertResult] = await db.update(usersTable).set({ - bestTexts, - }).where(eq(usersTable.id, newUser!.id)).returning(); + const [insertResult] = await db + .update(usersTable) + .set({ + bestTexts, + }) + .where(eq(usersTable.id, newUser!.id)) + .returning(); expect(insertResult?.bestTexts).toEqual(bestTexts); }); @@ -993,7 +1296,10 @@ test('all date and time columns', async () => { datetime: timestamp('datetime').notNull(), // datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), - datetimeFullPrecision: 
timestamp('datetime_full_precision', { precision: 6, mode: 'string' }).notNull(), + datetimeFullPrecision: timestamp('datetime_full_precision', { + precision: 6, + mode: 'string', + }).notNull(), // datetimeWTZString: timestamp('datetime_wtz_string', { withTimezone: true, mode: 'string' }).notNull(), }); @@ -1029,29 +1335,35 @@ test('all date and time columns', async () => { const result = await db.select().from(table); Expect< - Equal<{ - id: number; - dateString: string; - time: string; - datetime: Date; - // datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - // datetimeWTZString: string; - }[], typeof result> + Equal< + { + id: number; + dateString: string; + time: string; + datetime: Date; + // datetimeWTZ: Date; + datetimeString: string; + datetimeFullPrecision: string; + // datetimeWTZString: string; + }[], + typeof result + > >; Expect< - Equal<{ - dateString: string; - time: string; - datetime: Date; - // datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - // datetimeWTZString: string; - id?: number | undefined; - }, typeof table.$inferInsert> + Equal< + { + dateString: string; + time: string; + datetime: Date; + // datetimeWTZ: Date; + datetimeString: string; + datetimeFullPrecision: string; + // datetimeWTZString: string; + id?: number | undefined; + }, + typeof table.$inferInsert + > >; expect(result).toEqual([ @@ -1073,9 +1385,19 @@ test('all date and time columns', async () => { test.skip('all date and time columns with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - timestampAsDate: timestamp('timestamp_date', { withTimezone: true, precision: 3 }).notNull(), - timestampTimeZones: timestamp('timestamp_date_2', { withTimezone: true, precision: 3 }).notNull(), + timestamp: timestamp('timestamp_string', { + mode: 'string', + withTimezone: true, 
+ precision: 6, + }).notNull(), + timestampAsDate: timestamp('timestamp_date', { + withTimezone: true, + precision: 3, + }).notNull(), + timestampTimeZones: timestamp('timestamp_date_2', { + withTimezone: true, + precision: 3, + }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); @@ -1098,8 +1420,16 @@ test.skip('all date and time columns with timezone', async () => { const timestampDateWTZ2 = new Date('2022-01-01 00:00:00.123 +0200'); await db.insert(table).values([ - { timestamp: timestampString, timestampAsDate: timestampDate, timestampTimeZones: timestampDateWTZ }, - { timestamp: timestampString2, timestampAsDate: timestampDate2, timestampTimeZones: timestampDateWTZ2 }, + { + timestamp: timestampString, + timestampAsDate: timestampDate, + timestampTimeZones: timestampDateWTZ, + }, + { + timestamp: timestampString2, + timestampAsDate: timestampDate2, + timestampTimeZones: timestampDateWTZ2, + }, ]); const result = await db.select().from(table); @@ -1132,18 +1462,21 @@ test.skip('all date and time columns with timezone', async () => { id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00', timestamp_date: timestampDate.toISOString().replace('T', ' ').replace('Z', '') + '+00', - timestamp_date_2: timestampDateWTZ.toISOString().replace('T', ' ').replace('Z', '') + '+00', + timestamp_date_2: timestampDateWTZ.toISOString().replace('T', ' ').replace('Z', '') + + '+00', }, { id: 2, timestamp_string: '2022-01-01 04:00:00.123456+00', timestamp_date: timestampDate2.toISOString().replace('T', ' ').replace('Z', '') + '+00', - timestamp_date_2: timestampDateWTZ2.toISOString().replace('T', ' ').replace('Z', '') + '+00', + timestamp_date_2: timestampDateWTZ2.toISOString().replace('T', ' ').replace('Z', '') + + '+00', }, ]); - expect(result[0]?.timestampTimeZones.getTime()) - .toEqual(new Date((result2.rows?.[0] as any).timestamp_date_2 as any).getTime()); + expect(result[0]?.timestampTimeZones.getTime()).toEqual( + new Date((result2.rows?.[0] as 
any).timestamp_date_2 as any).getTime(), + ); await db.execute(sql`drop table if exists ${table}`); }); @@ -1151,8 +1484,14 @@ test.skip('all date and time columns with timezone', async () => { test('all date and time columns without timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), - timestampString: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), - timestampString2: timestamp('timestamp_string2', { precision: 3, mode: 'string' }).notNull(), + timestampString: timestamp('timestamp_string', { + mode: 'string', + precision: 6, + }).notNull(), + timestampString2: timestamp('timestamp_string2', { + precision: 3, + mode: 'string', + }).notNull(), timestampDate: timestamp('timestamp_date', { precision: 3 }).notNull(), }); @@ -1180,7 +1519,11 @@ test('all date and time columns without timezone', async () => { await db.insert(table).values([ { timestampString, timestampString2, timestampDate }, - { timestampString: timestampString_2, timestampString2: timestampString2_2, timestampDate: timestampDate2 }, + { + timestampString: timestampString_2, + timestampString2: timestampString2_2, + timestampDate: timestampDate2, + }, ]); const result = await db.select().from(table); @@ -1213,26 +1556,66 @@ test('all date and time columns without timezone', async () => { id: 1, timestamp_string: timestampString, timestamp_string2: '2022-01-02 00:00:00.123', - timestamp_date: timestampDate.toISOString().replace('T', ' ').replace('Z', ''), + timestamp_date: timestampDate + .toISOString() + .replace('T', ' ') + .replace('Z', ''), }, { id: 2, timestamp_string: timestampString_2, timestamp_string2: '2022-01-01 00:00:00.123', - timestamp_date: timestampDate2.toISOString().replace('T', ' ').replace('Z', ''), + timestamp_date: timestampDate2 + .toISOString() + .replace('T', ' ') + .replace('Z', ''), }, ]); - expect((result2.rows?.[0] as any).timestamp_string).toEqual('2022-01-01 00:00:00.123456'); - // need to add the 'Z', 
otherwise javascript assumes it's in local time - expect(new Date((result2.rows?.[0] as any).timestamp_date + 'Z' as any).getTime()).toEqual( - timestampDate.getTime(), + expect((result2.rows?.[0] as any).timestamp_string).toEqual( + '2022-01-01 00:00:00.123456', ); + // need to add the 'Z', otherwise javascript assumes it's in local time + expect( + new Date(((result2.rows?.[0] as any).timestamp_date + 'Z') as any).getTime(), + ).toEqual(timestampDate.getTime()); await db.execute(sql`drop table if exists ${table}`); }); +test('Typehints mix for RQB', async () => { + const uuid = 'd997d46d-5769-4c78-9a35-93acadbe6076'; + + const res = await db.query.user.findMany({ + where: eq(user.id, uuid), + with: { + todos: { + with: { + todo: true, + }, + }, + }, + }); + + expect(res).toStrictEqual([]); +}); + +test('Typehints mix for findFirst', async () => { + const uuid = 'd997d46d-5769-4c78-9a35-93acadbe6076'; + + await db.insert(user).values({ id: uuid, email: 'd' }); + + const res = await db.query.user.findFirst({ + where: eq(user.id, uuid), + }); + + expect(res).toStrictEqual({ id: 'd997d46d-5769-4c78-9a35-93acadbe6076', email: 'd' }); +}); + afterAll(async () => { await db.execute(sql`drop table if exists "users"`); + await db.execute(sql`drop table if exists "todo_user"`); + await db.execute(sql`drop table if exists "user"`); + await db.execute(sql`drop table if exists "todo"`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); }); diff --git a/integration-tests/tests/pg/pg-common.ts b/integration-tests/tests/pg/pg-common.ts index fb69c5877..c48a533f9 100644 --- a/integration-tests/tests/pg/pg-common.ts +++ b/integration-tests/tests/pg/pg-common.ts @@ -49,6 +49,7 @@ import { intersect, intersectAll, interval, + json, jsonb, macaddr, macaddr8, @@ -199,6 +200,12 @@ const users2MySchemaTable = mySchema.table('users2', { cityId: integer('city_id').references(() => citiesTable.id), }); +const jsonTestTable = pgTable('jsontest', { + id: 
serial('id').primaryKey(), + json: json('json').$type<{ string: string; number: number }>(), + jsonb: jsonb('jsonb').$type<{ string: string; number: number }>(), +}); + let pgContainer: Docker.Container; export async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { @@ -246,7 +253,7 @@ export function tests() { create table users ( id serial primary key, name text not null, - verified boolean not null default false, + verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) @@ -358,6 +365,16 @@ export function tests() { ) `, ); + + await db.execute( + sql` + create table jsontest ( + id serial primary key, + json json, + jsonb jsonb + ) + `, + ); }); async function setupSetOperationTest(db: PgDatabase) { @@ -1116,6 +1133,28 @@ export function tests() { expect(result).toEqual([{ id: 1, name: 'John' }]); }); + test('insert: placeholders on columns with encoder', async (ctx) => { + const { db } = ctx.pg; + + const statement = db.insert(usersTable).values({ + name: 'John', + jsonb: sql.placeholder('jsonb'), + }).prepare('encoder_statement'); + + await statement.execute({ jsonb: ['foo', 'bar'] }); + + const result = await db + .select({ + id: usersTable.id, + jsonb: usersTable.jsonb, + }) + .from(usersTable); + + expect(result).toEqual([ + { id: 1, jsonb: ['foo', 'bar'] }, + ]); + }); + test('prepared statement reuse', async (ctx) => { const { db } = ctx.pg; @@ -4481,5 +4520,145 @@ export function tests() { expect(users.length).toBeGreaterThan(0); }); + + test('proper json and jsonb handling', async (ctx) => { + const { db } = ctx.pg; + + const jsonTable = pgTable('json_table', { + json: json('json').$type<{ name: string; age: number }>(), + jsonb: jsonb('jsonb').$type<{ name: string; age: number }>(), + }); + + await db.execute(sql`drop table if exists ${jsonTable}`); + + await db.execute(sql`create table ${jsonTable} (json json, jsonb jsonb)`); + + await 
db.insert(jsonTable).values({ json: { name: 'Tom', age: 75 }, jsonb: { name: 'Pete', age: 23 } }); + + const result = await db.select().from(jsonTable); + + const justNames = await db.select({ + name1: sql`${jsonTable.json}->>'name'`.as('name1'), + name2: sql`${jsonTable.jsonb}->>'name'`.as('name2'), + }).from(jsonTable); + + expect(result).toStrictEqual([ + { + json: { name: 'Tom', age: 75 }, + jsonb: { name: 'Pete', age: 23 }, + }, + ]); + + expect(justNames).toStrictEqual([ + { + name1: 'Tom', + name2: 'Pete', + }, + ]); + }); + + test('set json/jsonb fields with objects and retrieve with the ->> operator', async (ctx) => { + const { db } = ctx.pg; + + const obj = { string: 'test', number: 123 }; + const { string: testString, number: testNumber } = obj; + + await db.insert(jsonTestTable).values({ + json: obj, + jsonb: obj, + }); + + const result = await db.select({ + jsonStringField: sql`${jsonTestTable.json}->>'string'`, + jsonNumberField: sql`${jsonTestTable.json}->>'number'`, + jsonbStringField: sql`${jsonTestTable.jsonb}->>'string'`, + jsonbNumberField: sql`${jsonTestTable.jsonb}->>'number'`, + }).from(jsonTestTable); + + expect(result).toStrictEqual([{ + jsonStringField: testString, + jsonNumberField: String(testNumber), + jsonbStringField: testString, + jsonbNumberField: String(testNumber), + }]); + }); + + test('set json/jsonb fields with strings and retrieve with the ->> operator', async (ctx) => { + const { db } = ctx.pg; + + const obj = { string: 'test', number: 123 }; + const { string: testString, number: testNumber } = obj; + + await db.insert(jsonTestTable).values({ + json: sql`${JSON.stringify(obj)}`, + jsonb: sql`${JSON.stringify(obj)}`, + }); + + const result = await db.select({ + jsonStringField: sql`${jsonTestTable.json}->>'string'`, + jsonNumberField: sql`${jsonTestTable.json}->>'number'`, + jsonbStringField: sql`${jsonTestTable.jsonb}->>'string'`, + jsonbNumberField: sql`${jsonTestTable.jsonb}->>'number'`, + }).from(jsonTestTable); + + 
expect(result).toStrictEqual([{ + jsonStringField: testString, + jsonNumberField: String(testNumber), + jsonbStringField: testString, + jsonbNumberField: String(testNumber), + }]); + }); + + test('set json/jsonb fields with objects and retrieve with the -> operator', async (ctx) => { + const { db } = ctx.pg; + + const obj = { string: 'test', number: 123 }; + const { string: testString, number: testNumber } = obj; + + await db.insert(jsonTestTable).values({ + json: obj, + jsonb: obj, + }); + + const result = await db.select({ + jsonStringField: sql`${jsonTestTable.json}->'string'`, + jsonNumberField: sql`${jsonTestTable.json}->'number'`, + jsonbStringField: sql`${jsonTestTable.jsonb}->'string'`, + jsonbNumberField: sql`${jsonTestTable.jsonb}->'number'`, + }).from(jsonTestTable); + + expect(result).toStrictEqual([{ + jsonStringField: testString, + jsonNumberField: testNumber, + jsonbStringField: testString, + jsonbNumberField: testNumber, + }]); + }); + + test('set json/jsonb fields with strings and retrieve with the -> operator', async (ctx) => { + const { db } = ctx.pg; + + const obj = { string: 'test', number: 123 }; + const { string: testString, number: testNumber } = obj; + + await db.insert(jsonTestTable).values({ + json: sql`${JSON.stringify(obj)}`, + jsonb: sql`${JSON.stringify(obj)}`, + }); + + const result = await db.select({ + jsonStringField: sql`${jsonTestTable.json}->'string'`, + jsonNumberField: sql`${jsonTestTable.json}->'number'`, + jsonbStringField: sql`${jsonTestTable.jsonb}->'string'`, + jsonbNumberField: sql`${jsonTestTable.jsonb}->'number'`, + }).from(jsonTestTable); + + expect(result).toStrictEqual([{ + jsonStringField: testString, + jsonNumberField: testNumber, + jsonbStringField: testString, + jsonbNumberField: testNumber, + }]); + }); }); } diff --git a/integration-tests/tests/prisma/.gitignore b/integration-tests/tests/prisma/.gitignore deleted file mode 100644 index 794cddf53..000000000 --- a/integration-tests/tests/prisma/.gitignore +++ 
/dev/null @@ -1,2 +0,0 @@ -*/client -*/drizzle diff --git a/integration-tests/tests/prisma/mysql/prisma.test.ts b/integration-tests/tests/prisma/mysql/prisma.test.ts deleted file mode 100644 index ee5511a25..000000000 --- a/integration-tests/tests/prisma/mysql/prisma.test.ts +++ /dev/null @@ -1,30 +0,0 @@ -import 'dotenv/config'; -import 'zx/globals'; - -import type { PrismaMySqlDatabase } from 'drizzle-orm/prisma/mysql'; -import { drizzle } from 'drizzle-orm/prisma/mysql'; -import { beforeAll, expect, expectTypeOf, test } from 'vitest'; - -import { PrismaClient } from './client'; -import { User } from './drizzle/schema.ts'; - -const ENABLE_LOGGING = false; - -let db: PrismaMySqlDatabase; - -beforeAll(async () => { - await $`prisma generate --schema tests/prisma/mysql/schema.prisma`.quiet(); - await $`prisma db push --force-reset --schema tests/prisma/mysql/schema.prisma`.quiet(); - const prisma = new PrismaClient().$extends(drizzle({ logger: ENABLE_LOGGING })); - db = prisma.$drizzle; -}); - -test('extension works', async () => { - const insert = await db.insert(User).values({ email: 'test@test.com' }); - expectTypeOf(insert).toEqualTypeOf<[]>(); - expect(insert).toEqual([]); - - const result = await db.select().from(User); - expectTypeOf(result).toEqualTypeOf(); - expect(result).toEqual([{ id: 1, email: 'test@test.com', name: null }]); -}); diff --git a/integration-tests/tests/prisma/mysql/schema.prisma b/integration-tests/tests/prisma/mysql/schema.prisma deleted file mode 100644 index 5bb496dcb..000000000 --- a/integration-tests/tests/prisma/mysql/schema.prisma +++ /dev/null @@ -1,20 +0,0 @@ -generator client { - provider = "prisma-client-js" - output = "./client" -} - -generator drizzle { - provider = "drizzle-prisma-generator" - output = "./drizzle" -} - -datasource db { - provider = "mysql" - url = env("MYSQL_CONNECTION_STRING") -} - -model User { - id Int @id @default(autoincrement()) - email String @unique - name String? 
-} diff --git a/integration-tests/tests/prisma/pg/prisma.test.ts b/integration-tests/tests/prisma/pg/prisma.test.ts deleted file mode 100644 index 16c5ce106..000000000 --- a/integration-tests/tests/prisma/pg/prisma.test.ts +++ /dev/null @@ -1,29 +0,0 @@ -import 'dotenv/config'; -import 'zx/globals'; - -import { drizzle } from 'drizzle-orm/prisma/pg'; -import type { PrismaPgDatabase } from 'drizzle-orm/prisma/pg'; -import { beforeAll, expect, expectTypeOf, test } from 'vitest'; - -import { PrismaClient } from './client'; -import { User } from './drizzle/schema.ts'; - -const ENABLE_LOGGING = false; - -let db: PrismaPgDatabase; - -beforeAll(async () => { - await $`prisma db push --force-reset --schema tests/prisma/pg/schema.prisma`.quiet(); - const prisma = new PrismaClient().$extends(drizzle({ logger: ENABLE_LOGGING })); - db = prisma.$drizzle; -}); - -test('extension works', async () => { - const insert = await db.insert(User).values({ email: 'test@test.com' }); - expectTypeOf(insert).toEqualTypeOf<[]>(); - expect(insert).toEqual([]); - - const result = await db.select().from(User); - expectTypeOf(result).toEqualTypeOf(); - expect(result).toEqual([{ id: 1, email: 'test@test.com', name: null }]); -}); diff --git a/integration-tests/tests/prisma/pg/schema.prisma b/integration-tests/tests/prisma/pg/schema.prisma deleted file mode 100644 index a5345d047..000000000 --- a/integration-tests/tests/prisma/pg/schema.prisma +++ /dev/null @@ -1,20 +0,0 @@ -generator client { - provider = "prisma-client-js" - output = "./client" -} - -generator drizzle { - provider = "drizzle-prisma-generator" - output = "./drizzle" -} - -datasource db { - provider = "postgresql" - url = env("PG_CONNECTION_STRING") -} - -model User { - id Int @id @default(autoincrement()) - email String @unique - name String? 
-} diff --git a/integration-tests/tests/prisma/sqlite/.gitignore b/integration-tests/tests/prisma/sqlite/.gitignore deleted file mode 100644 index 2fa69c243..000000000 --- a/integration-tests/tests/prisma/sqlite/.gitignore +++ /dev/null @@ -1 +0,0 @@ -db.sqlite diff --git a/integration-tests/tests/prisma/sqlite/prisma.test.ts b/integration-tests/tests/prisma/sqlite/prisma.test.ts deleted file mode 100644 index 4e8979cb8..000000000 --- a/integration-tests/tests/prisma/sqlite/prisma.test.ts +++ /dev/null @@ -1,41 +0,0 @@ -import 'dotenv/config'; -import 'zx/globals'; - -import { drizzle } from 'drizzle-orm/prisma/sqlite'; -import type { PrismaSQLiteDatabase } from 'drizzle-orm/prisma/sqlite'; -import { beforeAll, expect, expectTypeOf, test } from 'vitest'; - -import { PrismaClient } from './client'; -import { User } from './drizzle/schema.ts'; - -const ENABLE_LOGGING = false; - -let db: PrismaSQLiteDatabase; - -beforeAll(async () => { - await $`prisma db push --force-reset --schema tests/prisma/sqlite/schema.prisma`.quiet(); - const prisma = new PrismaClient().$extends(drizzle({ logger: ENABLE_LOGGING })); - db = prisma.$drizzle; -}); - -test('extension works', async () => { - const insert = await db.insert(User).values({ email: 'test@test.com' }); - expectTypeOf(insert).toEqualTypeOf<[]>(); - expect(insert).toEqual([]); - - const result = await db.select().from(User); - expectTypeOf(result).toEqualTypeOf(); - expect(result).toEqual([{ id: 1, email: 'test@test.com', name: null }]); - - const all = await db.select().from(User).all(); - expectTypeOf(all).toEqualTypeOf(); - expect(all).toEqual([{ id: 1, email: 'test@test.com', name: null }]); - - const get = await db.select().from(User).get(); - expectTypeOf(get).toEqualTypeOf(); - expect(get).toEqual({ id: 1, email: 'test@test.com', name: null }); - - const run = await db.insert(User).values({ email: 'test2@test.com' }).run(); - expectTypeOf(run).toEqualTypeOf<[]>(); - expect(run).toEqual([]); -}); diff --git 
a/integration-tests/tests/prisma/sqlite/schema.prisma b/integration-tests/tests/prisma/sqlite/schema.prisma deleted file mode 100644 index 6dbf2643e..000000000 --- a/integration-tests/tests/prisma/sqlite/schema.prisma +++ /dev/null @@ -1,20 +0,0 @@ -generator client { - provider = "prisma-client-js" - output = "./client" -} - -generator drizzle { - provider = "drizzle-prisma-generator" - output = "./drizzle" -} - -datasource db { - provider = "sqlite" - url = "file:./db.sqlite" -} - -model User { - id Int @id @default(autoincrement()) - email String @unique - name String? -} diff --git a/integration-tests/tests/relational/singlestore.schema.ts b/integration-tests/tests/relational/singlestore.schema.ts new file mode 100644 index 000000000..ca3386ba0 --- /dev/null +++ b/integration-tests/tests/relational/singlestore.schema.ts @@ -0,0 +1,106 @@ +import { bigint, boolean, primaryKey, serial, singlestoreTable, text, timestamp } from 'drizzle-orm/singlestore-core'; + +import { relations } from 'drizzle-orm'; + +export const usersTable = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + invitedBy: bigint('invited_by', { mode: 'number' }), +}); +export const usersConfig = relations(usersTable, ({ one, many }) => ({ + invitee: one(usersTable, { + fields: [usersTable.invitedBy], + references: [usersTable.id], + }), + usersToGroups: many(usersToGroupsTable), + posts: many(postsTable), + comments: many(commentsTable), +})); + +export const groupsTable = singlestoreTable('groups', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + description: text('description'), +}); +export const groupsConfig = relations(groupsTable, ({ many }) => ({ + usersToGroups: many(usersToGroupsTable), +})); + +export const usersToGroupsTable = singlestoreTable( + 'users_to_groups', + { + id: serial('id').primaryKey(), + userId: bigint('user_id', { mode: 'number' }).notNull(), + 
groupId: bigint('group_id', { mode: 'number' }).notNull(), + }, + (t) => ({ + pk: primaryKey(t.userId, t.groupId), + }), +); +export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ + group: one(groupsTable, { + fields: [usersToGroupsTable.groupId], + references: [groupsTable.id], + }), + user: one(usersTable, { + fields: [usersToGroupsTable.userId], + references: [usersTable.id], + }), +})); + +export const postsTable = singlestoreTable('posts', { + id: serial('id').primaryKey(), + content: text('content').notNull(), + ownerId: bigint('owner_id', { mode: 'number' }), + createdAt: timestamp('created_at') + .notNull() + .defaultNow(), +}); +export const postsConfig = relations(postsTable, ({ one, many }) => ({ + author: one(usersTable, { + fields: [postsTable.ownerId], + references: [usersTable.id], + }), + comments: many(commentsTable), +})); + +export const commentsTable = singlestoreTable('comments', { + id: serial('id').primaryKey(), + content: text('content').notNull(), + creator: bigint('creator', { mode: 'number' }), + postId: bigint('post_id', { mode: 'number' }), + createdAt: timestamp('created_at') + .notNull() + .defaultNow(), +}); +export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ + post: one(postsTable, { + fields: [commentsTable.postId], + references: [postsTable.id], + }), + author: one(usersTable, { + fields: [commentsTable.creator], + references: [usersTable.id], + }), + likes: many(commentLikesTable), +})); + +export const commentLikesTable = singlestoreTable('comment_likes', { + id: serial('id').primaryKey(), + creator: bigint('creator', { mode: 'number' }), + commentId: bigint('comment_id', { mode: 'number' }), + createdAt: timestamp('created_at') + .notNull() + .defaultNow(), +}); +export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ + comment: one(commentsTable, { + fields: [commentLikesTable.commentId], + references: [commentsTable.id], + }), + author: 
one(usersTable, { + fields: [commentLikesTable.creator], + references: [usersTable.id], + }), +})); diff --git a/integration-tests/tests/relational/singlestore.test.ts b/integration-tests/tests/relational/singlestore.test.ts new file mode 100644 index 000000000..50aa2e8f4 --- /dev/null +++ b/integration-tests/tests/relational/singlestore.test.ts @@ -0,0 +1,6402 @@ +import retry from 'async-retry'; +import Docker from 'dockerode'; +import 'dotenv/config'; +import { desc, DrizzleError, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; +import { drizzle, type SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; +import getPort from 'get-port'; +import * as mysql from 'mysql2/promise'; +import { v4 as uuid } from 'uuid'; +import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; +import * as schema from './singlestore.schema.ts'; + +const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable } = schema; + +const ENABLE_LOGGING = false; + +/* + Test cases: + - querying nested relation without PK with additional fields +*/ + +declare module 'vitest' { + export interface TestContext { + docker: Docker; + singlestoreContainer: Docker.Container; + singlestoreDb: SingleStoreDriverDatabase; + singlestoreClient: mysql.Connection; + } +} + +let globalDocker: Docker; +let singlestoreContainer: Docker.Container; +let db: SingleStoreDriverDatabase; +let client: mysql.Connection; + +async function createDockerDB(): Promise { + const docker = new Docker(); + const port = await getPort({ port: 3306 }); + const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => (err ? 
reject(err) : resolve(err))) + ); + + singlestoreContainer = await docker.createContainer({ + Image: image, + Env: ['ROOT_PASSWORD=singlestore'], + name: `drizzle-integration-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await singlestoreContainer.start(); + await new Promise((resolve) => setTimeout(resolve, 4000)); + + return `singlestore://root:singlestore@localhost:${port}/`; +} + +beforeAll(async () => { + const connectionString = process.env['SINGLESTORE_CONNECTION_STRING'] ?? (await createDockerDB()); + client = await retry(async () => { + client = await mysql.createConnection(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + + await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); + await client.changeUser({ database: 'drizzle' }); + db = drizzle(client, { schema, logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end().catch(console.error); + await singlestoreContainer?.stop().catch(console.error); +}); + +beforeEach(async (ctx) => { + ctx.singlestoreDb = db; + ctx.singlestoreClient = client; + ctx.docker = globalDocker; + ctx.singlestoreContainer = singlestoreContainer; + + await ctx.singlestoreDb.execute(sql`drop table if exists \`users\``); + await ctx.singlestoreDb.execute(sql`drop table if exists \`groups\``); + await ctx.singlestoreDb.execute(sql`drop table if exists \`users_to_groups\``); + await ctx.singlestoreDb.execute(sql`drop table if exists \`posts\``); + await ctx.singlestoreDb.execute(sql`drop table if exists \`comments\``); + await ctx.singlestoreDb.execute(sql`drop table if exists \`comment_likes\``); + + await ctx.singlestoreDb.execute( + sql` + CREATE TABLE \`users\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`name\` text NOT NULL, + \`verified\` boolean DEFAULT false NOT NULL, 
+ \`invited_by\` bigint + ); + `, + ); + await ctx.singlestoreDb.execute( + sql` + CREATE TABLE \`groups\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`name\` text NOT NULL, + \`description\` text + ); + `, + ); + await ctx.singlestoreDb.execute( + sql` + CREATE TABLE \`users_to_groups\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`user_id\` bigint, + \`group_id\` bigint + ); + `, + ); + await ctx.singlestoreDb.execute( + sql` + CREATE TABLE \`posts\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`content\` text NOT NULL, + \`owner_id\` bigint, + \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL + ); + `, + ); + await ctx.singlestoreDb.execute( + sql` + CREATE TABLE \`comments\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`content\` text NOT NULL, + \`creator\` bigint, + \`post_id\` bigint, + \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL + ); + `, + ); + await ctx.singlestoreDb.execute( + sql` + CREATE TABLE \`comment_likes\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`creator\` bigint, + \`comment_id\` bigint, + \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL + ); + `, + ); +}); + +/* + [Find Many] One relation users+posts +*/ + +test('[Find Many] Get users with posts', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + posts: true, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + usersWithPosts.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(usersWithPosts.length).eq(3); + expect(usersWithPosts[0]?.posts.length).eq(1); + expect(usersWithPosts[1]?.posts.length).eq(1); + expect(usersWithPosts[2]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[2]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], + }); +}); + +test.skip('[Find Many] Get users with posts + limit posts', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + posts: { + limit: 1, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(usersWithPosts.length).eq(3); + expect(usersWithPosts[0]?.posts.length).eq(1); + expect(usersWithPosts[1]?.posts.length).eq(1); + expect(usersWithPosts[2]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[2]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], + }); +}); + +test.skip('[Find Many] Get users with posts + limit posts and users', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + limit: 2, + with: { + posts: { + limit: 1, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(usersWithPosts.length).eq(2); + expect(usersWithPosts[0]?.posts.length).eq(1); + expect(usersWithPosts[1]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts + custom fields', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + posts: true, + }, + extras: ({ name }) => ({ + lowerName: sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lowerName: string; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(usersWithPosts.length).toEqual(3); + expect(usersWithPosts[0]?.posts.length).toEqual(3); + expect(usersWithPosts[1]?.posts.length).toEqual(2); + expect(usersWithPosts[2]?.posts.length).toEqual(2); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + lowerName: 'dan', + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }, { + id: 2, + ownerId: 1, + content: 'Post1.2', + createdAt: usersWithPosts[0]?.posts[1]?.createdAt, + }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts[0]?.posts[2]?.createdAt }], + }); + expect(usersWithPosts[1]).toEqual({ + id: 2, + name: 'Andrew', + lowerName: 'andrew', + verified: false, + invitedBy: null, + posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }, { + id: 5, + ownerId: 2, + content: 'Post2.1', + createdAt: usersWithPosts[1]?.posts[1]?.createdAt, + }], + }); + expect(usersWithPosts[2]).toEqual({ + id: 3, + name: 'Alex', + lowerName: 'alex', + verified: false, + invitedBy: null, + posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }, { + id: 7, + ownerId: 3, + content: 'Post3.1', + createdAt: usersWithPosts[2]?.posts[1]?.createdAt, + }], + }); +}); + +test.skip('[Find Many] Get users with posts + custom fields + limits', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + limit: 1, + with: { + posts: { + limit: 1, + 
}, + }, + extras: (usersTable, { sql }) => ({ + lowerName: sql`lower(${usersTable.name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lowerName: string; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).toEqual(1); + expect(usersWithPosts[0]?.posts.length).toEqual(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + lowerName: 'dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +test.skip('[Find Many] Get users with posts + orderBy', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: '1' }, + { ownerId: 1, content: '2' }, + { ownerId: 1, content: '3' }, + { ownerId: 2, content: '4' }, + { ownerId: 2, content: '5' }, + { ownerId: 3, content: '6' }, + { ownerId: 3, content: '7' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + posts: { + orderBy: (postsTable, { desc }) => [desc(postsTable.content)], + }, + }, + orderBy: (usersTable, { desc }) => [desc(usersTable.id)], + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(3); + expect(usersWithPosts[0]?.posts.length).eq(2); + expect(usersWithPosts[1]?.posts.length).eq(2); + expect(usersWithPosts[2]?.posts.length).eq(3); + + expect(usersWithPosts[2]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 3, ownerId: 
1, content: '3', createdAt: usersWithPosts[2]?.posts[2]?.createdAt }, { + id: 2, + ownerId: 1, + content: '2', + createdAt: usersWithPosts[2]?.posts[1]?.createdAt, + }, { id: 1, ownerId: 1, content: '1', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ + id: 5, + ownerId: 2, + content: '5', + createdAt: usersWithPosts[1]?.posts[1]?.createdAt, + }, { id: 4, ownerId: 2, content: '4', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[0]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ + id: 7, + ownerId: 3, + content: '7', + createdAt: usersWithPosts[0]?.posts[1]?.createdAt, + }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + where: (({ id }, { eq }) => eq(id, 1)), + with: { + posts: { + where: (({ id }, { eq }) => eq(id, 1)), + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: 
usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts + where + partial', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: { + id: true, + name: true, + }, + with: { + posts: { + columns: { + id: true, + content: true, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + posts: { + id: number; + content: string; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + posts: [{ id: 1, content: 'Post1' }], + }); +}); + +test('[Find Many] Get users with posts + where + partial. 
Did not select posts id, but used it in where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: { + id: true, + name: true, + }, + with: { + posts: { + columns: { + id: true, + content: true, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + posts: { + id: number; + content: string; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + posts: [{ id: 1, content: 'Post1' }], + }); +}); + +test('[Find Many] Get users with posts + where + partial(true + false)', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: { + id: true, + name: false, + }, + with: { + posts: { + columns: { + id: true, + content: false, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + posts: { + id: number; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + 
expect(usersWithPosts[0]).toEqual({ + id: 1, + posts: [{ id: 1 }], + }); +}); + +test('[Find Many] Get users with posts + where + partial(false)', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: { + name: false, + }, + with: { + posts: { + columns: { + content: false, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts in transaction', async (t) => { + const { singlestoreDb: db } = t; + + let usersWithPosts: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[] = []; + + await db.transaction(async (tx) => { + await tx.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await tx.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + usersWithPosts = await tx.query.usersTable.findMany({ + where: (({ 
id }, { eq }) => eq(id, 1)), + with: { + posts: { + where: (({ id }, { eq }) => eq(id, 1)), + }, + }, + }); + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts in rollbacked transaction', async (t) => { + const { singlestoreDb: db } = t; + + let usersWithPosts: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[] = []; + + await expect(db.transaction(async (tx) => { + await tx.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await tx.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + tx.rollback(); + + usersWithPosts = await tx.query.usersTable.findMany({ + where: (({ id }, { eq }) => eq(id, 1)), + with: { + posts: { + where: (({ id }, { eq }) => eq(id, 1)), + }, + }, + }); + })).rejects.toThrowError(new TransactionRollbackError()); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(0); +}); + +// select only custom +test('[Find Many] Get only custom fields', async () => { + await 
db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1' }, + { id: 2, ownerId: 1, content: 'Post1.2' }, + { id: 3, ownerId: 1, content: 'Post1.3' }, + { id: 4, ownerId: 2, content: 'Post2' }, + { id: 5, ownerId: 2, content: 'Post2.1' }, + { id: 6, ownerId: 3, content: 'Post3' }, + { id: 7, ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: {}, + with: { + posts: { + columns: {}, + extras: ({ content }) => ({ + lowerName: sql`lower(${content})`.as('content_lower'), + }), + }, + }, + extras: ({ name }) => ({ + lowerName: sql`lower(${name})`.as('name_lower'), + }), + }); + + // Type Assertion + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + lowerName: string; + posts: { + lowerName: string; + }[]; + }[]>(); + + // General Assertions + expect(usersWithPosts).toHaveLength(3); + + // Helper function to find user by lowerName + const findUser = (lowerName: string) => usersWithPosts.find((user) => user.lowerName === lowerName); + + // Assertions for each user + const dan = findUser('dan'); + const andrew = findUser('andrew'); + const alex = findUser('alex'); + + expect(dan).toBeDefined(); + expect(andrew).toBeDefined(); + expect(alex).toBeDefined(); + + // Verify the number of posts for each user + expect(dan?.posts).toHaveLength(3); + expect(andrew?.posts).toHaveLength(2); + expect(alex?.posts).toHaveLength(2); + + // Define expected posts for each user + const expectedDanPosts = ['post1', 'post1.2', 'post1.3']; + const expectedAndrewPosts = ['post2', 'post2.1']; + const expectedAlexPosts = ['post3', 'post3.1']; + + // Helper function to extract lowerNames from posts + const getPostLowerNames = (posts: { lowerName: string }[]) => posts.map((post) => post.lowerName); + + // Assertions for Dan's posts + 
expect(getPostLowerNames(dan!.posts)).toEqual(expect.arrayContaining(expectedDanPosts)); + expect(getPostLowerNames(dan!.posts)).toHaveLength(expectedDanPosts.length); + + // Assertions for Andrew's posts + expect(getPostLowerNames(andrew!.posts)).toEqual(expect.arrayContaining(expectedAndrewPosts)); + expect(getPostLowerNames(andrew!.posts)).toHaveLength(expectedAndrewPosts.length); + + // Assertions for Alex's posts + expect(getPostLowerNames(alex!.posts)).toEqual(expect.arrayContaining(expectedAlexPosts)); + expect(getPostLowerNames(alex!.posts)).toHaveLength(expectedAlexPosts.length); +}); + +// select only custom with where clause (Order Agnostic) +test('[Find Many] Get only custom fields + where', async (t) => { + const { singlestoreDb: db } = t; + + // Insert Users + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + // Insert Posts + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + // Query Users with Posts where users.id = 1 and posts.id >= 2 + const usersWithPosts = await db.query.usersTable.findMany({ + columns: {}, + with: { + posts: { + columns: {}, + where: gte(postsTable.id, 2), + extras: ({ content }) => ({ + lowerName: sql`lower(${content})`.as('content_lower'), + }), + }, + }, + where: eq(usersTable.id, 1), + extras: ({ name }) => ({ + lowerName: sql`lower(${name})`.as('name_lower'), + }), + }); + + // Type Assertion + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + lowerName: string; + posts: { + lowerName: string; + }[]; + }[]>(); + + // General Assertions + expect(usersWithPosts).toHaveLength(1); + + // Since we expect only one user, we can extract it directly + const danWithPosts = usersWithPosts[0]; + + // Assert 
that the user exists and has the correct lowerName + expect(danWithPosts).toBeDefined(); + expect(danWithPosts?.lowerName).toBe('dan'); + + // Assert that the user has the expected number of posts + expect(danWithPosts?.posts).toHaveLength(2); + + // Define the expected posts + const expectedPosts = ['post1.2', 'post1.3']; + + // Extract the lowerName of each post + const actualPostLowerNames = danWithPosts?.posts.map((post) => post.lowerName); + + // Assert that all expected posts are present, regardless of order + for (const expectedPost of expectedPosts) { + expect(actualPostLowerNames).toContain(expectedPost); + } + + // Additionally, ensure no unexpected posts are present + expect(actualPostLowerNames).toHaveLength(expectedPosts.length); +}); + +test.skip('[Find Many] Get only custom fields + where + limit', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: {}, + with: { + posts: { + columns: {}, + where: gte(postsTable.id, 2), + limit: 1, + extras: ({ content }) => ({ + lowerName: sql`lower(${content})`.as('content_lower'), + }), + }, + }, + where: eq(usersTable.id, 1), + extras: ({ name }) => ({ + lowerName: sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + lowerName: string; + posts: { + lowerName: string; + }[]; + }[]>(); + + expect(usersWithPosts.length).toEqual(1); + expect(usersWithPosts[0]?.posts.length).toEqual(1); + + expect(usersWithPosts).toContainEqual({ + lowerName: 'dan', + posts: [{ lowerName: 
'post1.2' }], + }); +}); + +test.skip('[Find Many] Get only custom fields + where + orderBy', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: {}, + with: { + posts: { + columns: {}, + where: gte(postsTable.id, 2), + orderBy: [desc(postsTable.id)], + extras: ({ content }) => ({ + lowerName: sql`lower(${content})`.as('content_lower'), + }), + }, + }, + where: eq(usersTable.id, 1), + extras: ({ name }) => ({ + lowerName: sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + lowerName: string; + posts: { + lowerName: string; + }[]; + }[]>(); + + expect(usersWithPosts.length).toEqual(1); + expect(usersWithPosts[0]?.posts.length).toEqual(2); + + expect(usersWithPosts).toContainEqual({ + lowerName: 'dan', + posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], + }); +}); + +// select only custom find one (Order Agnostic) +test('[Find One] Get only custom fields (Order Agnostic)', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + // Query to find the first user without any specific order + const usersWithPosts = await 
db.query.usersTable.findFirst({ + columns: {}, + with: { + posts: { + columns: {}, + extras: ({ content }) => ({ + lowerName: sql`lower(${content})`.as('content_lower'), + }), + }, + }, + extras: ({ name }) => ({ + lowerName: sql`lower(${name})`.as('name_lower'), + }), + }); + + // Type Assertion + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + lowerName: string; + posts: { + lowerName: string; + }[]; + } | undefined + >(); + + // General Assertions + expect(usersWithPosts).toBeDefined(); + + // Since findFirst without orderBy can return any user, we'll verify the returned user and their posts + if (usersWithPosts) { + // Define expected users and their corresponding posts + const expectedUsers: { [key: string]: string[] } = { + dan: ['post1', 'post1.2', 'post1.3'], + andrew: ['post2', 'post2.1'], + alex: ['post3', 'post3.1'], + }; + + // Verify that the returned user is one of the expected users + expect(Object.keys(expectedUsers)).toContain(usersWithPosts.lowerName); + + // Get the expected posts for the returned user + const expectedPosts = expectedUsers[usersWithPosts.lowerName] as string[]; + + // Verify the number of posts + expect(usersWithPosts.posts).toHaveLength(expectedPosts.length); + + // Extract the lowerName of each post + const actualPostLowerNames = usersWithPosts.posts.map((post) => post.lowerName); + + // Assert that all expected posts are present, regardless of order + for (const expectedPost of expectedPosts) { + expect(actualPostLowerNames).toContain(expectedPost.toLowerCase()); + } + } +}); + +// select only custom find one with where clause (Order Agnostic) +test('[Find One] Get only custom fields + where (Order Agnostic)', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 
'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + // Query to find the first user with id = 1 and posts with id >= 2 + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: {}, + with: { + posts: { + columns: {}, + where: gte(postsTable.id, 2), + extras: ({ content }) => ({ + lowerName: sql`lower(${content})`.as('content_lower'), + }), + }, + }, + where: eq(usersTable.id, 1), + extras: ({ name }) => ({ + lowerName: sql`lower(${name})`.as('name_lower'), + }), + }); + + // Type Assertion + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + lowerName: string; + posts: { + lowerName: string; + }[]; + } | undefined + >(); + + // General Assertions + expect(usersWithPosts).toBeDefined(); + + if (usersWithPosts) { + // Assert that the returned user has the expected lowerName + expect(usersWithPosts.lowerName).toBe('dan'); + + // Assert that the user has exactly two posts + expect(usersWithPosts.posts).toHaveLength(2); + + // Define the expected posts + const expectedPosts = ['post1.2', 'post1.3']; + + // Extract the lowerName of each post + const actualPostLowerNames = usersWithPosts.posts.map((post) => post.lowerName); + + // Assert that all expected posts are present, regardless of order + for (const expectedPost of expectedPosts) { + expect(actualPostLowerNames).toContain(expectedPost.toLowerCase()); + } + + // Additionally, ensure no unexpected posts are present + expect(actualPostLowerNames).toHaveLength(expectedPosts.length); + } +}); + +test.skip('[Find One] Get only custom fields + where + limit', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { 
ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: {}, + with: { + posts: { + columns: {}, + where: gte(postsTable.id, 2), + limit: 1, + extras: ({ content }) => ({ + lowerName: sql`lower(${content})`.as('content_lower'), + }), + }, + }, + where: eq(usersTable.id, 1), + extras: ({ name }) => ({ + lowerName: sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + lowerName: string; + posts: { + lowerName: string; + }[]; + } | undefined + >(); + + expect(usersWithPosts?.posts.length).toEqual(1); + + expect(usersWithPosts).toEqual({ + lowerName: 'dan', + posts: [{ lowerName: 'post1.2' }], + }); +}); + +test.skip('[Find One] Get only custom fields + where + orderBy', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: {}, + with: { + posts: { + columns: {}, + where: gte(postsTable.id, 2), + orderBy: [desc(postsTable.id)], + extras: ({ content }) => ({ + lowerName: sql`lower(${content})`.as('content_lower'), + }), + }, + }, + where: eq(usersTable.id, 1), + extras: ({ name }) => ({ + lowerName: sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + lowerName: string; + posts: { + lowerName: string; + }[]; + } | undefined + >(); + + expect(usersWithPosts?.posts.length).toEqual(2); + + 
expect(usersWithPosts).toEqual({ + lowerName: 'dan', + posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], + }); +}); + +// columns {} +test('[Find Many] Get select {}', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await expect( + async () => + await db.query.usersTable.findMany({ + columns: {}, + }), + ).rejects.toThrow(DrizzleError); +}); + +// columns {} +test('[Find One] Get select {}', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await expect(async () => + await db.query.usersTable.findFirst({ + columns: {}, + }) + ).rejects.toThrow(DrizzleError); +}); + +// deep select {} +test('[Find Many] Get deep select {}', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + await expect(async () => + await db.query.usersTable.findMany({ + columns: {}, + with: { + posts: { + columns: {}, + }, + }, + }) + ).rejects.toThrow(DrizzleError); +}); + +// deep select {} +test('[Find One] Get deep select {}', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + await expect(async () => + await db.query.usersTable.findFirst({ + columns: {}, + with: { + posts: { + columns: {}, + }, + }, + }) + ).rejects.toThrow(DrizzleError); +}); + +/* + 
Prepared statements for users+posts +*/ +test.skip('[Find Many] Get users with posts + prepared limit', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const prepared = db.query.usersTable.findMany({ + with: { + posts: { + limit: placeholder('limit'), + }, + }, + }).prepare(); + + const usersWithPosts = await prepared.execute({ limit: 1 }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(3); + expect(usersWithPosts[0]?.posts.length).eq(1); + expect(usersWithPosts[1]?.posts.length).eq(1); + expect(usersWithPosts[2]?.posts.length).eq(1); + + expect(usersWithPosts).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], + }); +}); + +test.skip('[Find Many] Get users with posts + prepared limit + offset', async (t) => { + const { singlestoreDb: db } = t; + + await 
db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const prepared = db.query.usersTable.findMany({ + limit: placeholder('uLimit'), + offset: placeholder('uOffset'), + with: { + posts: { + limit: placeholder('pLimit'), + }, + }, + }).prepare(); + + const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1 }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(2); + expect(usersWithPosts[0]?.posts.length).eq(1); + expect(usersWithPosts[1]?.posts.length).eq(1); + + expect(usersWithPosts).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts + prepared where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const prepared = 
db.query.usersTable.findMany({ + where: (({ id }, { eq }) => eq(id, placeholder('id'))), + with: { + posts: { + where: (({ id }, { eq }) => eq(id, 1)), + }, + }, + }).prepare(); + + const usersWithPosts = await prepared.execute({ id: 1 }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +test.skip('[Find Many] Get users with posts + prepared + limit + offset + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const prepared = db.query.usersTable.findMany({ + limit: placeholder('uLimit'), + offset: placeholder('uOffset'), + where: (({ id }, { eq, or }) => or(eq(id, placeholder('id')), eq(id, 3))), + with: { + posts: { + where: (({ id }, { eq }) => eq(id, placeholder('pid'))), + limit: placeholder('pLimit'), + }, + }, + }).prepare(); + + const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: 
Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +/* + [Find One] One relation users+posts +*/ + +test('[Find One] Get users with posts', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: true, + }, + }); + + // Type Assertion + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + // General Assertions + expect(usersWithPosts).toBeDefined(); + + if (usersWithPosts) { + const { id, name, posts } = usersWithPosts; + + // Verify that the user is one of the inserted users + const validUsers: { [key: number]: string } = { + 1: 'dan', + 2: 'andrew', + 3: 'alex', + }; + expect(validUsers[id]).toBe(name.toLowerCase()); + + // Assert that the user has exactly one post + expect(posts).toHaveLength(1); + + const post = posts[0]; + + // Verify that the post belongs to the user + expect(post?.ownerId).toBe(id); + + // Verify that the post content matches the user + const expectedPostContent = `Post${id}`; + expect(post?.content.toLowerCase()).toBe(expectedPostContent.toLowerCase()); + + // Optionally, verify the presence of `createdAt` + expect(post?.createdAt).toBeInstanceOf(Date); + } +}); + +test.skip('[Find One] Get users with posts + 
limit posts', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: { + limit: 1, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], + }); +}); + +test.skip('[Find One] Get users with posts no results found', async (t) => { + const { singlestoreDb: db } = t; + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: { + limit: 1, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts).toBeUndefined(); +}); + +test.skip('[Find One] Get users with posts + limit posts and users', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, 
content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: { + limit: 1, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], + }); +}); + +test('[Find One] Get users with posts + custom fields', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: true, + }, + extras: ({ name }) => ({ + lowerName: sql`lower(${name})`.as('name_lower'), + }), + }); + + // Type Assertion + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lowerName: string; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + // General Assertions + expect(usersWithPosts).toBeDefined(); + + if (usersWithPosts) { + const { id, lowerName, posts } = usersWithPosts; + + // Define valid users and their 
expected lower names + const validUsers: { [key: number]: string } = { + 1: 'dan', + 2: 'andrew', + 3: 'alex', + }; + + // Verify that the returned user's lowerName matches the expected value + expect(validUsers[id]).toBe(lowerName); + + // Define the expected posts based on the user ID + const expectedPostsByUser: Record = { + 1: ['post1', 'post1.2', 'post1.3'], + 2: ['post2', 'post2.1'], + 3: ['post3', 'post3.1'], + }; + + // Get the expected posts for the returned user + const expectedPosts = expectedPostsByUser[id] || []; + + // Extract the lowerName of each post + const actualPostContents = posts.map((post) => post.content.toLowerCase()); + + // Assert that all expected posts are present, regardless of order + for (const expectedPost of expectedPosts) { + expect(actualPostContents).toContain(expectedPost.toLowerCase()); + } + + // Optionally, ensure that no unexpected posts are present + expect(actualPostContents).toHaveLength(expectedPosts.length); + } +}); + +test.skip('[Find One] Get users with posts + custom fields + limits', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: { + limit: 1, + }, + }, + extras: (usersTable, { sql }) => ({ + lowerName: sql`lower(${usersTable.name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lowerName: string; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + 
}[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).toEqual(1); + + expect(usersWithPosts).toEqual({ + id: 1, + name: 'Dan', + lowerName: 'dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], + }); +}); + +test.skip('[Find One] Get users with posts + orderBy', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: '1' }, + { ownerId: 1, content: '2' }, + { ownerId: 1, content: '3' }, + { ownerId: 2, content: '4' }, + { ownerId: 2, content: '5' }, + { ownerId: 3, content: '6' }, + { ownerId: 3, content: '7' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: { + orderBy: (postsTable, { desc }) => [desc(postsTable.content)], + }, + }, + orderBy: (usersTable, { desc }) => [desc(usersTable.id)], + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(2); + + expect(usersWithPosts).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ + id: 7, + ownerId: 3, + content: '7', + createdAt: usersWithPosts?.posts[1]?.createdAt, + }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts?.posts[0]?.createdAt }], + }); +}); + +test('[Find One] Get users with posts + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { 
ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + where: (({ id }, { eq }) => eq(id, 1)), + with: { + posts: { + where: (({ id }, { eq }) => eq(id, 1)), + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], + }); +}); + +test('[Find One] Get users with posts + where + partial', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: { + id: true, + name: true, + }, + with: { + posts: { + columns: { + id: true, + content: true, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + posts: { + id: number; + content: string; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + name: 'Dan', + posts: [{ id: 1, content: 'Post1' }], + }); +}); + +test('[Find One] Get users with posts + where + partial. 
Did not select posts id, but used it in where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: { + id: true, + name: true, + }, + with: { + posts: { + columns: { + id: true, + content: true, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + posts: { + id: number; + content: string; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + name: 'Dan', + posts: [{ id: 1, content: 'Post1' }], + }); +}); + +test('[Find One] Get users with posts + where + partial(true + false)', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: { + id: true, + name: false, + }, + with: { + posts: { + columns: { + id: true, + content: false, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + posts: { + id: number; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + posts: [{ id: 1 }], + }); +}); + 
+test('[Find One] Get users with posts + where + partial(false)', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: { + name: false, + }, + with: { + posts: { + columns: { + content: false, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }, + }, + where: (({ id }, { eq }) => eq(id, 1)), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts?.posts[0]?.createdAt }], + }); +}); + +/* + One relation users+users. Self referencing +*/ + +test.skip('Get user with invitee', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + with: { + invitee: true, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + usersWithInvitee.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(usersWithInvitee.length).eq(4); + expect(usersWithInvitee[0]?.invitee).toBeNull(); + expect(usersWithInvitee[1]?.invitee).toBeNull(); + expect(usersWithInvitee[2]?.invitee).not.toBeNull(); + expect(usersWithInvitee[3]?.invitee).not.toBeNull(); + + expect(usersWithInvitee[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[2]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + }); + expect(usersWithInvitee[3]).toEqual({ + id: 4, + name: 'John', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, + }); +}); + +test.skip('Get user + limit with invitee', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew', invitedBy: 1 }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + with: { + invitee: true, + }, + limit: 2, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + usersWithInvitee.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(usersWithInvitee.length).eq(2); + expect(usersWithInvitee[0]?.invitee).toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + + expect(usersWithInvitee[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + }); +}); + +test.skip('Get user with invitee and custom fields', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), + with: { + invitee: { + extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), + }, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + lower: string; + invitedBy: number | null; + invitee: { + id: number; + name: string; + verified: boolean; + lower: string; + invitedBy: number | null; + } | null; + }[] + >(); + + usersWithInvitee.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(usersWithInvitee.length).eq(4); + expect(usersWithInvitee[0]?.invitee).toBeNull(); + expect(usersWithInvitee[1]?.invitee).toBeNull(); + expect(usersWithInvitee[2]?.invitee).not.toBeNull(); + expect(usersWithInvitee[3]?.invitee).not.toBeNull(); + + expect(usersWithInvitee[0]).toEqual({ + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[1]).toEqual({ + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[2]).toEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, + }); + expect(usersWithInvitee[3]).toEqual({ + id: 4, + name: 'John', + lower: 'john', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, + }); +}); + +test.skip('Get user with invitee and custom fields + limits', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), + limit: 3, + with: { + invitee: { + extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), + }, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + lower: string; + invitedBy: number | null; + invitee: { + id: number; + name: string; + verified: boolean; + lower: string; + invitedBy: number | null; + } | null; + }[] + >(); + + usersWithInvitee.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(usersWithInvitee.length).eq(3); + expect(usersWithInvitee[0]?.invitee).toBeNull(); + expect(usersWithInvitee[1]?.invitee).toBeNull(); + expect(usersWithInvitee[2]?.invitee).not.toBeNull(); + + expect(usersWithInvitee[0]).toEqual({ + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[1]).toEqual({ + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[2]).toEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, + }); +}); + +test.skip('Get user with invitee + order by', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + orderBy: (users, { desc }) => [desc(users.id)], + with: { + invitee: true, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + expect(usersWithInvitee.length).eq(4); + expect(usersWithInvitee[3]?.invitee).toBeNull(); + expect(usersWithInvitee[2]?.invitee).toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + expect(usersWithInvitee[0]?.invitee).not.toBeNull(); + + expect(usersWithInvitee[3]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[2]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[1]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + 
invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + }); + expect(usersWithInvitee[0]).toEqual({ + id: 4, + name: 'John', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, + }); +}); + +test.skip('Get user with invitee + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), + with: { + invitee: true, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + expect(usersWithInvitee.length).eq(2); + expect(usersWithInvitee[0]?.invitee).not.toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + + expect(usersWithInvitee).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + }); + expect(usersWithInvitee).toContainEqual({ + id: 4, + name: 'John', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, + }); +}); + +test.skip('Get user with invitee + where + partial', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), + columns: { + id: true, + name: true, + }, + with: { + invitee: { + 
columns: { + id: true, + name: true, + }, + }, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + invitee: { + id: number; + name: string; + } | null; + }[] + >(); + + expect(usersWithInvitee.length).eq(2); + expect(usersWithInvitee[0]?.invitee).not.toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + + expect(usersWithInvitee).toContainEqual({ + id: 3, + name: 'Alex', + invitee: { id: 1, name: 'Dan' }, + }); + expect(usersWithInvitee).toContainEqual({ + id: 4, + name: 'John', + invitee: { id: 2, name: 'Andrew' }, + }); +}); + +test.skip('Get user with invitee + where + partial. Did not select users id, but used it in where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), + columns: { + name: true, + }, + with: { + invitee: { + columns: { + id: true, + name: true, + }, + }, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + name: string; + invitee: { + id: number; + name: string; + } | null; + }[] + >(); + + expect(usersWithInvitee.length).eq(2); + expect(usersWithInvitee[0]?.invitee).not.toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + + expect(usersWithInvitee).toContainEqual({ + name: 'Alex', + invitee: { id: 1, name: 'Dan' }, + }); + expect(usersWithInvitee).toContainEqual({ + name: 'John', + invitee: { id: 2, name: 'Andrew' }, + }); +}); + +test.skip('Get user with invitee + where + partial(true+false)', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const 
usersWithInvitee = await db.query.usersTable.findMany({ + where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), + columns: { + id: true, + name: true, + verified: false, + }, + with: { + invitee: { + columns: { + id: true, + name: true, + verified: false, + }, + }, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + invitee: { + id: number; + name: string; + } | null; + }[] + >(); + + expect(usersWithInvitee.length).eq(2); + expect(usersWithInvitee[0]?.invitee).not.toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + + expect(usersWithInvitee).toContainEqual({ + id: 3, + name: 'Alex', + invitee: { id: 1, name: 'Dan' }, + }); + expect(usersWithInvitee).toContainEqual({ + id: 4, + name: 'John', + invitee: { id: 2, name: 'Andrew' }, + }); +}); + +test.skip('Get user with invitee + where + partial(false)', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), + columns: { + verified: false, + }, + with: { + invitee: { + columns: { + name: false, + }, + }, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + invitedBy: number | null; + invitee: { + id: number; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + expect(usersWithInvitee.length).eq(2); + expect(usersWithInvitee[0]?.invitee).not.toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + + expect(usersWithInvitee).toContainEqual({ + id: 3, + name: 'Alex', + invitedBy: 1, + invitee: { id: 1, verified: false, invitedBy: null }, + }); + expect(usersWithInvitee).toContainEqual({ + id: 4, + name: 'John', + invitedBy: 2, + invitee: { id: 2, 
verified: false, invitedBy: null }, + }); +}); + +/* + Two first-level relations users+users and users+posts +*/ + +test.skip('Get user with invitee and posts', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + invitee: true, + posts: true, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(response.length).eq(4); + + expect(response[0]?.invitee).toBeNull(); + expect(response[1]?.invitee).toBeNull(); + expect(response[2]?.invitee).not.toBeNull(); + expect(response[3]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(1); + expect(response[1]?.posts.length).eq(1); + expect(response[2]?.posts.length).eq(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 4, + name: 'John', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, + posts: [], + }); +}); + +test.skip('Get user with invitee and posts + limit posts and users', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const response = await db.query.usersTable.findMany({ + limit: 3, + with: { + invitee: true, + posts: { + limit: 1, + }, + }, + }); + + 
expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).eq(3); + + expect(response[0]?.invitee).toBeNull(); + expect(response[1]?.invitee).toBeNull(); + expect(response[2]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(1); + expect(response[1]?.posts.length).eq(1); + expect(response[2]?.posts.length).eq(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 3, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], + }); +}); + +test.skip('Get user with invitee and posts + limits + custom fields in each', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const response = 
await db.query.usersTable.findMany({ + limit: 3, + extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), + with: { + invitee: { + extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_invitee_name') }), + }, + posts: { + limit: 1, + extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_content') }), + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + lower: string; + invitedBy: number | null; + posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + lower: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).eq(3); + + expect(response[0]?.invitee).toBeNull(); + expect(response[1]?.invitee).toBeNull(); + expect(response[2]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(1); + expect(response[1]?.posts.length).eq(1); + expect(response[2]?.posts.length).eq(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, + posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }], + }); +}); + +test.skip('Get user with invitee and posts + 
custom fields in each', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const response = await db.query.usersTable.findMany({ + extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), + with: { + invitee: { + extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), + }, + posts: { + extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_name') }), + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + lower: string; + invitedBy: number | null; + posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + lower: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + response[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + response[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + response[2]?.posts.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(response.length).eq(4); + + expect(response[0]?.invitee).toBeNull(); + expect(response[1]?.invitee).toBeNull(); + expect(response[2]?.invitee).not.toBeNull(); + expect(response[3]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(2); + expect(response[1]?.posts.length).eq(2); + expect(response[2]?.posts.length).eq(2); + expect(response[3]?.posts.length).eq(0); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }, { + id: 2, + ownerId: 1, + content: 'Post1.1', + lower: 'post1.1', + createdAt: response[0]?.posts[1]?.createdAt, + }], + }); + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }, { + id: 4, + ownerId: 2, + content: 'Post2.1', + lower: 'post2.1', + createdAt: response[1]?.posts[1]?.createdAt, + }], + }); + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, + posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }, { + id: 6, + ownerId: 3, + content: 'Post3.1', + lower: 'post3.1', + createdAt: response[2]?.posts[1]?.createdAt, + }], + }); + expect(response).toContainEqual({ + id: 4, + name: 'John', + lower: 'john', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, + posts: [], + }); +}); + +test.skip('Get user with invitee and posts + orderBy', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 
'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const response = await db.query.usersTable.findMany({ + orderBy: (users, { desc }) => [desc(users.id)], + with: { + invitee: true, + posts: { + orderBy: (posts, { desc }) => [desc(posts.id)], + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + expect(response.length).eq(4); + + expect(response[3]?.invitee).toBeNull(); + expect(response[2]?.invitee).toBeNull(); + expect(response[1]?.invitee).not.toBeNull(); + expect(response[0]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(0); + expect(response[1]?.posts.length).eq(1); + expect(response[2]?.posts.length).eq(2); + expect(response[3]?.posts.length).eq(2); + + expect(response[3]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 2, ownerId: 1, content: 'Post1.1', createdAt: response[3]?.posts[0]?.createdAt }, { + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: response[3]?.posts[1]?.createdAt, + }], + }); + expect(response[2]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 4, ownerId: 2, content: 'Post2.1', createdAt: response[2]?.posts[0]?.createdAt }, { + id: 3, + ownerId: 2, + content: 'Post2', + createdAt: response[2]?.posts[1]?.createdAt, + }], + }); + expect(response[1]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { 
id: 1, name: 'Dan', verified: false, invitedBy: null }, + posts: [{ + id: 5, + ownerId: 3, + content: 'Post3', + createdAt: response[3]?.posts[1]?.createdAt, + }], + }); + expect(response[0]).toEqual({ + id: 4, + name: 'John', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, + posts: [], + }); +}); + +test.skip('Get user with invitee and posts + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const response = await db.query.usersTable.findMany({ + where: (users, { eq, or }) => (or(eq(users.id, 2), eq(users.id, 3))), + with: { + invitee: true, + posts: { + where: (posts, { eq }) => (eq(posts.ownerId, 2)), + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(response.length).eq(2); + + expect(response[0]?.invitee).toBeNull(); + expect(response[1]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(1); + expect(response[1]?.posts.length).eq(0); + + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + posts: [], + }); +}); + +test.skip('Get user with invitee and posts + limit posts and users + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const response = await db.query.usersTable.findMany({ + where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), + limit: 1, + with: { + invitee: true, + posts: { + where: (posts, { eq }) => (eq(posts.ownerId, 3)), + limit: 1, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + expect(response.length).eq(1); + + expect(response[0]?.invitee).not.toBeNull(); + expect(response[0]?.posts.length).eq(1); + + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + 
verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[0]?.posts[0]?.createdAt }], + }); +}); + +test.skip('Get user with invitee and posts + orderBy + where + custom', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const response = await db.query.usersTable.findMany({ + orderBy: [desc(usersTable.id)], + where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), + extras: { + lower: sql`lower(${usersTable.name})`.as('lower_name'), + }, + with: { + invitee: true, + posts: { + where: eq(postsTable.ownerId, 3), + orderBy: [desc(postsTable.id)], + extras: { + lower: sql`lower(${postsTable.content})`.as('lower_name'), + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lower: string; + posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + expect(response.length).eq(2); + + expect(response[1]?.invitee).not.toBeNull(); + expect(response[0]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(0); + expect(response[1]?.posts.length).eq(1); + + expect(response[1]).toEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + posts: [{ + id: 5, + ownerId: 3, + content: 'Post3', + lower: 
'post3', + createdAt: response[1]?.posts[0]?.createdAt, + }], + }); + expect(response[0]).toEqual({ + id: 4, + name: 'John', + lower: 'john', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, + posts: [], + }); +}); + +test.skip('Get user with invitee and posts + orderBy + where + partial + custom', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const response = await db.query.usersTable.findMany({ + orderBy: [desc(usersTable.id)], + where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), + extras: { + lower: sql`lower(${usersTable.name})`.as('lower_name'), + }, + columns: { + id: true, + name: true, + }, + with: { + invitee: { + columns: { + id: true, + name: true, + }, + extras: { + lower: sql`lower(${usersTable.name})`.as('lower_name'), + }, + }, + posts: { + columns: { + id: true, + content: true, + }, + where: eq(postsTable.ownerId, 3), + orderBy: [desc(postsTable.id)], + extras: { + lower: sql`lower(${postsTable.content})`.as('lower_name'), + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + lower: string; + posts: { id: number; lower: string; content: string }[]; + invitee: { + id: number; + name: string; + lower: string; + } | null; + }[] + >(); + + expect(response.length).eq(2); + + expect(response[1]?.invitee).not.toBeNull(); + expect(response[0]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(0); + expect(response[1]?.posts.length).eq(1); + + expect(response[1]).toEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + 
invitee: { id: 1, name: 'Dan', lower: 'dan' }, + posts: [{ + id: 5, + content: 'Post3', + lower: 'post3', + }], + }); + expect(response[0]).toEqual({ + id: 4, + name: 'John', + lower: 'john', + invitee: { id: 2, name: 'Andrew', lower: 'andrew' }, + posts: [], + }); +}); + +/* + One two-level relation users+posts+comments +*/ + +test.skip('Get user with posts and posts with comments', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1' }, + { id: 2, ownerId: 2, content: 'Post2' }, + { id: 3, ownerId: 3, content: 'Post3' }, + ]); + + await db.insert(commentsTable).values([ + { postId: 1, content: 'Comment1', creator: 2 }, + { postId: 2, content: 'Comment2', creator: 2 }, + { postId: 3, content: 'Comment3', creator: 3 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + posts: { + with: { + comments: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + comments: { + id: number; + content: string; + createdAt: Date; + creator: number | null; + postId: number | null; + }[]; + }[]; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(response.length).eq(3); + expect(response[0]?.posts.length).eq(1); + expect(response[1]?.posts.length).eq(1); + expect(response[2]?.posts.length).eq(1); + + expect(response[0]?.posts[0]?.comments.length).eq(1); + expect(response[1]?.posts[0]?.comments.length).eq(1); + expect(response[2]?.posts[0]?.comments.length).eq(1); + + expect(response[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: response[0]?.posts[0]?.createdAt, + comments: [ + { + id: 1, + content: 'Comment1', + creator: 2, + postId: 1, + createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, + }, + ], + }], + }); + expect(response[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ + id: 2, + ownerId: 2, + content: 'Post2', + createdAt: response[1]?.posts[0]?.createdAt, + comments: [ + { + id: 2, + content: 'Comment2', + creator: 2, + postId: 2, + createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, + }, + ], + }], + }); + // expect(response[2]).toEqual({ + // id: 3, + // name: 'Alex', + // verified: false, + // invitedBy: null, + // posts: [{ + // id: 3, + // ownerId: 3, + // content: 'Post3', + // createdAt: response[2]?.posts[0]?.createdAt, + // comments: [ + // { + // id: , + // content: 'Comment3', + // creator: 3, + // postId: 3, + // createdAt: response[2]?.posts[0]?.comments[0]?.createdAt, + // }, + // ], + // }], + // }); +}); + +// Get user with limit posts and limit comments + +// Get user with custom field + post + comment with custom field + +// Get user with limit + posts orderBy + comment orderBy + +// Get user with where + posts where + comment where + +// Get user with where + posts partial where + comment where + +// Get user with where + posts partial where + comment partial(false) where + +// Get user with where partial(false) + posts partial where partial(false) + comment partial(false+true) where + +// Get user with where 
+ posts partial where + comment where. Didn't select field from where in posts + +// Get user with where + posts partial where + comment where. Didn't select field from where for all + +// Get with limit+offset in each + +/* + One two-level + One first-level relation users+posts+comments and users+users +*/ + +/* + One three-level relation users+posts+comments+comment_owner +*/ + +test.skip('Get user with posts and posts with comments and comments with owner', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1' }, + { id: 2, ownerId: 2, content: 'Post2' }, + { id: 3, ownerId: 3, content: 'Post3' }, + ]); + + await db.insert(commentsTable).values([ + { postId: 1, content: 'Comment1', creator: 2 }, + { postId: 2, content: 'Comment2', creator: 2 }, + { postId: 3, content: 'Comment3', creator: 3 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + posts: { + with: { + comments: { + with: { + author: true, + }, + }, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + comments: { + id: number; + content: string; + createdAt: Date; + creator: number | null; + postId: number | null; + author: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[]; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(response.length).eq(3); + expect(response[0]?.posts.length).eq(1); + expect(response[1]?.posts.length).eq(1); + expect(response[2]?.posts.length).eq(1); + + expect(response[0]?.posts[0]?.comments.length).eq(1); + expect(response[1]?.posts[0]?.comments.length).eq(1); + expect(response[2]?.posts[0]?.comments.length).eq(1); + + expect(response[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: response[0]?.posts[0]?.createdAt, + comments: [ + { + id: 1, + content: 'Comment1', + creator: 2, + author: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + postId: 1, + createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, + }, + ], + }], + }); + expect(response[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ + id: 2, + ownerId: 2, + content: 'Post2', + createdAt: response[1]?.posts[0]?.createdAt, + comments: [ + { + id: 2, + content: 'Comment2', + creator: 2, + author: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + postId: 2, + createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, + }, + ], + }], + }); +}); + +test.skip('Get user with posts and posts with comments and comments with owner where exists', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1' }, + { id: 2, ownerId: 2, content: 'Post2' }, + { id: 3, ownerId: 3, content: 'Post3' }, + ]); + + await db.insert(commentsTable).values([ + { postId: 1, content: 'Comment1', creator: 2 }, + { postId: 2, content: 'Comment2', creator: 2 }, + { postId: 3, content: 'Comment3', creator: 3 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + posts: { + with: { + comments: { + with: { + author: true, + }, + }, + }, + }, + }, + 
where: (table, { exists, eq }) => exists(db.select({ one: sql`1` }).from(usersTable).where(eq(sql`1`, table.id))), + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + comments: { + id: number; + content: string; + createdAt: Date; + creator: number | null; + postId: number | null; + author: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[]; + }[]; + }[]>(); + + expect(response.length).eq(1); + expect(response[0]?.posts.length).eq(1); + + expect(response[0]?.posts[0]?.comments.length).eq(1); + + expect(response[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: response[0]?.posts[0]?.createdAt, + comments: [ + { + id: 1, + content: 'Comment1', + creator: 2, + author: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + postId: 1, + createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, + }, + ], + }], + }); +}); + +/* + One three-level relation + 1 first-level relatioon + 1. users+posts+comments+comment_owner + 2. 
users+users +*/ + +/* + One four-level relation users+posts+comments+coment_likes +*/ + +/* + [Find Many] Many-to-many cases + + Users+users_to_groups+groups +*/ + +test.skip('[Find Many] Get users with groups', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + usersToGroups: { + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(response.length).toEqual(3); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(1); + expect(response[2]?.usersToGroups.length).toEqual(2); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + description: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 3, + name: 'Group3', + description: null, + }, + }, { + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); +}); + +test.skip('[Find Many] Get groups with users', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + with: { + usersToGroups: { + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(response.length).toEqual(3); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(2); + expect(response[2]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Group1', + description: null, + usersToGroups: [{ + user: { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }, { + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 3, + name: 'Group3', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test.skip('[Find Many] Get users with groups + limit', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + limit: 2, + with: { + usersToGroups: { + limit: 1, + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(response.length).toEqual(2); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + description: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); +}); + +test.skip('[Find Many] Get groups with users + limit', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + limit: 2, + with: { + usersToGroups: { + limit: 1, + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(response.length).toEqual(2); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Group1', + description: null, + usersToGroups: [{ + user: { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test.skip('[Find Many] Get users with groups + limit + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + limit: 1, + where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), + with: { + usersToGroups: { + where: eq(usersToGroupsTable.groupId, 1), + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(response.length).toEqual(1); + + expect(response[0]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + description: null, + }, + }], + }); +}); + +test.skip('[Find Many] Get groups with users + limit + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + limit: 1, + where: gt(groupsTable.id, 1), + with: { + usersToGroups: { + where: eq(usersToGroupsTable.userId, 2), + limit: 1, + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(response.length).toEqual(1); + + expect(response[0]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test.skip('[Find Many] Get users with groups + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), + with: { + usersToGroups: { + where: eq(usersToGroupsTable.groupId, 2), + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(response.length).toEqual(2); + + expect(response[0]?.usersToGroups.length).toEqual(0); + expect(response[1]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); +}); + +test.skip('[Find Many] Get groups with users + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + where: gt(groupsTable.id, 1), + with: { + usersToGroups: { + where: eq(usersToGroupsTable.userId, 2), + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 
1 : -1); + + expect(response.length).toEqual(2); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(0); + + expect(response).toContainEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 3, + name: 'Group3', + description: null, + usersToGroups: [], + }); +}); + +test.skip('[Find Many] Get users with groups + orderBy', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + orderBy: (users, { desc }) => [desc(users.id)], + with: { + usersToGroups: { + orderBy: [desc(usersToGroupsTable.groupId)], + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + }[]>(); + + expect(response.length).toEqual(3); + + expect(response[0]?.usersToGroups.length).toEqual(2); + expect(response[1]?.usersToGroups.length).toEqual(1); + expect(response[2]?.usersToGroups.length).toEqual(1); + + expect(response[2]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + description: null, + }, + }], + }); + + expect(response[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: 
null, + usersToGroups: [{ + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); + + expect(response[0]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 3, + name: 'Group3', + description: null, + }, + }, { + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); +}); + +test.skip('[Find Many] Get groups with users + orderBy', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + orderBy: [desc(groupsTable.id)], + with: { + usersToGroups: { + orderBy: (utg, { desc }) => [desc(utg.userId)], + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[]>(); + + expect(response.length).toEqual(3); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(2); + expect(response[2]?.usersToGroups.length).toEqual(1); + + expect(response[2]).toEqual({ + id: 1, + name: 'Group1', + description: null, + usersToGroups: [{ + user: { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response[1]).toEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }, { + user: { + id: 2, + name: 
'Andrew', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response[0]).toEqual({ + id: 3, + name: 'Group3', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test.skip('[Find Many] Get users with groups + orderBy + limit', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + orderBy: (users, { desc }) => [desc(users.id)], + limit: 2, + with: { + usersToGroups: { + limit: 1, + orderBy: [desc(usersToGroupsTable.groupId)], + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + }[]>(); + + expect(response.length).toEqual(2); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(1); + + expect(response[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); + + expect(response[0]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 3, + name: 'Group3', + description: null, + }, + }], + }); +}); + +/* + [Find One] Many-to-many cases + + Users+users_to_groups+groups +*/ + +test.skip('[Find One] Get users with groups', async (t) 
=> { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + with: { + usersToGroups: { + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + description: null, + }, + }], + }); +}); + +test.skip('[Find One] Get groups with users', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + with: { + usersToGroups: { + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + 
invitedBy: number | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 1, + name: 'Group1', + description: null, + usersToGroups: [{ + user: { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test.skip('[Find One] Get users with groups + limit', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + with: { + usersToGroups: { + limit: 1, + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + description: null, + }, + }], + }); +}); + +test.skip('[Find One] Get groups with users + limit', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { 
userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + with: { + usersToGroups: { + limit: 1, + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 1, + name: 'Group1', + description: null, + usersToGroups: [{ + user: { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test.skip('[Find One] Get users with groups + limit + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), + with: { + usersToGroups: { + where: eq(usersToGroupsTable.groupId, 1), + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + 
description: null, + }, + }], + }); +}); + +test.skip('[Find One] Get groups with users + limit + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + where: gt(groupsTable.id, 1), + with: { + usersToGroups: { + where: eq(usersToGroupsTable.userId, 2), + limit: 1, + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test.skip('[Find One] Get users with groups + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 
2)), + with: { + usersToGroups: { + where: eq(usersToGroupsTable.groupId, 2), + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(0); + + expect(response).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [], + }); +}); + +test.skip('[Find One] Get groups with users + where', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + where: gt(groupsTable.id, 1), + with: { + usersToGroups: { + where: eq(usersToGroupsTable.userId, 2), + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test.skip('[Find One] Get users with groups + orderBy', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, 
name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + orderBy: (users, { desc }) => [desc(users.id)], + with: { + usersToGroups: { + orderBy: [desc(usersToGroupsTable.groupId)], + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(2); + + expect(response).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 3, + name: 'Group3', + description: null, + }, + }, { + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); +}); + +test.skip('[Find One] Get groups with users + orderBy', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + orderBy: [desc(groupsTable.id)], + with: { + usersToGroups: { + orderBy: (utg, { desc }) => [desc(utg.userId)], + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + 
expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 3, + name: 'Group3', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test.skip('[Find One] Get users with groups + orderBy + limit', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + orderBy: (users, { desc }) => [desc(users.id)], + with: { + usersToGroups: { + limit: 1, + orderBy: [desc(usersToGroupsTable.groupId)], + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 3, + name: 'Group3', + description: null, + }, + }], + }); +}); + +test.skip('Get groups with users + orderBy + limit', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 
'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + orderBy: [desc(groupsTable.id)], + limit: 2, + with: { + usersToGroups: { + limit: 1, + orderBy: (utg, { desc }) => [desc(utg.userId)], + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[] + >(); + + expect(response.length).toEqual(2); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(1); + + expect(response[1]).toEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response[0]).toEqual({ + id: 3, + name: 'Group3', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test.skip('Get users with groups + custom', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + extras: { + 
lower: sql`lower(${usersTable.name})`.as('lower_name'), + }, + with: { + usersToGroups: { + columns: {}, + with: { + group: { + extras: { + lower: sql`lower(${groupsTable.name})`.as('lower_name'), + }, + }, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lower: string; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + lower: string; + }; + }[]; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).toEqual(3); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(1); + expect(response[2]?.usersToGroups.length).toEqual(2); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + lower: 'group1', + description: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 2, + name: 'Group2', + lower: 'group2', + description: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 3, + name: 'Group3', + lower: 'group3', + description: null, + }, + }, { + group: { + id: 2, + name: 'Group2', + lower: 'group2', + description: null, + }, + }], + }); +}); + +test.skip('Get groups with users + custom', async (t) => { + const { singlestoreDb: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, 
+ { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + extras: (table, { sql }) => ({ + lower: sql`lower(${table.name})`.as('lower_name'), + }), + with: { + usersToGroups: { + columns: {}, + with: { + user: { + extras: (table, { sql }) => ({ + lower: sql`lower(${table.name})`.as('lower_name'), + }), + }, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + lower: string; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lower: string; + }; + }[]; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).toEqual(3); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(2); + expect(response[2]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Group1', + lower: 'group1', + description: null, + usersToGroups: [{ + user: { + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Group2', + lower: 'group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + }, + }, { + user: { + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 3, + name: 'Group3', + lower: 'group3', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test('.toSQL()', () => { + const query = db.query.usersTable.findFirst().toSQL(); + + expect(query).toHaveProperty('sql', expect.any(String)); + expect(query).toHaveProperty('params', expect.any(Array)); +}); + +// + custom + 
where + orderby + +// + custom + where + orderby + limit + +// + partial + +// + partial(false) + +// + partial + orderBy + where (all not selected) + +/* + One four-level relation users+posts+comments+comment_likes + + users+users_to_groups+groups +*/ + +/* + Really hard case + 1. users+posts+comments+comment_likes + 2. users+users_to_groups+groups + 3. users+users +*/ diff --git a/integration-tests/tests/replicas/singlestore.test.ts b/integration-tests/tests/replicas/singlestore.test.ts new file mode 100644 index 000000000..76d84c972 --- /dev/null +++ b/integration-tests/tests/replicas/singlestore.test.ts @@ -0,0 +1,805 @@ +import { sql } from 'drizzle-orm'; +import { drizzle } from 'drizzle-orm/singlestore'; +import { boolean, serial, singlestoreTable, text, withReplicas } from 'drizzle-orm/singlestore-core'; +import { describe, expect, it, vi } from 'vitest'; + +const usersTable = singlestoreTable('users', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), +}); + +const users = singlestoreTable('users', { + id: serial('id' as string).primaryKey(), +}); + +describe('[select] read replicas singlestore', () => { + it('primary select', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + const spyRead2 = vi.spyOn(read2, 'select'); + + const query = db.$primary.select().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(query.toSQL().sql).toEqual('select `id` from `users`'); + + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); + + it('random replica select', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const 
randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + const spyRead2 = vi.spyOn(read2, 'select'); + + const query1 = db.select({ count: sql`count(*)`.as('count') }).from(users).limit(1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + + expect(query1.toSQL().sql).toEqual('select count(*) as `count` from `users` limit ?'); + + const query2 = db.select().from(users); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual('select `id` from `users`'); + }); + + it('single read replica select', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + + const query1 = db.select().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query1.toSQL().sql).toEqual('select `id` from `users`'); + + const query2 = db.select().from(users); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(query2.toSQL().sql).toEqual('select `id` from `users`'); + }); + + it('single read replica select + primary select', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + + const query1 = db.select({ id: users.id }).from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query1.toSQL().sql).toEqual('select `id` from `users`'); + + 
const query2 = db.$primary.select().from(users); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual('select `id` from `users`'); + }); + + it('always first read select', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + const spyRead2 = vi.spyOn(read2, 'select'); + + const query1 = db.select().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual('select `id` from `users`'); + + const query2 = db.select().from(users); + + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query2.toSQL().sql).toEqual('select `id` from `users`'); + }); +}); + +describe('[selectDistinct] read replicas singlestore', () => { + it('primary selectDistinct', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + const spyRead2 = vi.spyOn(read2, 'selectDistinct'); + + const query = db.$primary.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query.toSQL().sql).toEqual('select distinct `id` from `users`'); + }); + + it('random replica selectDistinct', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const randomMockReplica = 
vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + const spyRead2 = vi.spyOn(read2, 'selectDistinct'); + + const query1 = db.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); + + const query2 = db.selectDistinct().from(users); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); + }); + + it('single read replica selectDistinct', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + + const query1 = db.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); + + const query2 = db.selectDistinct().from(users); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); + }); + + it('single read replica selectDistinct + primary selectDistinct', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + + const query1 = db.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + 
expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); + + const query2 = db.$primary.selectDistinct().from(users); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); + }); + + it('always first read selectDistinct', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + const spyRead2 = vi.spyOn(read2, 'selectDistinct'); + + const query1 = db.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); + + const query2 = db.selectDistinct().from(users); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); + }); +}); + +describe('[with] read replicas singlestore', () => { + it('primary with', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + const spyRead2 = vi.spyOn(read2, 'with'); + const obj1 = {} as any; + const obj2 = {} as any; + const obj3 = {} as any; + const obj4 = {} as any; + + db.$primary.with(obj1, obj2, obj3, obj4); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(obj1, obj2, obj3, obj4); + }); + + 
it('random replica with', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + const spyRead2 = vi.spyOn(read2, 'with'); + + db.with(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + + db.with(); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + }); + + it('single read replica with', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + + db.with(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.with(); + expect(spyRead1).toHaveBeenCalledTimes(2); + }); + + it('single read replica with + primary with', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + + db.with(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.$primary.with(); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + }); + + it('always first read with', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + 
const spyRead1 = vi.spyOn(read1, 'with'); + const spyRead2 = vi.spyOn(read2, 'with'); + const obj1 = {} as any; + const obj2 = {} as any; + const obj3 = {} as any; + + db.with(obj1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledWith(obj1); + + db.with(obj2, obj3); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledWith(obj2, obj3); + }); +}); + +describe('[update] replicas singlestore', () => { + it('primary update', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'update'); + const spyRead1 = vi.spyOn(read1, 'update'); + const spyRead2 = vi.spyOn(read2, 'update'); + + const query1 = db.update(users).set({ id: 1 }); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual('update `users` set `id` = ?'); + + const query2 = db.update(users).set({ id: 1 }); + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query2.toSQL().sql).toEqual('update `users` set `id` = ?'); + + const query3 = db.$primary.update(users).set({ id: 1 }); + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query3.toSQL().sql).toEqual('update `users` set `id` = ?'); + }); +}); + +describe('[delete] replicas singlestore', () => { + it('primary delete', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1, read2]); + 
+ const spyPrimary = vi.spyOn(primaryDb, 'delete'); + const spyRead1 = vi.spyOn(read1, 'delete'); + const spyRead2 = vi.spyOn(read2, 'delete'); + + const query1 = db.delete(users); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(users); + expect(query1.toSQL().sql).toEqual('delete from `users`'); + + const query2 = db.delete(users); + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenNthCalledWith(2, users); + expect(query2.toSQL().sql).toEqual('delete from `users`'); + + db.$primary.delete({} as any); + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +describe('[insert] replicas singlestore', () => { + it('primary insert', () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'insert'); + const spyRead1 = vi.spyOn(read1, 'insert'); + const spyRead2 = vi.spyOn(read2, 'insert'); + + const query = db.insert(users).values({ id: 1 }); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(users); + expect(query.toSQL().sql).toEqual('insert into `users` (`id`) values (?)'); + + db.insert(users); + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenNthCalledWith(2, users); + + db.$primary.insert({} as any); + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + 
expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +describe('[execute] replicas singlestore', () => { + it('primary execute', async () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'execute'); + const spyRead1 = vi.spyOn(read1, 'execute'); + const spyRead2 = vi.spyOn(read2, 'execute'); + + expect(db.execute(sql``)).rejects.toThrow(); + + // try { + // db.execute(sql``); + // } catch { /* empty */ } + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + + expect(db.execute(sql``)).rejects.toThrow(); + // try { + // db.execute(sql``); + // } catch { /* empty */ } + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + + expect(db.execute(sql``)).rejects.toThrow(); + // try { + // db.execute(sql``); + // } catch { /* empty */ } + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +describe('[transaction] replicas singlestore', () => { + it('primary transaction', async () => { + const primaryDb = drizzle({} as any); + const read1 = drizzle({} as any); + const read2 = drizzle({} as any); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'transaction'); + const spyRead1 = vi.spyOn(read1, 'transaction'); + const spyRead2 = vi.spyOn(read2, 'transaction'); + const txFn1 = async (tx: any) => { + tx.select().from({} as any); + }; + + expect(db.transaction(txFn1)).rejects.toThrow(); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(txFn1); + + const txFn2 = async 
(tx: any) => { + tx.select().from({} as any); + }; + + expect(db.transaction(txFn2)).rejects.toThrow(); + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenNthCalledWith(2, txFn2); + + expect(db.transaction(async (tx) => { + tx.select().from({} as any); + })).rejects.toThrow(); + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +describe('[findFirst] read replicas singlestore', () => { + it('primary findFirst', () => { + const primaryDb = drizzle({} as any, { schema: { usersTable } }); + const read1 = drizzle({} as any, { schema: { usersTable } }); + const read2 = drizzle({} as any, { schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); + const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); + const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); + const obj = {} as any; + + db.$primary.query.usersTable.findFirst(obj); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(obj); + }); + + it('random replica findFirst', () => { + const primaryDb = drizzle({} as any, { schema: { usersTable } }); + const read1 = drizzle({} as any, { schema: { usersTable } }); + const read2 = drizzle({} as any, { schema: { usersTable } }); + + const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); + const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); + const spyRead2 = 
vi.spyOn(read2['query']['usersTable'], 'findFirst'); + const par1 = {} as any; + + db.query.usersTable.findFirst(par1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledWith(par1); + + const query = db.query.usersTable.findFirst(); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + expect(query.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable` limit ?'); + }); + + it('single read replica findFirst', () => { + const primaryDb = drizzle({} as any, { schema: { usersTable } }); + const read1 = drizzle({} as any, { schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); + const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); + + db.query.usersTable.findFirst(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.query.usersTable.findFirst(); + expect(spyRead1).toHaveBeenCalledTimes(2); + }); + + it('single read replica findFirst + primary findFirst', () => { + const primaryDb = drizzle({} as any, { schema: { usersTable } }); + const read1 = drizzle({} as any, { schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); + const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); + + db.query.usersTable.findFirst(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.$primary.query.usersTable.findFirst(); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + }); + + it('always first read findFirst', () => { + const primaryDb = drizzle({} as any, { schema: { usersTable } }); + const read1 = drizzle({} as any, { schema: { 
usersTable } }); + const read2 = drizzle({} as any, { schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); + const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); + const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); + + db.query.usersTable.findFirst(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + + db.query.usersTable.findFirst(); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +describe('[findMany] read replicas singlestore', () => { + it('primary findMany', () => { + const primaryDb = drizzle({} as any, { schema: { usersTable } }); + const read1 = drizzle({} as any, { schema: { usersTable } }); + const read2 = drizzle({} as any, { schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); + const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); + const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); + const obj = {} as any; + + const query = db.$primary.query.usersTable.findMany(obj); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(obj); + expect(query.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); + }); + + it('random replica findMany', () => { + const primaryDb = drizzle({} as any, { schema: { usersTable } }); + const read1 = drizzle({} as any, { schema: { usersTable } }); + const read2 = drizzle({} as any, { schema: { usersTable } }); + + const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + 
const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); + const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); + const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); + const obj1 = {} as any; + const obj2 = {} as any; + + const query1 = db.query.usersTable.findMany(obj1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); + expect(spyRead1).toHaveBeenCalledWith(obj1); + + const query2 = db.query.usersTable.findMany(obj2); + + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); + expect(spyRead2).toHaveBeenCalledWith(obj2); + }); + + it('single read replica findMany', () => { + const primaryDb = drizzle({} as any, { schema: { usersTable } }); + const read1 = drizzle({} as any, { schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); + const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); + const obj1 = {} as any; + const obj2 = {} as any; + + const query1 = db.query.usersTable.findMany(obj1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledWith(obj1); + expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); + + const query2 = db.query.usersTable.findMany(obj2); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); + expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); + }); + + 
it('single read replica findMany + primary findMany', () => { + const primaryDb = drizzle({} as any, { schema: { usersTable } }); + const read1 = drizzle({} as any, { schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); + const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); + const obj1 = {} as any; + const obj2 = {} as any; + + const query1 = db.query.usersTable.findMany(obj1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledWith(obj1); + expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); + + const query2 = db.$primary.query.usersTable.findMany(obj2); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyPrimary).toHaveBeenNthCalledWith(1, obj2); + expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); + }); + + it('always first read findMany', () => { + const primaryDb = drizzle({} as any, { schema: { usersTable } }); + const read1 = drizzle({} as any, { schema: { usersTable } }); + const read2 = drizzle({} as any, { schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); + const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); + const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); + const obj1 = {} as any; + const obj2 = {} as any; + + const query1 = db.query.usersTable.findMany(obj1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledWith(obj1); + expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` 
`usersTable`'); + + const query2 = db.query.usersTable.findMany(obj2); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); + expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); + }); +}); diff --git a/integration-tests/tests/singlestore/singlestore-common.ts b/integration-tests/tests/singlestore/singlestore-common.ts new file mode 100644 index 000000000..037c27202 --- /dev/null +++ b/integration-tests/tests/singlestore/singlestore-common.ts @@ -0,0 +1,3432 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; +import Docker from 'dockerode'; +import { + and, + asc, + avg, + avgDistinct, + count, + countDistinct, + eq, + exists, + getTableColumns, + gt, + gte, + inArray, + lt, + max, + min, + Name, + notInArray, + placeholder, + sql, + sum, + sumDistinct, + TransactionRollbackError, +} from 'drizzle-orm'; +import type { SingleStoreDatabase } from 'drizzle-orm/singlestore-core'; +import { + alias, + bigint, + boolean, + date, + datetime, + decimal, + except, + getTableConfig, + getViewConfig, + int, + intersect, + json, + mediumint, + primaryKey, + serial, + singlestoreEnum, + singlestoreSchema, + singlestoreTable, + singlestoreTableCreator, + singlestoreView, + smallint, + text, + time, + timestamp, + tinyint, + union, + unionAll, + unique, + uniqueIndex, + uniqueKeyName, + varchar, + year, +} from 'drizzle-orm/singlestore-core'; +import { migrate } from 'drizzle-orm/singlestore/migrator'; +import getPort from 'get-port'; +import { v4 as uuid } from 'uuid'; +import { afterAll, beforeEach, describe, expect, expectTypeOf, test } from 'vitest'; +import { Expect, toLocalDate } from '~/utils.ts'; +import type { Equal } from '~/utils.ts'; + +type TestSingleStoreDB = SingleStoreDatabase; + +declare module 'vitest' { + interface TestContext { + singlestore: { + db: TestSingleStoreDB; + }; + } +} + +const 
ENABLE_LOGGING = false; + +const usersTable = singlestoreTable('userstest', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 6 }).notNull().defaultNow(), +}); + +const users2Table = singlestoreTable('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), +}); + +const citiesTable = singlestoreTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const usersOnUpdate = singlestoreTable('users_on_update', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + updateCounter: int('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), + updatedAt: datetime('updated_at', { mode: 'date', fsp: 6 }).$onUpdateFn(() => new Date()), + alwaysNull: text('always_null').$type().$onUpdateFn(() => null), // need to add $type because $onUpdate add a default value +}); + +const datesTable = singlestoreTable('datestable', { + date: date('date'), + dateAsString: date('date_as_string', { mode: 'string' }), + time: time('time', { fsp: 1 }), + datetime: datetime('datetime', { fsp: 6 }), + datetimeAsString: datetime('datetime_as_string', { fsp: 6, mode: 'string' }), + timestamp: timestamp('timestamp', { fsp: 6 }), + timestampAsString: timestamp('timestamp_as_string', { fsp: 6, mode: 'string' }), + year: year('year'), +}); + +const coursesTable = singlestoreTable('courses', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + categoryId: int('category_id'), +}); + +const courseCategoriesTable = singlestoreTable('course_categories', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const orders = singlestoreTable('orders', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull().$default(() => 'random_string'), + amount: 
int('amount').notNull(), + quantity: int('quantity').notNull(), +}); + +const usersMigratorTable = singlestoreTable('users12', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), +}, (table) => { + return { + name: uniqueIndex('').on(table.name).using('btree'), + }; +}); + +// To test aggregate functions +const aggregateTable = singlestoreTable('aggregate_table', { + id: serial('id').notNull(), + name: text('name').notNull(), + a: int('a'), + b: int('b'), + c: int('c'), + nullOnly: int('null_only'), +}); + +// To test another schema and multischema +const mySchema = singlestoreSchema(`mySchema`); + +const usersMySchemaTable = mySchema.table('userstest', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 6 }).notNull().defaultNow(), +}); + +const users2MySchemaTable = mySchema.table('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), +}); + +const citiesMySchemaTable = mySchema.table('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +let singlestoreContainer: Docker.Container; +export async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { + const docker = new Docker(); + const port = await getPort({ port: 3306 }); + const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => (err ? 
reject(err) : resolve(err))) + ); + + singlestoreContainer = await docker.createContainer({ + Image: image, + Env: ['ROOT_PASSWORD=singlestore'], + name: `drizzle-integration-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await singlestoreContainer.start(); + await new Promise((resolve) => setTimeout(resolve, 4000)); + + return { + connectionString: `singlestore://root:singlestore@localhost:${port}/`, + container: singlestoreContainer, + }; +} + +// Tests are slow so we keep track of the test number +let testRunNumber = 0; + +export function tests(driver?: string) { + describe('common', () => { + afterAll(async () => { + await singlestoreContainer?.stop().catch(console.error); + }); + + beforeEach(async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`drop table if exists userstest`); + await db.execute(sql`drop table if exists users2`); + await db.execute(sql`drop table if exists cities`); + + await db.execute(sql`drop schema if exists \`mySchema\``); + await db.execute(sql`create schema if not exists \`mySchema\``); + + await db.execute( + sql` + create table userstest ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb json, + created_at timestamp not null default now() + ) + `, + ); + + await db.execute( + sql` + create table users2 ( + id serial primary key, + name text not null, + city_id int + ) + `, + ); + + await db.execute( + sql` + create table cities ( + id serial primary key, + name text not null + ) + `, + ); + + // mySchema + await db.execute( + sql` + create table \`mySchema\`.\`userstest\` ( + \`id\` serial primary key, + \`name\` text not null, + \`verified\` boolean not null default false, + \`jsonb\` json, + \`created_at\` timestamp not null default now() + ) + `, + ); + + await db.execute( + sql` + create table \`mySchema\`.\`cities\` ( + \`id\` serial primary key, + \`name\` text not null + ) 
+ `, + ); + + await db.execute( + sql` + create table \`mySchema\`.\`users2\` ( + \`id\` serial primary key, + \`name\` text not null, + \`city_id\` int + ) + `, + ); + + testRunNumber += 1; + console.log(`Test number: ${testRunNumber}`); + }); + + async function setupReturningFunctionsTest(db: SingleStoreDatabase) { + await db.execute(sql`drop table if exists \`users_default_fn\``); + await db.execute( + sql` + create table \`users_default_fn\` ( + \`id\` varchar(256) primary key, + \`name\` text not null + ); + `, + ); + } + + async function setupSetOperationTest(db: TestSingleStoreDB) { + await db.execute(sql`drop table if exists \`users2\``); + await db.execute(sql`drop table if exists \`cities\``); + await db.execute( + sql` + create table \`users2\` ( + \`id\` serial primary key, + \`name\` text not null, + \`city_id\` int + ) + `, + ); + + await db.execute( + sql` + create table \`cities\` ( + \`id\` serial primary key, + \`name\` text not null + ) + `, + ); + + await db.insert(citiesTable).values([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 3 }, + { id: 4, name: 'Peter', cityId: 3 }, + { id: 5, name: 'Ben', cityId: 2 }, + { id: 6, name: 'Jill', cityId: 1 }, + { id: 7, name: 'Mary', cityId: 2 }, + { id: 8, name: 'Sally', cityId: 1 }, + ]); + } + + async function setupAggregateFunctionsTest(db: TestSingleStoreDB) { + await db.execute(sql`drop table if exists \`aggregate_table\``); + await db.execute( + sql` + create table \`aggregate_table\` ( + \`id\` integer primary key auto_increment not null, + \`name\` text not null, + \`a\` integer, + \`b\` integer, + \`c\` integer, + \`null_only\` integer + ); + `, + ); + await db.insert(aggregateTable).values([ + { id: 1, name: 'value 1', a: 5, b: 10, c: 20 }, + { id: 2, name: 'value 1', a: 5, b: 20, c: 30 }, + { id: 3, 
name: 'value 2', a: 10, b: 50, c: 60 }, + { id: 4, name: 'value 3', a: 20, b: 20, c: null }, + { id: 5, name: 'value 4', a: null, b: 90, c: 120 }, + { id: 6, name: 'value 5', a: 80, b: 10, c: null }, + { id: 7, name: 'value 6', a: null, b: null, c: 150 }, + ]); + } + + test('table config: unsigned ints', async () => { + const unsignedInts = singlestoreTable('cities1', { + bigint: bigint('bigint', { mode: 'number', unsigned: true }), + int: int('int', { unsigned: true }), + smallint: smallint('smallint', { unsigned: true }), + mediumint: mediumint('mediumint', { unsigned: true }), + tinyint: tinyint('tinyint', { unsigned: true }), + }); + + const tableConfig = getTableConfig(unsignedInts); + + const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; + const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; + const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; + const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; + const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; + + expect(bigintColumn.getSQLType()).toBe('bigint unsigned'); + expect(intColumn.getSQLType()).toBe('int unsigned'); + expect(smallintColumn.getSQLType()).toBe('smallint unsigned'); + expect(mediumintColumn.getSQLType()).toBe('mediumint unsigned'); + expect(tinyintColumn.getSQLType()).toBe('tinyint unsigned'); + }); + + test('table config: signed ints', async () => { + const unsignedInts = singlestoreTable('cities1', { + bigint: bigint('bigint', { mode: 'number' }), + int: int('int'), + smallint: smallint('smallint'), + mediumint: mediumint('mediumint'), + tinyint: tinyint('tinyint'), + }); + + const tableConfig = getTableConfig(unsignedInts); + + const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; + const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; + const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; + const 
mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; + const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; + + expect(bigintColumn.getSQLType()).toBe('bigint'); + expect(intColumn.getSQLType()).toBe('int'); + expect(smallintColumn.getSQLType()).toBe('smallint'); + expect(mediumintColumn.getSQLType()).toBe('mediumint'); + expect(tinyintColumn.getSQLType()).toBe('tinyint'); + }); + + test('table config: primary keys name', async () => { + const table = singlestoreTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => ({ + f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), + })); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.primaryKeys).toHaveLength(1); + expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); + }); + + test('table configs: unique third param', async () => { + const cities1Table = singlestoreTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => ({ + f: unique('custom_name').on(t.name, t.state), + f1: unique('custom_name1').on(t.name, t.state), + })); + + const tableConfig = getTableConfig(cities1Table); + + expect(tableConfig.uniqueConstraints).toHaveLength(2); + + expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); + expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + + expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); + expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + }); + + test('table configs: unique in column', async () => { + const cities1Table = singlestoreTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull().unique(), + state: text('state').unique('custom'), + field: text('field').unique('custom_field'), + }); + + const tableConfig = getTableConfig(cities1Table); 
+ + const columnName = tableConfig.columns.find((it) => it.name === 'name'); + expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); + expect(columnName?.isUnique).toBeTruthy(); + + const columnState = tableConfig.columns.find((it) => it.name === 'state'); + expect(columnState?.uniqueName).toBe('custom'); + expect(columnState?.isUnique).toBeTruthy(); + + const columnField = tableConfig.columns.find((it) => it.name === 'field'); + expect(columnField?.uniqueName).toBe('custom_field'); + expect(columnField?.isUnique).toBeTruthy(); + }); + + test('select all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('select sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('select typed sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('select with empty array in inArray', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + const result = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + .from(usersTable) + 
.where(inArray(usersTable.id, [])) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([]); + }); + + test('select with empty array in notInArray', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + const result = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + .from(usersTable) + .where(notInArray(usersTable.id, [])) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); + }); + + test('select distinct', async (ctx) => { + const { db } = ctx.singlestore; + + const usersDistinctTable = singlestoreTable('users_distinct', { + id: int('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + }); + + test('insert returning sql', async (ctx) => { + const { db } = ctx.singlestore; + + const [result, _] = await db.insert(usersTable).values({ id: 1, name: 'John' }); + + expect(result.insertId).toBe(1); + }); + + test('delete returning sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(users[0].affectedRows).toBe(1); + }); + + test('update returning sql', async (ctx) => { + const { db } = ctx.singlestore; + + await 
db.insert(usersTable).values({ name: 'John' }); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + expect(users[0].changedRows).toBe(1); + }); + + test('update with returning all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); + }); + + test('update with returning partial', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test('delete with returning all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test('delete with returning partial', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + 
expect(deletedUser[0].affectedRows).toBe(1); + }); + + test('insert + select', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersTable).values({ id: 2, name: 'Jane' }); + const result2 = await db.select().from(usersTable).orderBy(asc(usersTable.id)); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test('json insert', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); + }); + + test('insert with overridden default values', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John', verified: true }); + const result = await db.select().from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('insert many', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, + { id: 3, name: 'Jane' }, + { id: 4, name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }).from(usersTable) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: 
null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test('insert many with returning', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + + expect(result[0].affectedRows).toBe(4); + }); + + test('select with group by as field', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.name) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + }); + + test('select with exists', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const user = alias(usersTable, 'user'); + const result = await db.select({ name: usersTable.name }).from(usersTable).where( + exists( + db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), + ), + ) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }]); + }); + + test('select with group by as sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + }); + + test('$default function', async (ctx) => { + const { db } = 
ctx.singlestore; + + await db.execute(sql`drop table if exists \`orders\``); + await db.execute( + sql` + create table \`orders\` ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); + const selectedOrder = await db.select().from(orders); + + expect(selectedOrder).toEqual([{ + id: 1, + amount: 1, + quantity: 1, + region: 'Ukraine', + product: 'random_string', + }]); + }); + + test('$default with empty array', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`s_orders\``); + await db.execute( + sql` + create table \`s_orders\` ( + \`id\` serial primary key, + \`region\` text default 'Ukraine', + \`product\` text not null + ) + `, + ); + + const users = singlestoreTable('s_orders', { + id: serial('id').primaryKey(), + region: text('region').default('Ukraine'), + product: text('product').$defaultFn(() => 'random_string'), + }); + + await db.insert(users).values({ id: 1 }); + const selectedOrder = await db.select().from(users); + + expect(selectedOrder).toEqual([{ + id: 1, + region: 'Ukraine', + product: 'random_string', + }]); + }); + + test('select with group by as sql + column', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test('select with group by as column + sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await 
db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test('select with group by complex query', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); + }); + + test('build query', async (ctx) => { + const { db } = ctx.singlestore; + + const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, + params: [], + }); + }); + + test('Query check: Insert all defaults in 1 row', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + const query = db + .insert(users) + .values({}) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default)', + params: [], + }); + }); + + test('Query check: Insert all defaults in multiple rows', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state').default('UA'), + }); + + const query = db + .insert(users) + .values([{}, {}]) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into `users` (`id`, `name`, `state`) values (default, default, default), (default, default, 
default)', + params: [], + }); + }); + + test('Insert all defaults in 1 row', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('empty_insert_single', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, + ); + + await db.insert(users).values({ id: 1 }); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); + }); + + test('Insert all defaults in multiple rows', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('empty_insert_multiple', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, + ); + + await db.insert(users).values([{ id: 1 }, { id: 2 }]); + + const res = await db.select().from(users).orderBy(asc(users.id)); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); + }); + + test('build query insert with onDuplicate', async (ctx) => { + const { db } = ctx.singlestore; + + const query = db.insert(usersTable) + .values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }) + .onDuplicateKeyUpdate({ set: { id: 1, name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (?, ?, default, ?, default) on duplicate key update `id` = ?, `name` = ?', + params: [1, 'John', '["foo","bar"]', 1, 'John1'], + }); + }); + + test('insert with onDuplicate', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await 
db.insert(usersTable) + .values({ id: 1, name: 'John' }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); + }); + + test('insert conflict', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await expect((async () => { + db.insert(usersTable).values({ id: 1, name: 'John1' }); + })()).resolves.not.toThrowError(); + }); + + test('insert conflict with ignore', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await db.insert(usersTable) + .ignore() + .values({ id: 1, name: 'John1' }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: sql`${'John'}` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('partial join with alias', async (ctx) => { + const { db } = ctx.singlestore; + const customerAlias = alias(usersTable, 'customer'); + + await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersTable.id, + name: usersTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersTable.id, 10)) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); + }); + + test('full join with 
alias', async (ctx) => { + const { db } = ctx.singlestore; + + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)) + .orderBy(asc(users.id)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('select from alias', async (ctx) => { + const { db } = ctx.singlestore; + + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)) + .orderBy(asc(user.id)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('insert with spaces', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: sql`'Jo h n'` }); + const result = await db.select({ id: 
usersTable.id, name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test('prepared statement', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const statement = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .prepare(); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert: placeholders on columns with encoder', async (ctx) => { + const { db } = ctx.singlestore; + + const date = new Date('2024-08-07T15:30:00Z'); + + const statement = db.insert(usersTable).values({ + id: 1, + name: 'John', + createdAt: sql.placeholder('createdAt'), + }).prepare(); + + await statement.execute({ createdAt: date }); + + const result = await db + .select({ + id: usersTable.id, + createdAt: usersTable.createdAt, + }) + .from(usersTable); + + expect(result).toEqual([ + { id: 1, createdAt: date }, + ]); + }); + + test('prepared statement reuse', async (ctx) => { + const { db } = ctx.singlestore; + + const stmt = db.insert(usersTable).values({ + verified: true, + id: placeholder('id'), + name: placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ id: i + 1, name: `John ${i}` }); + } + + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }).from(usersTable) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + 
]); + }); + + test('prepared statement with placeholder in .where', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const stmt = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .where(eq(usersTable.id, placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('migrator', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists cities_migration`); + await db.execute(sql`drop table if exists users_migration`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists __drizzle_migrations`); + + await migrate(db, { migrationsFolder: './drizzle2/singlestore' }); + + await db.insert(usersMigratorTable).values({ id: 1, name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table cities_migration`); + await db.execute(sql`drop table users_migration`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table __drizzle_migrations`); + }); + + test('insert via db.execute + select via db.execute', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute( + sql`insert into ${usersTable} (${new Name(usersTable.id.name)},${new Name( + usersTable.name.name, + )}) values (1,${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); + expect(result[0]).toEqual([{ id: 1, name: 'John' }]); + }); + + test('insert via db.execute w/ query builder', async (ctx) => { + const { db } = ctx.singlestore; + + const inserted = await db.execute( + db.insert(usersTable).values({ id: 1, name: 'John' }), + ); + expect(inserted[0].affectedRows).toBe(1); + }); + + test('insert + select 
all possible dates', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`datestable\``); + await db.execute( + sql` + create table \`datestable\` ( + \`date\` date, + \`date_as_string\` date, + \`time\` time, + \`datetime\` datetime, + \`datetime_as_string\` datetime, + \`timestamp\` timestamp(6), + \`timestamp_as_string\` timestamp(6), + \`year\` year + ) + `, + ); + + const date = new Date('2022-11-11'); + const dateWithMilliseconds = new Date('2022-11-11 12:12:12.123'); + + await db.insert(datesTable).values({ + date: date, + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: date, + year: 22, + datetimeAsString: '2022-11-11 12:12:12', + timestamp: dateWithMilliseconds, + timestampAsString: '2022-11-11 12:12:12.123', + }); + + const res = await db.select().from(datesTable); + + expect(res[0]?.date).toBeInstanceOf(Date); + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(typeof res[0]?.dateAsString).toBe('string'); + expect(typeof res[0]?.datetimeAsString).toBe('string'); + + expect(res).toEqual([{ + date: toLocalDate(new Date('2022-11-11')), + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: new Date('2022-11-11'), + year: 2022, + datetimeAsString: '2022-11-11 12:12:12', + timestamp: new Date('2022-11-11 12:12:12.123'), + timestampAsString: '2022-11-11 12:12:12.123000', + }]); + + await db.execute(sql`drop table if exists \`datestable\``); + }); + + const tableWithEnums = singlestoreTable('enums_test_case', { + id: serial('id').primaryKey(), + enum1: singlestoreEnum('enum1', ['a', 'b', 'c']).notNull(), + enum2: singlestoreEnum('enum2', ['a', 'b', 'c']).default('a'), + enum3: singlestoreEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), + }); + + test('SingleStore enum test case #1', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`enums_test_case\``); + + await db.execute(sql` + create table \`enums_test_case\` ( + \`id\` serial primary 
key, + \`enum1\` ENUM('a', 'b', 'c') not null, + \`enum2\` ENUM('a', 'b', 'c') default 'a', + \`enum3\` ENUM('a', 'b', 'c') not null default 'b' + ) + `); + + await db.insert(tableWithEnums).values([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum3: 'c' }, + { id: 3, enum1: 'a' }, + ]); + + const res = await db.select().from(tableWithEnums).orderBy(asc(tableWithEnums.id)); + + await db.execute(sql`drop table \`enums_test_case\``); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); + }); + + test('left join (flat object fields)', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: citiesTable.id, + cityName: citiesTable.name, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) + .orderBy(users2Table.id); + + expect(res).toEqual([ + { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, + { userId: 2, userName: 'Jane', cityId: null, cityName: null }, + ]); + }); + + test('left join (grouped fields)', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select({ + id: users2Table.id, + user: { + name: users2Table.name, + nameUpper: sql`upper(${users2Table.name})`, + }, + city: { + id: citiesTable.id, + name: citiesTable.name, + nameUpper: sql`upper(${citiesTable.name})`, + }, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, 
citiesTable.id)) + .orderBy(asc(users2Table.id)); + + expect(res).toEqual([ + { + id: 1, + user: { name: 'John', nameUpper: 'JOHN' }, + city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, + }, + { + id: 2, + user: { name: 'Jane', nameUpper: 'JANE' }, + city: null, + }, + ]); + }); + + test('left join (all fields)', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select().from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) + .orderBy(asc(users2Table.id)); + + expect(res).toEqual([ + { + users2: { + id: 1, + name: 'John', + cityId: 1, + }, + cities: { + id: 1, + name: 'Paris', + }, + }, + { + users2: { + id: 2, + name: 'Jane', + cityId: null, + }, + cities: null, + }, + ]); + }); + + test('join subquery', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`courses\``); + await db.execute(sql`drop table if exists \`course_categories\``); + + await db.execute( + sql` + create table \`course_categories\` ( + \`id\` serial primary key, + \`name\` text not null + ) + `, + ); + + await db.execute( + sql` + create table \`courses\` ( + \`id\` serial primary key, + \`name\` text not null, + \`category_id\` int + ) + `, + ); + + await db.insert(courseCategoriesTable).values([ + { id: 1, name: 'Category 1' }, + { id: 2, name: 'Category 2' }, + { id: 3, name: 'Category 3' }, + { id: 4, name: 'Category 4' }, + ]); + + await db.insert(coursesTable).values([ + { id: 1, name: 'Development', categoryId: 2 }, + { id: 2, name: 'IT & Software', categoryId: 3 }, + { id: 3, name: 'Marketing', categoryId: 4 }, + { id: 4, name: 'Design', categoryId: 1 }, + ]); + + const sq2 = db + .select({ + categoryId: courseCategoriesTable.id, + category: courseCategoriesTable.name, + total: 
sql`count(${courseCategoriesTable.id})`, + }) + .from(courseCategoriesTable) + .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) + .as('sq2'); + + const res = await db + .select({ + courseName: coursesTable.name, + categoryId: sq2.categoryId, + }) + .from(coursesTable) + .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) + .orderBy(coursesTable.name); + + expect(res).toEqual([ + { courseName: 'Design', categoryId: 1 }, + { courseName: 'Development', categoryId: 2 }, + { courseName: 'IT & Software', categoryId: 3 }, + { courseName: 'Marketing', categoryId: 4 }, + ]); + + await db.execute(sql`drop table if exists \`courses\``); + await db.execute(sql`drop table if exists \`course_categories\``); + }); + + test('with ... select', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`orders\``); + await db.execute( + sql` + create table \`orders\` ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: regionalSales.region, + }) + .from(regionalSales) + .where( + gt( + 
regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, + productSales: sql`cast(sum(${orders.amount}) as unsigned)`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + + expect(result).toEqual([ + { + region: 'Europe', + product: 'A', + productUnits: 3, + productSales: 30, + }, + { + region: 'Europe', + product: 'B', + productUnits: 5, + productSales: 50, + }, + { + region: 'US', + product: 'A', + productUnits: 7, + productSales: 70, + }, + { + region: 'US', + product: 'B', + productUnits: 9, + productSales: 90, + }, + ]); + }); + + test('with ... update', async (ctx) => { + const { db } = ctx.singlestore; + + const products = singlestoreTable('products', { + id: serial('id').primaryKey(), + price: decimal('price', { + precision: 15, + scale: 2, + }).notNull(), + cheap: boolean('cheap').notNull().default(false), + }); + + await db.execute(sql`drop table if exists ${products}`); + await db.execute(sql` + create table ${products} ( + id serial primary key, + price decimal(15, 2) not null, + cheap boolean not null default false + ) + `); + + await db.insert(products).values([ + { id: 1, price: '10.99' }, + { id: 2, price: '25.85' }, + { id: 3, price: '32.99' }, + { id: 4, price: '2.50' }, + { id: 5, price: '4.59' }, + ]); + + const averagePrice = db + .$with('average_price') + .as( + db + .select({ + value: sql`avg(${products.price})`.as('value'), + }) + .from(products), + ); + + await db + .with(averagePrice) + .update(products) + .set({ + cheap: true, + }) + .where(lt(products.price, sql`(select * from ${averagePrice})`)); + + const result = await db + .select({ 
+ id: products.id, + }) + .from(products) + .where(eq(products.cheap, true)) + .orderBy(asc(products.id)); + + expect(result).toEqual([ + { id: 1 }, + { id: 4 }, + { id: 5 }, + ]); + }); + + test('with ... delete', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`orders\``); + await db.execute( + sql` + create table \`orders\` ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values([ + { id: 1, region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { id: 2, region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { id: 3, region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { id: 4, region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { id: 5, region: 'US', product: 'A', amount: 30, quantity: 3 }, + { id: 6, region: 'US', product: 'A', amount: 40, quantity: 4 }, + { id: 7, region: 'US', product: 'B', amount: 40, quantity: 4 }, + { id: 8, region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const averageAmount = db + .$with('average_amount') + .as( + db + .select({ + value: sql`avg(${orders.amount})`.as('value'), + }) + .from(orders), + ); + + await db + .with(averageAmount) + .delete(orders) + .where(gt(orders.amount, sql`(select * from ${averageAmount})`)); + + const result = await db + .select({ + id: orders.id, + }) + .from(orders) + .orderBy(asc(orders.id)); + + expect(result).toEqual([ + { id: 1 }, + { id: 2 }, + { id: 3 }, + { id: 4 }, + { id: 5 }, + ]); + }); + + test('select from subquery sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(users2Table).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); + + const sq = db + .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) + .from(users2Table) + .orderBy(asc(users2Table.id)) + .as('sq'); + + const res = await db.select({ name: 
sq.name }).from(sq); + + expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); + }); + + test('select a field without joining its table', (ctx) => { + const { db } = ctx.singlestore; + + expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); + }); + + test('select all fields from subquery without alias', (ctx) => { + const { db } = ctx.singlestore; + + const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); + + expect(() => db.select().from(sq).prepare()).toThrowError(); + }); + + test('select count()', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: sql`count(*)` }).from(usersTable); + + expect(res).toEqual([{ count: 2 }]); + }); + + test('select for ...', (ctx) => { + const { db } = ctx.singlestore; + + { + const query = db.select().from(users2Table).for('update').toSQL(); + expect(query.sql).toMatch(/ for update$/); + } + { + const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); + expect(query.sql).toMatch(/ for share skip locked$/); + } + { + const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); + expect(query.sql).toMatch(/ for update no wait$/); + } + }); + + test('having', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(citiesTable).values([{ id: 1, name: 'London' }, { id: 2, name: 'Paris' }, { + id: 3, + name: 'New York', + }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, { + id: 3, + name: 'Jack', + cityId: 2, + }]); + + const result = await db + .select({ + id: citiesTable.id, + name: sql`upper(${citiesTable.name})`.as('upper_name'), + usersCount: sql`count(${users2Table.id})`.as('users_count'), + }) + .from(citiesTable) + .leftJoin(users2Table, eq(users2Table.cityId, 
citiesTable.id)) + .where(({ name }) => sql`length(${name}) >= 3`) + .groupBy(citiesTable.id) + .having(({ usersCount }) => sql`${usersCount} > 0`) + .orderBy(({ name }) => name); + + expect(result).toEqual([ + { + id: 1, + name: 'LONDON', + usersCount: 2, + }, + { + id: 2, + name: 'PARIS', + usersCount: 1, + }, + ]); + }); + + test('view', async (ctx) => { + const { db } = ctx.singlestore; + + const newYorkers1 = singlestoreView('new_yorkers') + .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); + + const newYorkers2 = singlestoreView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); + + const newYorkers3 = singlestoreView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesTable).values([{ id: 1, name: 'New York' }, { id: 2, name: 'Paris' }]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2).orderBy(asc(newYorkers2.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3).orderBy(asc(newYorkers3.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name 
}).from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); + }); + + test('select from raw sql', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await db.select({ + id: sql`id`, + name: sql`name`, + }).from(sql`(select 1 as id, 'John' as name) as users`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); + }); + + test('select from raw sql with joins', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await db + .select({ + id: sql`users.id`, + name: sql`users.name`, + userCity: sql`users.city`, + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, + ]); + }); + + test('join on aliased sql from select', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await db + .select({ + userId: sql`users.id`.as('userId'), + name: sql`users.name`, + userCity: sql`users.city`, + cityId: sql`cities.id`.as('cityId'), + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test('join on aliased sql from with clause', async (ctx) => { + const { db } = ctx.singlestore; + + const users = db.$with('users').as( + db.select({ + id: sql`id`.as('userId'), + name: sql`name`.as('userName'), + city: sql`city`.as('city'), + 
}).from( + sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, + ), + ); + + const cities = db.$with('cities').as( + db.select({ + id: sql`id`.as('cityId'), + name: sql`name`.as('cityName'), + }).from( + sql`(select 1 as id, 'Paris' as name) as cities`, + ), + ); + + const result = await db + .with(users, cities) + .select({ + userId: users.id, + name: users.name, + userCity: users.city, + cityId: cities.id, + cityName: cities.name, + }) + .from(users) + .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test('prefixed table', async (ctx) => { + const { db } = ctx.singlestore; + + const singlestoreTable = singlestoreTableCreator((name) => `myprefix_${name}`); + + const users = singlestoreTable('test_prefixed_table_with_unique_name', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, + ); + + await db.insert(users).values({ id: 1, name: 'John' }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + + await db.execute(sql`drop table ${users}`); + }); + + test('orderBy with aliased column', (ctx) => { + const { db } = ctx.singlestore; + + const query = db.select({ + test: sql`something`.as('test'), + }).from(users2Table).orderBy((fields) => fields.test).toSQL(); + + expect(query.sql).toBe('select something as `test` from `users2` order by `test`'); + }); + + test('timestamp timezone', async (ctx) => { + const { db } = ctx.singlestore; + + const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); + + await 
db.insert(usersTable).values({ id: 1, name: 'With default times' }); + await db.insert(usersTable).values({ + id: 2, + name: 'Without default times', + createdAt: date, + }); + const users = await db.select().from(usersTable).orderBy(asc(usersTable.id)); + + // check that the timestamps are set correctly for default times + expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); + + // check that the timestamps are set correctly for non default times + expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); + }); + + test('transaction', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users_transactions', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + const products = singlestoreTable('products_transactions', { + id: serial('id').primaryKey(), + price: int('price').notNull(), + stock: int('stock').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop table if exists ${products}`); + + await db.execute(sql`create table users_transactions (id serial not null primary key, balance int not null)`); + await db.execute( + sql`create table products_transactions (id serial not null primary key, price int not null, stock int not null)`, + ); + + const [{ insertId: userId }] = await db.insert(users).values({ id: 1, balance: 100 }); + const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); + const [{ insertId: productId }] = await db.insert(products).values({ id: 1, price: 10, stock: 10 }); + const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + }); + + const result = await 
db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 90 }]); + + await db.execute(sql`drop table ${users}`); + await db.execute(sql`drop table ${products}`); + }); + + test('transaction rollback', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users_transactions_rollback', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_transactions_rollback (id serial not null primary key, balance int not null)`, + ); + + await expect((async () => { + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + + const result = await db.select().from(users); + + expect(result).toEqual([]); + + await db.execute(sql`drop table ${users}`); + }); + + test('join subquery with join', async (ctx) => { + const { db } = ctx.singlestore; + + const internalStaff = singlestoreTable('internal_staff', { + userId: int('user_id').notNull(), + }); + + const customUser = singlestoreTable('custom_user', { + id: int('id').notNull(), + }); + + const ticket = singlestoreTable('ticket', { + staffId: int('staff_id').notNull(), + }); + + await db.execute(sql`drop table if exists ${internalStaff}`); + await db.execute(sql`drop table if exists ${customUser}`); + await db.execute(sql`drop table if exists ${ticket}`); + + await db.execute(sql`create table internal_staff (user_id integer not null)`); + await db.execute(sql`create table custom_user (id integer not null)`); + await db.execute(sql`create table ticket (staff_id integer not null)`); + + await db.insert(internalStaff).values({ userId: 1 }); + await db.insert(customUser).values({ id: 1 }); + await db.insert(ticket).values({ staffId: 1 }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin(customUser, eq(internalStaff.userId, 
customUser.id)) + .as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); + + expect(mainQuery).toEqual([{ + ticket: { staffId: 1 }, + internal_staff: { + internal_staff: { userId: 1 }, + custom_user: { id: 1 }, + }, + }]); + + await db.execute(sql`drop table ${internalStaff}`); + await db.execute(sql`drop table ${customUser}`); + await db.execute(sql`drop table ${ticket}`); + }); + + test('subquery with view', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users_subquery_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 1 }, + { id: 4, name: 'Jill', cityId: 2 }, + ]); + + const sq = db.$with('sq').as(db.select().from(newYorkers)); + const result = await db.with(sq).select().from(sq).orderBy(asc(sq.id)); + + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 1 }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); + }); + + test('join view as subquery', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users_join_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const 
newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 1 }, + { id: 4, name: 'Jill', cityId: 2 }, + ]); + + const sq = db.select().from(newYorkers).as('new_yorkers_sq'); + + const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).orderBy(asc(users.id)); + + expect(result).toEqual([ + { + users_join_view: { id: 1, name: 'John', cityId: 1 }, + new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, + }, + { + users_join_view: { id: 2, name: 'Jane', cityId: 2 }, + new_yorkers_sq: null, + }, + { + users_join_view: { id: 3, name: 'Jack', cityId: 1 }, + new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, + }, + { + users_join_view: { id: 4, name: 'Jill', cityId: 2 }, + new_yorkers_sq: null, + }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); + }); + + test('select iterator', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users_iterator', { + id: serial('id').primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key)`); + + await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); + + const iter = db.select().from(users) + .orderBy(asc(users.id)) + .iterator(); + + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 
}, { id: 3 }]); + }); + + test('select iterator w/ prepared statement', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users_iterator', { + id: serial('id').primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key)`); + + await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); + + const prepared = db.select().from(users) + .orderBy(asc(users.id)) + .prepare(); + const iter = prepared.iterator(); + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); + }); + + test('insert undefined', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.insert(users).values({ name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); + }); + + test('update undefined', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + + await expect((async () => { + await db.update(users).set({ id: 1, name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); + }); + + test('utc config for datetime', async (ctx) => { + const { db } = ctx.singlestore; + + await 
db.execute(sql`drop table if exists \`datestable\``); + await db.execute( + sql` + create table \`datestable\` ( + \`datetime_utc\` datetime(6), + \`datetime\` datetime(6) + ) + `, + ); + const datesTable = singlestoreTable('datestable', { + datetimeUTC: datetime('datetime_utc', { fsp: 6, mode: 'date' }), + datetime: datetime('datetime', { fsp: 6 }), + }); + + const dateObj = new Date('2022-11-11'); + const dateUtc = new Date('2022-11-11T12:12:12.122Z'); + + await db.insert(datesTable).values({ + datetimeUTC: dateUtc, + datetime: dateObj, + }); + + const res = await db.select().from(datesTable); + + const [rawSelect] = await db.execute(sql`select \`datetime_utc\` from \`datestable\``); + const selectedRow = (rawSelect as unknown as [{ datetime_utc: string }])[0]; + + expect(selectedRow.datetime_utc).toBe('2022-11-11 12:12:12.122000'); + expect(new Date(selectedRow.datetime_utc.replace(' ', 'T') + 'Z')).toEqual(dateUtc); + + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(res[0]?.datetimeUTC).toBeInstanceOf(Date); + + expect(res).toEqual([{ + datetimeUTC: dateUtc, + datetime: new Date('2022-11-11'), + }]); + + await db.execute(sql`drop table if exists \`datestable\``); + }); + + // TODO (https://memsql.atlassian.net/browse/MCDB-63261) allow chaining limit and orderby in subquery + test('set operations (union) from query builder with subquery', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + const citiesQuery = db + .select({ + id: citiesTable.id, + name: citiesTable.name, + orderCol: sql`0`.as('orderCol'), + }) + .from(citiesTable); + + const usersQuery = db + .select({ + id: users2Table.id, + name: users2Table.name, + orderCol: sql`1`.as('orderCol'), + }) + .from(users2Table); + + const unionQuery = db + .select({ + id: sql`id`, + name: sql`name`, + }) + .from( + citiesQuery.union(usersQuery).as('combined'), + ) + .orderBy(sql`orderCol`, sql`id`) + .limit(8); + + const result = await unionQuery; + + 
expect(result).toHaveLength(8); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + { id: 1, name: 'John' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Peter' }, + { id: 5, name: 'Ben' }, + ]); + + // union should throw if selected fields are not in the same order + await expect((async () => { + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).union( + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (union) as function', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const result = await union( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'John' }, + ]); + + await expect((async () => { + union( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (union all) from query builder', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq = db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).orderBy(asc(sql`id`)).limit(2).unionAll( + db + .select({ id: citiesTable.id, 
name: citiesTable.name }) + .from(citiesTable).orderBy(asc(sql`id`)).limit(2), + ).as('sq'); + + const result = await db.select().from(sq).orderBy(asc(sql`id`)).limit(3); + + expect(result).toHaveLength(3); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + ]); + + await expect((async () => { + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).limit(2).unionAll( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).limit(2), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test('set operations (union all) as function', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq = unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).as('sq'); + + const result = await db.select().from(sq).limit(1); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(1); + })()).rejects.toThrowError(); + }); + + test('set operations (intersect) from query builder', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq = db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).intersect( + 
db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ) + .as('sq'); + + const result = await db.select().from(sq).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (intersect) as function', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq = await intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).as('sq'); + + const result = await db.select().from(sq).limit(1); + + expect(result).toHaveLength(0); + + expect(result).toEqual([]); + + await expect((async () => { + intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(1); + })()).rejects.toThrowError(); + }); + + test('set operations (except) from query builder', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const result = await db + .select() + .from(citiesTable).except( + db + .select() + .from(citiesTable).where(gt(citiesTable.id, 1)), + ); + + 
expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + }); + + test('set operations (except) as function', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq = await except( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable), + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).as('sq'); + + const result = await db.select().from(sq).limit(3); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + except( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable), + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(3); + })()).rejects.toThrowError(); + }); + + test('set operations (mixed) from query builder', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq1 = unionAll( + db + .select() + .from(citiesTable).where(gt(citiesTable.id, 1)), + db.select().from(citiesTable).where(eq(citiesTable.id, 2)), + ).as('sq1'); + + const sq2 = await db.select().from(sq1).orderBy(asc(sql`id`)).as('sq2'); + + const sq3 = await db.select().from(sq2).limit(1).offset(1).as('sq3'); + + const result = await db + .select() + .from(citiesTable) + .except( + db + .select() + .from(sq3), + ); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 3, name: 'Tampa' }, + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + db + .select() + .from(citiesTable).except( + ({ unionAll }) => + unionAll( 
+ db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + db.select().from(citiesTable).where(eq(citiesTable.id, 2)), + ), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (mixed all) as function with subquery', async (ctx) => { + const { db } = ctx.singlestore; + + await setupSetOperationTest(db); + + const sq1 = except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 7)), + ).as('sq1'); + + const sq2 = await db.select().from(sq1).orderBy(asc(sql`id`)).as('sq2'); + + const sq3 = await db.select().from(sq2).limit(1).as('sq3'); + + const result = await union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db.select().from(sq3), + db + .select().from(citiesTable).where(gt(citiesTable.id, 1)), + ); + + expect(result).toHaveLength(4); + + // multiple results possible as a result of the filters >= 5 and ==7 because singlestore doesn't guarantee order + // dynamically validate results + const hasValidEntry = (entry: { id: number; name: string }) => { + if (entry.id === 1) return entry.name === 'John'; + if (entry.id > 1 && entry.id < 5) return entry.name === 'Tampa' || entry.name === 'London'; + if (entry.id >= 5 && entry.id !== 7) return true; // Accept any entry with id >= 5 and not 7 + return false; + }; + + for (const entry of result) { + expect(hasValidEntry(entry)).toBe(true); + } + + await expect((async () => { + union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ name: users2Table.name, id: users2Table.id }) + 
.from(users2Table).where(eq(users2Table.id, 7)), + ).limit(1), + db + .select().from(citiesTable).where(gt(citiesTable.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('aggregate function: count', async (ctx) => { + const { db } = ctx.singlestore; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: count() }).from(table); + const result2 = await db.select({ value: count(table.a) }).from(table); + const result3 = await db.select({ value: countDistinct(table.name) }).from(table); + + expect(result1[0]?.value).toBe(7); + expect(result2[0]?.value).toBe(5); + expect(result3[0]?.value).toBe(6); + }); + + test('aggregate function: avg', async (ctx) => { + const { db } = ctx.singlestore; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: avg(table.b) }).from(table); + const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); + const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('33.3333'); + expect(result2[0]?.value).toBe(null); + expect(result3[0]?.value).toBe('42.5000'); + }); + + test('aggregate function: sum', async (ctx) => { + const { db } = ctx.singlestore; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: sum(table.b) }).from(table); + const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); + const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('200'); + expect(result2[0]?.value).toBe(null); + expect(result3[0]?.value).toBe('170'); + }); + + test('aggregate function: max', async (ctx) => { + const { db } = ctx.singlestore; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: max(table.b) }).from(table); + const result2 = await 
db.select({ value: max(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(90); + expect(result2[0]?.value).toBe(null); + }); + + test('aggregate function: min', async (ctx) => { + const { db } = ctx.singlestore; + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: min(table.b) }).from(table); + const result2 = await db.select({ value: min(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(10); + expect(result2[0]?.value).toBe(null); + }); + + test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table ${usersOnUpdate} ( + id serial not null primary key, + name text not null, + update_counter integer default 1 not null, + updated_at datetime(6), + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Jill' }, + ]); + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate); + + const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, + ]); + const msDelay = 750; + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table 
${usersOnUpdate} ( + id serial not null primary key, + name text not null, + update_counter integer default 1 not null, + updated_at datetime(6), + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { id: 1, name: 'John', alwaysNull: 'this will will be null after updating' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Jill' }, + ]); + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + const initial = await db.select({ id: usersOnUpdate.id, updatedAt: usersOnUpdate.updatedAt }).from(usersOnUpdate); + + await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); + + const justDates = await db.select({ id: usersOnUpdate.id, updatedAt: usersOnUpdate.updatedAt }).from( + usersOnUpdate, + ); + + const response = await db.select().from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { id: 1, name: 'Angel', updateCounter: 2, updatedAt: expect.any(Date), alwaysNull: null }, + { id: 2, name: 'Jane', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, + { id: 3, name: 'Jack', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, + { id: 4, name: 'Jill', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, + ]); + + const initialRecord = initial.find((record) => record.id === 1); + const updatedRecord = justDates.find((record) => record.id === 1); + + expect(initialRecord?.updatedAt?.valueOf()).not.toBe(updatedRecord?.updatedAt?.valueOf()); + + const msDelay = 1000; + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + // mySchema tests + test('mySchema :: select all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone 
based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('mySchema :: select sql', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('mySchema :: select typed sql', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('mySchema :: select distinct', async (ctx) => { + const { db } = ctx.singlestore; + + const usersDistinctTable = singlestoreTable('users_distinct', { + id: int('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + }); + + test('mySchema :: insert returning sql', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table 
\`mySchema\`.\`userstest\``); + + const [result, _] = await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + + expect(result.insertId).toBe(1); + }); + + test('mySchema :: delete returning sql', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); + + expect(users[0].affectedRows).toBe(1); + }); + + test('mySchema :: update with returning partial', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where( + eq(usersMySchemaTable.name, 'John'), + ); + + const users = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( + usersMySchemaTable, + ) + .where( + eq(usersMySchemaTable.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test('mySchema :: delete with returning all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test('mySchema :: insert + select', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersMySchemaTable).values({ id: 2, 
name: 'Jane' }); + const result2 = await db.select().from(usersMySchemaTable).orderBy(asc(usersMySchemaTable.id)); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test('mySchema :: insert with overridden default values', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John', verified: true }); + const result = await db.select().from(usersMySchemaTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('mySchema :: insert many', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, + { id: 3, name: 'Jane' }, + { id: 4, name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + jsonb: usersMySchemaTable.jsonb, + verified: usersMySchemaTable.verified, + }).from(usersMySchemaTable) + .orderBy(asc(usersMySchemaTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test('mySchema :: select with group by as field', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { + id: 3, + name: 'Jane', + }]); + + const result = 
await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.name) + .orderBy(asc(usersMySchemaTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + }); + + test('mySchema :: select with group by as column + sql', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { + id: 3, + name: 'Jane', + }]); + + const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`) + .orderBy(asc(usersMySchemaTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test('mySchema :: build query', async (ctx) => { + const { db } = ctx.singlestore; + + const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: + `select \`id\`, \`name\` from \`mySchema\`.\`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, + params: [], + }); + }); + + test('mySchema :: insert with spaces', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: sql`'Jo h n'` }); + const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( + usersMySchemaTable, + ); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test('mySchema :: prepared statement with placeholder in .where', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + const stmt = db.select({ 
+ id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + }).from(usersMySchemaTable) + .where(eq(usersMySchemaTable.id, sql.placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('mySchema :: select from tables with same name from different schema using alias', async (ctx) => { + const { db } = ctx.singlestore; + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.execute(sql`drop table if exists \`userstest\``); + await db.execute( + sql` + create table \`userstest\` ( + \`id\` serial primary key, + \`name\` text not null, + \`verified\` boolean not null default false, + \`jsonb\` json, + \`created_at\` timestamp not null default now() + ) + `, + ); + + await db.insert(usersMySchemaTable).values({ id: 10, name: 'Ivan' }); + await db.insert(usersTable).values({ id: 11, name: 'Hans' }); + + const customerAlias = alias(usersTable, 'customer'); + + const result = await db + .select().from(usersMySchemaTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersMySchemaTable.id, 10)); + + expect(result).toEqual([{ + userstest: { + id: 10, + name: 'Ivan', + verified: false, + jsonb: null, + createdAt: result[0]!.userstest.createdAt, + }, + customer: { + id: 11, + name: 'Hans', + verified: false, + jsonb: null, + createdAt: result[0]!.customer!.createdAt, + }, + }]); + }); + + test('insert $returningId: serial as id', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await db.insert(usersTable).values({ id: 1, name: 'John' }).$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + }[]>(); + + expect(result).toStrictEqual([{ id: 1 }]); + }); + + test('insert $returningId: serial as id, batch insert', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'John1' }]) + .$returningId(); + + 
expectTypeOf(result).toEqualTypeOf<{ + id: number; + }[]>(); + + // singlestore auto increments when batch inserting, so the ids increment by one + expect(result).toStrictEqual([{ id: 2 }, { id: 3 }]); + }); + + test('insert $returningId: $default as primary key', async (ctx) => { + const { db } = ctx.singlestore; + + const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; + let iterator = 0; + + const usersTableDefFn = singlestoreTable('users_default_fn', { + customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { + const value = uniqueKeys[iterator]!; + iterator++; + return value; + }), + name: text('name').notNull(), + }); + + await setupReturningFunctionsTest(db); + + const result = await db.insert(usersTableDefFn).values([{ name: 'John' }, { name: 'John1' }]) + // ^? + .$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + customId: string; + }[]>(); + + expect(result).toStrictEqual([{ customId: 'ao865jf3mcmkfkk8o5ri495z' }, { + customId: 'dyqs529eom0iczo2efxzbcut', + }]); + }); + + test('insert $returningId: $default as primary key with value', async (ctx) => { + const { db } = ctx.singlestore; + + const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; + let iterator = 0; + + const usersTableDefFn = singlestoreTable('users_default_fn', { + customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { + const value = uniqueKeys[iterator]!; + iterator++; + return value; + }), + name: text('name').notNull(), + }); + + await setupReturningFunctionsTest(db); + + const result = await db.insert(usersTableDefFn).values([{ name: 'John', customId: 'test' }, { name: 'John1' }]) + // ^? 
+ .$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + customId: string; + }[]>(); + + expect(result).toStrictEqual([{ customId: 'test' }, { customId: 'ao865jf3mcmkfkk8o5ri495z' }]); + }); + + test('mySchema :: view', async (ctx) => { + const { db } = ctx.singlestore; + + const newYorkers1 = mySchema.view('new_yorkers') + .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); + + const newYorkers2 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); + + const newYorkers3 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesMySchemaTable).values([{ id: 1, name: 'New York' }, { id: 2, name: 'Paris' }]); + + await db.insert(users2MySchemaTable).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2).orderBy(asc(newYorkers2.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3).orderBy(asc(newYorkers3.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { name: 'John' }, + { name: 
'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); + }); + + test('limit 0', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .select() + .from(usersTable) + .limit(0); + + expect(users).toEqual([]); + }); + + test('limit -1', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .select() + .from(usersTable) + .limit(-1); + + expect(users.length).toBeGreaterThan(0); + }); + }); +} diff --git a/integration-tests/tests/singlestore/singlestore-custom.test.ts b/integration-tests/tests/singlestore/singlestore-custom.test.ts new file mode 100644 index 000000000..b05cd756b --- /dev/null +++ b/integration-tests/tests/singlestore/singlestore-custom.test.ts @@ -0,0 +1,827 @@ +import retry from 'async-retry'; +import type Docker from 'dockerode'; +import { asc, eq, Name, placeholder, sql } from 'drizzle-orm'; +import type { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; +import { drizzle } from 'drizzle-orm/singlestore'; +import { + alias, + binary, + customType, + date, + datetime, + serial, + singlestoreEnum, + singlestoreTable, + singlestoreTableCreator, + text, + time, + varchar, + year, +} from 'drizzle-orm/singlestore-core'; +import { migrate } from 'drizzle-orm/singlestore/migrator'; +import * as mysql2 from 'mysql2/promise'; +import { v4 as uuid } from 'uuid'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { toLocalDate } from '~/utils'; +import { createDockerDB } from './singlestore-common'; + +const ENABLE_LOGGING = false; + +let db: SingleStoreDriverDatabase; +let client: mysql2.Connection; +let container: Docker.Container | undefined; + +beforeAll(async () => { + let connectionString; + if (process.env['SINGLESTORE_CONNECTION_STRING']) { + connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; + } else { + const { 
connectionString: conStr, container: contrainerObj } = await createDockerDB(); + connectionString = conStr; + container = contrainerObj; + } + client = await retry(async () => { + client = await mysql2.createConnection(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); + await client.changeUser({ database: 'drizzle' }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); + await container?.stop().catch(console.error); +}); + +beforeEach((ctx) => { + ctx.singlestore = { + db, + }; +}); + +const customSerial = customType<{ data: number; notNull: true; default: true }>({ + dataType() { + return 'serial'; + }, +}); + +const customText = customType<{ data: string }>({ + dataType() { + return 'text'; + }, +}); + +const customBoolean = customType<{ data: boolean }>({ + dataType() { + return 'boolean'; + }, + fromDriver(value) { + if (typeof value === 'boolean') { + return value; + } + return value === 1; + }, +}); + +const customJson = (name: string) => + customType<{ data: TData; driverData: string }>({ + dataType() { + return 'json'; + }, + toDriver(value: TData): string { + return JSON.stringify(value); + }, + })(name); + +const customTimestamp = customType< + { data: Date; driverData: string; config: { fsp: number } } +>({ + dataType(config) { + const precision = config?.fsp === undefined ? '' : ` (${config.fsp})`; + return `timestamp${precision}`; + }, + fromDriver(value: string): Date { + return new Date(value); + }, +}); + +const customBinary = customType<{ data: string; driverData: Buffer; config: { length: number } }>({ + dataType(config) { + return config?.length === undefined + ? 
`binary` + : `binary(${config.length})`; + }, + + toDriver(value) { + return sql`UNHEX(${value})`; + }, + + fromDriver(value) { + return value.toString('hex'); + }, +}); + +const usersTable = singlestoreTable('userstest', { + id: customSerial('id').primaryKey(), + name: customText('name').notNull(), + verified: customBoolean('verified').notNull().default(false), + jsonb: customJson('jsonb'), + createdAt: customTimestamp('created_at', { fsp: 6 }).notNull().default(sql`now()`), +}); + +const datesTable = singlestoreTable('datestable', { + date: date('date'), + dateAsString: date('date_as_string', { mode: 'string' }), + time: time('time', { fsp: 1 }), + datetime: datetime('datetime', { fsp: 6 }), + datetimeAsString: datetime('datetime_as_string', { fsp: 6, mode: 'string' }), + year: year('year'), +}); + +export const testTable = singlestoreTable('test_table', { + id: customBinary('id', { length: 16 }).primaryKey(), + sqlId: binary('sql_id', { length: 16 }), + rawId: varchar('raw_id', { length: 64 }), +}); + +const usersMigratorTable = singlestoreTable('users12', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), +}); + +beforeEach(async () => { + await db.execute(sql`drop table if exists \`userstest\``); + await db.execute(sql`drop table if exists \`datestable\``); + await db.execute(sql`drop table if exists \`test_table\``); + // await ctx.db.execute(sql`create schema public`); + await db.execute( + sql` + create table \`userstest\` ( + \`id\` serial primary key, + \`name\` text not null, + \`verified\` boolean not null default false, + \`jsonb\` json, + \`created_at\` timestamp not null default now() + ) + `, + ); + + await db.execute( + sql` + create table \`datestable\` ( + \`date\` date, + \`date_as_string\` date, + \`time\` time, + \`datetime\` datetime, + \`datetime_as_string\` datetime, + \`year\` year + ) + `, + ); + + await db.execute( + sql` + create table \`test_table\` ( + \`id\` binary(16) primary key, + 
\`sql_id\` binary(16), + \`raw_id\` varchar(64) + ) + `, + ); +}); + +test('select all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('select sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('select typed sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('insert returning sql', async (ctx) => { + const { db } = ctx.singlestore; + + const [result, _] = await db.insert(usersTable).values({ id: 1, name: 'John' }); + + expect(result.insertId).toBe(1); +}); + +test('delete returning sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(users[0].affectedRows).toBe(1); +}); + +test('update returning sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + expect(users[0].changedRows).toBe(1); +}); + +test('update with returning all fields', async (ctx) => { + const { db } = 
ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); +}); + +test('update with returning partial', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); +}); + +test('delete with returning all fields', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); +}); + +test('delete with returning partial', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); +}); + +test('insert + select', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, 
createdAt: result[0]!.createdAt }]); + + await db.insert(usersTable).values({ id: 2, name: 'Jane' }); + const result2 = await db.select().from(usersTable).orderBy(asc(usersTable.id)); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); +}); + +test('json insert', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); +}); + +test('insert with overridden default values', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John', verified: true }); + const result = await db.select().from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('insert many', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, + { id: 3, name: 'Jane' }, + { id: 4, name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }).from(usersTable).orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); +}); + +test('insert many with returning', async (ctx) => { + const { db } = ctx.singlestore; + + const result = await 
db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + + expect(result[0].affectedRows).toBe(4); +}); + +test('select with group by as field', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.name).orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); +}); + +test('select with group by as sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`).orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); +}); + +test('select with group by as sql + column', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id).orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with group by as column + sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`).orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with 
group by complex query', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); +}); + +test('build query', async (ctx) => { + const { db } = ctx.singlestore; + + const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, + params: [], + }); +}); + +test('build query insert with onDuplicate', async (ctx) => { + const { db } = ctx.singlestore; + + const query = db.insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (default, ?, default, ?, default) on duplicate key update `name` = ?', + params: ['John', '["foo","bar"]', 'John1'], + }); +}); + +test('insert with onDuplicate', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); +}); + +test('insert conflict', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await expect((async () => { + db.insert(usersTable).values({ id: 1, name: 'John1' 
}); + })()).resolves.not.toThrowError(); +}); + +test('insert conflict with ignore', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await db.insert(usersTable) + .ignore() + .values({ id: 1, name: 'John1' }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert sql', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: sql`${'John'}` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('partial join with alias', async (ctx) => { + const { db } = ctx.singlestore; + const customerAlias = alias(usersTable, 'customer'); + + await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersTable.id, + name: usersTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersTable.id, 10)) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); +}); + +test('full join with alias', async (ctx) => { + const { db } = ctx.singlestore; + + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' 
}]); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)) + .orderBy(asc(users.id)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('select from alias', async (ctx) => { + const { db } = ctx.singlestore; + + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)) + .orderBy(asc(user.id)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('insert with spaces', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: sql`'Jo h n'` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); +}); + +test('prepared statement', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const statement = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .prepare(); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('prepared statement reuse', async (ctx) => { + const { 
db } = ctx.singlestore; + + const stmt = db.insert(usersTable).values({ + id: placeholder('id'), + verified: true, + name: placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ id: i + 1, name: `John ${i}` }); + } + + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }).from(usersTable).orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); +}); + +test('prepared statement with placeholder in .where', async (ctx) => { + const { db } = ctx.singlestore; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const stmt = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .where(eq(usersTable.id, placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('migrator', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists cities_migration`); + await db.execute(sql`drop table if exists users_migration`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists __drizzle_migrations`); + + await migrate(db, { migrationsFolder: './drizzle2/singlestore' }); + + await db.insert(usersMigratorTable).values({ id: 1, name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop 
table cities_migration`); + await db.execute(sql`drop table users_migration`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table __drizzle_migrations`); +}); + +test('insert via db.execute + select via db.execute', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute( + sql`insert into ${usersTable} (${new Name(usersTable.id.name)}, ${new Name( + usersTable.name.name, + )}) values (1,${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); + expect(result[0]).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async (ctx) => { + const { db } = ctx.singlestore; + + const inserted = await db.execute( + db.insert(usersTable).values({ name: 'John' }), + ); + expect(inserted[0].affectedRows).toBe(1); +}); + +test('insert + select all possible dates', async (ctx) => { + const { db } = ctx.singlestore; + + const date = new Date('2022-11-11'); + + await db.insert(datesTable).values({ + date: date, + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: date, + year: 22, + datetimeAsString: '2022-11-11 12:12:12', + }); + + const res = await db.select().from(datesTable); + + expect(res[0]?.date).toBeInstanceOf(Date); + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(res[0]?.dateAsString).toBeTypeOf('string'); + expect(res[0]?.datetimeAsString).toBeTypeOf('string'); + + expect(res).toEqual([{ + date: toLocalDate(new Date('2022-11-11')), + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: new Date('2022-11-11'), + year: 2022, + datetimeAsString: '2022-11-11 12:12:12', + }]); +}); + +const tableWithEnums = singlestoreTable('enums_test_case', { + id: serial('id').primaryKey(), + enum1: singlestoreEnum('enum1', ['a', 'b', 'c']).notNull(), + enum2: singlestoreEnum('enum2', ['a', 'b', 'c']).default('a'), + enum3: singlestoreEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), +}); + +test('SingleStore 
enum test case #1', async (ctx) => { + const { db } = ctx.singlestore; + + await db.execute(sql`drop table if exists \`enums_test_case\``); + + await db.execute(sql` + create table \`enums_test_case\` ( + \`id\` serial primary key, + \`enum1\` ENUM('a', 'b', 'c') not null, + \`enum2\` ENUM('a', 'b', 'c') default 'a', + \`enum3\` ENUM('a', 'b', 'c') not null default 'b' + ) + `); + + await db.insert(tableWithEnums).values([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum3: 'c' }, + { id: 3, enum1: 'a' }, + ]); + + const res = await db.select().from(tableWithEnums).orderBy(asc(tableWithEnums.id)); + + await db.execute(sql`drop table \`enums_test_case\``); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); +}); + +test('custom binary', async (ctx) => { + const { db } = ctx.singlestore; + + const id = uuid().replace(/-/g, ''); + await db.insert(testTable).values({ + id, + sqlId: sql`UNHEX(${id})`, + rawId: id, + }); + + const res = await db.select().from(testTable); + + expect(res).toEqual([{ + id, + sqlId: Buffer.from(id, 'hex'), + rawId: id, + }]); +}); diff --git a/integration-tests/tests/singlestore/singlestore-prefixed.test.ts b/integration-tests/tests/singlestore/singlestore-prefixed.test.ts new file mode 100644 index 000000000..224ad433d --- /dev/null +++ b/integration-tests/tests/singlestore/singlestore-prefixed.test.ts @@ -0,0 +1,1572 @@ +import retry from 'async-retry'; +import type Docker from 'dockerode'; +import type { Equal } from 'drizzle-orm'; +import { asc, eq, getTableName, gt, inArray, Name, sql, TransactionRollbackError } from 'drizzle-orm'; +import type { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; +import { drizzle } from 'drizzle-orm/singlestore'; +import { + alias, + boolean, + date, + datetime, + getViewConfig, + int, + json, + serial, + singlestoreEnum, + singlestoreTable 
as singlestoreTableRaw, + singlestoreTableCreator, + singlestoreView, + text, + time, + timestamp, + uniqueIndex, + year, +} from 'drizzle-orm/singlestore-core'; +import { migrate } from 'drizzle-orm/singlestore/migrator'; +import * as mysql2 from 'mysql2/promise'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { Expect, toLocalDate } from '~/utils'; +import { createDockerDB } from './singlestore-common'; + +const ENABLE_LOGGING = false; + +let db: SingleStoreDriverDatabase; +let client: mysql2.Connection; +let container: Docker.Container | undefined; + +beforeAll(async () => { + let connectionString; + if (process.env['SINGLESTORE_CONNECTION_STRING']) { + connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; + } else { + const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); + connectionString = conStr; + container = contrainerObj; + } + client = await retry(async () => { + client = await mysql2.createConnection(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + + await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); + await client.changeUser({ database: 'drizzle' }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); + await container?.stop().catch(console.error); +}); + +const tablePrefix = 'drizzle_tests_'; + +const singlestoreTable = singlestoreTableCreator((name) => `${tablePrefix}${name}`); +const usersTable = singlestoreTable('userstest', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 6 }).notNull().defaultNow(), +}); + +const users2Table = singlestoreTable('users2', { + id: serial('id').primaryKey(), + name: 
text('name').notNull(), + cityId: int('city_id'), +}); + +const citiesTable = singlestoreTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +beforeEach(async () => { + await db.execute(sql`drop table if exists ${usersTable}`); + await db.execute(sql`drop table if exists ${users2Table}`); + await db.execute(sql`drop table if exists ${citiesTable}`); + + await db.execute( + sql` + create table ${usersTable} ( + \`id\` serial primary key, + \`name\` text not null, + \`verified\` boolean not null default false, + \`jsonb\` json, + \`created_at\` timestamp not null default now() + ) + `, + ); + + await db.execute( + sql` + create table ${users2Table} ( + \`id\` serial primary key, + \`name\` text not null, + \`city_id\` int + ) + `, + ); + + await db.execute( + sql` + create table ${citiesTable} ( + \`id\` serial primary key, + \`name\` text not null + ) + `, + ); +}); + +test('select all fields', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('select sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('select typed sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('select distinct', async () => { + const usersDistinctTable = singlestoreTable('users_distinct', { + id: 
int('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); +}); + +test('insert returning sql', async () => { + const [result, _] = await db.insert(usersTable).values({ id: 1, name: 'John' }); + + expect(result.insertId).toBe(1); +}); + +test('delete returning sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(users[0].affectedRows).toBe(1); +}); + +test('update returning sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + expect(users[0].changedRows).toBe(1); +}); + +test('update with returning all fields', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); 
+}); + +test('update with returning partial', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); +}); + +test('delete with returning all fields', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); +}); + +test('delete with returning partial', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); +}); + +test('insert + select', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersTable).values({ id: 2, name: 'Jane' }); + const result2 = await db.select().from(usersTable).orderBy(asc(usersTable.id)); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); +}); + +test('json insert', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); +}); + +test('insert with overridden default 
values', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John', verified: true }); + const result = await db.select().from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('insert many', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, + { id: 3, name: 'Jane' }, + { id: 4, name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }).from(usersTable) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); +}); + +test('insert many with returning', async () => { + const result = await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + + expect(result[0].affectedRows).toBe(4); +}); + +test('select with group by as field', async () => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.name) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); +}); + +test('select with group by as sql', async () => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 
'John' }, { name: 'Jane' }]); +}); + +test('select with group by as sql + column', async () => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with group by as column + sql', async () => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with group by complex query', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); +}); + +test('build query', async () => { + const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: `select \`id\`, \`name\` from \`${getTableName(usersTable)}\` group by \`${ + getTableName(usersTable) + }\`.\`id\`, \`${getTableName(usersTable)}\`.\`name\``, + params: [], + }); +}); + +test('build query insert with onDuplicate', async () => { + const query = db.insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: `insert into \`${ + getTableName(usersTable) + }\` (\`id\`, \`name\`, 
\`verified\`, \`jsonb\`, \`created_at\`) values (default, ?, default, ?, default) on duplicate key update \`name\` = ?`, + params: ['John', '["foo","bar"]', 'John1'], + }); +}); + +test('insert with onDuplicate', async () => { + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); +}); + +test('insert conflict', async () => { + await db.insert(usersTable) + .values({ name: 'John' }); + + await expect((async () => { + db.insert(usersTable).values({ id: 1, name: 'John1' }); + })()).resolves.not.toThrowError(); +}); + +test('insert conflict with ignore', async () => { + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await db.insert(usersTable) + .ignore() + .values({ id: 1, name: 'John1' }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert sql', async () => { + await db.insert(usersTable).values({ id: 1, name: sql`${'John'}` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('partial join with alias', async () => { + const customerAlias = alias(usersTable, 'customer'); + + await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersTable.id, + name: usersTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersTable.id, 10)) + .orderBy(asc(usersTable.id)); + + 
expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); +}); + +test('full join with alias', async () => { + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)) + .orderBy(asc(users.id)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('select from alias', async () => { + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)) + .orderBy(asc(user.id)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('insert with spaces', async () => { + await db.insert(usersTable).values({ id: 1, name: sql`'Jo h n'` }); + const result = await 
db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); +}); + +test('prepared statement', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const statement = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .prepare(); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('prepared statement reuse', async () => { + const stmt = db.insert(usersTable).values({ + verified: true, + id: sql.placeholder('id'), + name: sql.placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ id: i + 1, name: `John ${i}` }); + } + + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }).from(usersTable) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); +}); + +test('prepared statement with placeholder in .where', async () => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const stmt = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .where(eq(usersTable.id, sql.placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('migrator', async () => { + const usersMigratorTable = singlestoreTableRaw('users12', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + email: 
text('email').notNull(), + }, (table) => { + return { + name: uniqueIndex('').on(table.name).using('btree'), + }; + }); + + await db.execute(sql.raw(`drop table if exists cities_migration`)); + await db.execute(sql.raw(`drop table if exists users_migration`)); + await db.execute(sql.raw(`drop table if exists users12`)); + await db.execute(sql.raw(`drop table if exists __drizzle_migrations`)); + + await migrate(db, { migrationsFolder: './drizzle2/singlestore' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql.raw(`drop table cities_migration`)); + await db.execute(sql.raw(`drop table users_migration`)); + await db.execute(sql.raw(`drop table users12`)); + await db.execute(sql.raw(`drop table __drizzle_migrations`)); +}); + +test('insert via db.execute + select via db.execute', async () => { + await db.execute( + sql`insert into ${usersTable} (${new Name(usersTable.id.name)}, ${new Name( + usersTable.name.name, + )}) values (1, ${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); + expect(result[0]).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async () => { + const inserted = await db.execute( + db.insert(usersTable).values({ name: 'John' }), + ); + expect(inserted[0].affectedRows).toBe(1); +}); + +test('insert + select all possible dates', async () => { + const datesTable = singlestoreTable('datestable', { + date: date('date'), + dateAsString: date('date_as_string', { mode: 'string' }), + time: time('time', { fsp: 1 }), + datetime: datetime('datetime', { fsp: 6 }), + datetimeAsString: datetime('datetime_as_string', { fsp: 6, mode: 'string' }), + year: year('year'), + }); + + await db.execute(sql`drop table if exists ${datesTable}`); + await db.execute( + sql` 
+ create table ${datesTable} ( + \`date\` date, + \`date_as_string\` date, + \`time\` time, + \`datetime\` datetime, + \`datetime_as_string\` datetime, + \`year\` year + ) + `, + ); + + const d = new Date('2022-11-11'); + + await db.insert(datesTable).values({ + date: d, + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: d, + year: 22, + datetimeAsString: '2022-11-11 12:12:12', + }); + + const res = await db.select().from(datesTable); + + expect(res[0]?.date).toBeInstanceOf(Date); + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(typeof res[0]?.dateAsString).toBe('string'); + expect(typeof res[0]?.datetimeAsString).toBe('string'); + + expect(res).toEqual([{ + date: toLocalDate(new Date('2022-11-11')), + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: new Date('2022-11-11'), + year: 2022, + datetimeAsString: '2022-11-11 12:12:12', + }]); + + await db.execute(sql`drop table ${datesTable}`); +}); + +test('SingleStore enum test case #1', async () => { + const tableWithEnums = singlestoreTable('enums_test_case', { + id: serial('id').primaryKey(), + enum1: singlestoreEnum('enum1', ['a', 'b', 'c']).notNull(), + enum2: singlestoreEnum('enum2', ['a', 'b', 'c']).default('a'), + enum3: singlestoreEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), + }); + + await db.execute(sql`drop table if exists ${tableWithEnums}`); + + await db.execute(sql` + create table ${tableWithEnums} ( + \`id\` serial primary key, + \`enum1\` ENUM('a', 'b', 'c') not null, + \`enum2\` ENUM('a', 'b', 'c') default 'a', + \`enum3\` ENUM('a', 'b', 'c') not null default 'b' + ) + `); + + await db.insert(tableWithEnums).values([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum3: 'c' }, + { id: 3, enum1: 'a' }, + ]); + + const res = await db.select().from(tableWithEnums).orderBy(asc(tableWithEnums.id)); + + await db.execute(sql`drop table ${tableWithEnums}`); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, 
enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); +}); + +test('left join (flat object fields)', async () => { + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: citiesTable.id, + cityName: citiesTable.name, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) + .orderBy(asc(users2Table.id)); + + expect(res).toEqual([ + { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, + { userId: 2, userName: 'Jane', cityId: null, cityName: null }, + ]); +}); + +test('left join (grouped fields)', async () => { + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select({ + id: users2Table.id, + user: { + name: users2Table.name, + nameUpper: sql`upper(${users2Table.name})`, + }, + city: { + id: citiesTable.id, + name: citiesTable.name, + nameUpper: sql`upper(${citiesTable.name})`, + }, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) + .orderBy(asc(users2Table.id)); + + expect(res).toEqual([ + { + id: 1, + user: { name: 'John', nameUpper: 'JOHN' }, + city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, + }, + { + id: 2, + user: { name: 'Jane', nameUpper: 'JANE' }, + city: null, + }, + ]); +}); + +test('left join (all fields)', async () => { + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select().from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, 
citiesTable.id)) + .orderBy(asc(users2Table.id)); + + expect(res).toEqual([ + { + users2: { + id: 1, + name: 'John', + cityId: 1, + }, + cities: { + id: 1, + name: 'Paris', + }, + }, + { + users2: { + id: 2, + name: 'Jane', + cityId: null, + }, + cities: null, + }, + ]); +}); + +test('join subquery', async () => { + const coursesTable = singlestoreTable('courses', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + categoryId: int('category_id'), + }); + + const courseCategoriesTable = singlestoreTable('course_categories', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${coursesTable}`); + await db.execute(sql`drop table if exists ${courseCategoriesTable}`); + + await db.execute( + sql` + create table ${courseCategoriesTable} ( + \`id\` serial primary key, + \`name\` text not null + ) + `, + ); + + await db.execute( + sql` + create table ${coursesTable} ( + \`id\` serial primary key, + \`name\` text not null, + \`category_id\` int + ) + `, + ); + + await db.insert(courseCategoriesTable).values([ + { id: 1, name: 'Category 1' }, + { id: 2, name: 'Category 2' }, + { id: 3, name: 'Category 3' }, + { id: 4, name: 'Category 4' }, + ]); + + await db.insert(coursesTable).values([ + { id: 1, name: 'Development', categoryId: 2 }, + { id: 2, name: 'IT & Software', categoryId: 3 }, + { id: 3, name: 'Marketing', categoryId: 4 }, + { id: 4, name: 'Design', categoryId: 1 }, + ]); + + const sq2 = db + .select({ + categoryId: courseCategoriesTable.id, + category: courseCategoriesTable.name, + total: sql`count(${courseCategoriesTable.id})`, + }) + .from(courseCategoriesTable) + .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) + .orderBy(courseCategoriesTable.id) + .as('sq2'); + + const res = await db + .select({ + courseName: coursesTable.name, + categoryId: sq2.categoryId, + }) + .from(coursesTable) + .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) + 
.orderBy(coursesTable.name); + + await db.execute(sql`drop table ${coursesTable}`); + await db.execute(sql`drop table ${courseCategoriesTable}`); + + expect(res).toEqual([ + { courseName: 'Design', categoryId: 1 }, + { courseName: 'Development', categoryId: 2 }, + { courseName: 'IT & Software', categoryId: 3 }, + { courseName: 'Marketing', categoryId: 4 }, + ]); +}); + +test('with ... select', async () => { + const orders = singlestoreTable('orders', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull(), + amount: int('amount').notNull(), + quantity: int('quantity').notNull(), + }); + + await db.execute(sql`drop table if exists ${orders}`); + await db.execute( + sql` + create table ${orders} ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: regionalSales.region, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result = await db + .with(regionalSales, 
topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, + productSales: sql`cast(sum(${orders.amount}) as unsigned)`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + + await db.execute(sql`drop table ${orders}`); + + expect(result).toEqual([ + { + region: 'Europe', + product: 'A', + productUnits: 3, + productSales: 30, + }, + { + region: 'Europe', + product: 'B', + productUnits: 5, + productSales: 50, + }, + { + region: 'US', + product: 'A', + productUnits: 7, + productSales: 70, + }, + { + region: 'US', + product: 'B', + productUnits: 9, + productSales: 90, + }, + ]); +}); + +test('select from subquery sql', async () => { + await db.insert(users2Table).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); + + const sq = db + .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) + .from(users2Table) + .orderBy(asc(users2Table.id)) + .as('sq'); + + const res = await db.select({ name: sq.name }).from(sq); + + expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); +}); + +test('select a field without joining its table', () => { + expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); +}); + +test('select all fields from subquery without alias', () => { + const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); + + expect(() => db.select().from(sq).prepare()).toThrowError(); +}); + +test('select count()', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: sql`count(*)` }).from(usersTable); + + expect(res).toEqual([{ count: 2 }]); +}); + +test('select for ...', () => { + { + const query = 
db.select().from(users2Table).for('update').toSQL(); + expect(query.sql).toMatch(/ for update$/); + } + { + const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); + expect(query.sql).toMatch(/ for share skip locked$/); + } + { + const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); + expect(query.sql).toMatch(/ for update no wait$/); + } +}); + +test('having', async () => { + await db.insert(citiesTable).values([{ id: 1, name: 'London' }, { id: 2, name: 'Paris' }, { + id: 3, + name: 'New York', + }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, { + id: 3, + name: 'Jack', + cityId: 2, + }]); + + const result = await db + .select({ + id: citiesTable.id, + name: sql`upper(${citiesTable.name})`.as('upper_name'), + usersCount: sql`count(${users2Table.id})`.as('users_count'), + }) + .from(citiesTable) + .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) + .where(({ name }) => sql`length(${name}) >= 3`) + .groupBy(citiesTable.id) + .having(({ usersCount }) => sql`${usersCount} > 0`) + .orderBy(({ name }) => name); + + expect(result).toEqual([ + { + id: 1, + name: 'LONDON', + usersCount: 2, + }, + { + id: 2, + name: 'PARIS', + usersCount: 1, + }, + ]); +}); + +test('view', async () => { + const newYorkers1 = singlestoreView('new_yorkers') + .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); + + const newYorkers2 = singlestoreView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); + + const newYorkers3 = singlestoreView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); + + await 
db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2).orderBy(asc(newYorkers2.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3).orderBy(asc(newYorkers3.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); +}); + +test('select from raw sql', async () => { + const result = await db.select({ + id: sql`id`, + name: sql`name`, + }).from(sql`(select 1 as id, 'John' as name) as users`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); +}); + +test('select from raw sql with joins', async () => { + const result = await db + .select({ + id: sql`users.id`, + name: sql`users.name`, + userCity: sql`users.city`, + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, + ]); +}); + +test('join on aliased sql from select', async () => { + const result = await db + .select({ + userId: sql`users.id`.as('userId'), + name: sql`users.name`, + 
userCity: sql`users.city`, + cityId: sql`cities.id`.as('cityId'), + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); + + Expect>; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); +}); + +test('join on aliased sql from with clause', async () => { + const users = db.$with('users').as( + db.select({ + id: sql`id`.as('userId'), + name: sql`name`.as('userName'), + city: sql`city`.as('city'), + }).from( + sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, + ), + ); + + const cities = db.$with('cities').as( + db.select({ + id: sql`id`.as('cityId'), + name: sql`name`.as('cityName'), + }).from( + sql`(select 1 as id, 'Paris' as name) as cities`, + ), + ); + + const result = await db + .with(users, cities) + .select({ + userId: users.id, + name: users.name, + userCity: users.city, + cityId: cities.id, + cityName: cities.name, + }) + .from(users) + .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); + + Expect>; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); +}); + +test('prefixed table', async () => { + const singlestoreTable = singlestoreTableCreator((name) => `myprefix_${name}`); + + const users = singlestoreTable('test_prefixed_table_with_unique_name', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, + ); + + await db.insert(users).values({ id: 1, name: 'John' }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('orderBy 
with aliased column', () => { + const query = db.select({ + test: sql`something`.as('test'), + }).from(users2Table).orderBy((fields) => fields.test).toSQL(); + + expect(query.sql).toBe(`select something as \`test\` from \`${getTableName(users2Table)}\` order by \`test\``); +}); + +test('timestamp timezone', async () => { + const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); + + await db.insert(usersTable).values({ id: 1, name: 'With default times' }); + await db.insert(usersTable).values({ + id: 2, + name: 'Without default times', + createdAt: date, + }); + const users = await db.select().from(usersTable).orderBy(asc(usersTable.id)); + + // check that the timestamps are set correctly for default times + expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); + + // check that the timestamps are set correctly for non default times + expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); +}); + +test('transaction', async () => { + const users = singlestoreTable('users_transactions', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + const products = singlestoreTable('products_transactions', { + id: serial('id').primaryKey(), + price: int('price').notNull(), + stock: int('stock').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop table if exists ${products}`); + + await db.execute(sql`create table ${users} (id serial not null primary key, balance int not null)`); + await db.execute( + sql`create table ${products} (id serial not null primary key, price int not null, stock int not null)`, + ); + + const [{ insertId: userId }] = await db.insert(users).values({ id: 1, balance: 100 }); + const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); + const [{ insertId: productId }] = await db.insert(products).values({ id: 1, price: 10, stock: 10 }); + const product = await 
db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + }); + + const result = await db.select().from(users); + + await db.execute(sql`drop table ${users}`); + await db.execute(sql`drop table ${products}`); + + expect(result).toEqual([{ id: 1, balance: 90 }]); +}); + +test('transaction rollback', async () => { + const users = singlestoreTable('users_transactions_rollback', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, balance int not null)`, + ); + + await expect((async () => { + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + + const result = await db.select().from(users); + + await db.execute(sql`drop table ${users}`); + + expect(result).toEqual([]); +}); + +test('join subquery with join', async () => { + const internalStaff = singlestoreTable('internal_staff', { + userId: int('user_id').notNull(), + }); + + const customUser = singlestoreTable('custom_user', { + id: int('id').notNull(), + }); + + const ticket = singlestoreTable('ticket', { + staffId: int('staff_id').notNull(), + }); + + await db.execute(sql`drop table if exists ${internalStaff}`); + await db.execute(sql`drop table if exists ${customUser}`); + await db.execute(sql`drop table if exists ${ticket}`); + + await db.execute(sql`create table ${internalStaff} (user_id integer not null)`); + await db.execute(sql`create table ${customUser} (id integer not null)`); + await db.execute(sql`create table ${ticket} (staff_id integer not null)`); + + await 
db.insert(internalStaff).values({ userId: 1 }); + await db.insert(customUser).values({ id: 1 }); + await db.insert(ticket).values({ staffId: 1 }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) + .as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); + + await db.execute(sql`drop table ${internalStaff}`); + await db.execute(sql`drop table ${customUser}`); + await db.execute(sql`drop table ${ticket}`); + + expect(mainQuery).toEqual([{ + ticket: { staffId: 1 }, + internal_staff: { + internal_staff: { userId: 1 }, + custom_user: { id: 1 }, + }, + }]); +}); + +test('subquery with view', async () => { + const users = singlestoreTable('users_subquery_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 1 }, + { id: 4, name: 'Jill', cityId: 2 }, + ]); + + const sq = db.$with('sq').as(db.select().from(newYorkers)); + const result = await db.with(sq).select().from(sq).orderBy(asc(sq.id)); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); + + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 1 }, + ]); +}); + +test('join view as subquery', async () => { + 
const users = singlestoreTable('users_join_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 1 }, + { id: 4, name: 'Jill', cityId: 2 }, + ]); + + const sq = db.select().from(newYorkers).as('new_yorkers_sq'); + + const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).orderBy(asc(users.id)); + + expect(result).toEqual([ + { + users_join_view: { id: 1, name: 'John', cityId: 1 }, + new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, + }, + { + users_join_view: { id: 2, name: 'Jane', cityId: 2 }, + new_yorkers_sq: null, + }, + { + users_join_view: { id: 3, name: 'Jack', cityId: 1 }, + new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, + }, + { + users_join_view: { id: 4, name: 'Jill', cityId: 2 }, + new_yorkers_sq: null, + }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); +}); + +test('select iterator', async () => { + const users = singlestoreTable('users_iterator', { + id: serial('id').primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key)`); + + await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); + + const iter = db.select().from(users) + .orderBy(asc(users.id)) + .iterator(); + + const result: typeof 
users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); +}); + +test('select iterator w/ prepared statement', async () => { + const users = singlestoreTable('users_iterator', { + id: serial('id').primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key)`); + + await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); + + const prepared = db.select().from(users) + .orderBy(asc(users.id)) + .prepare(); + const iter = prepared.iterator(); + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); +}); + +test('insert undefined', async () => { + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.insert(users).values({ name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); +}); + +test('update undefined', async () => { + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + + await expect((async () => { + await db.update(users).set({ id: 1, name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); +}); diff --git a/integration-tests/tests/singlestore/singlestore-proxy.test.ts 
b/integration-tests/tests/singlestore/singlestore-proxy.test.ts new file mode 100644 index 000000000..51dc48a4a --- /dev/null +++ b/integration-tests/tests/singlestore/singlestore-proxy.test.ts @@ -0,0 +1,140 @@ +import retry from 'async-retry'; +import type { SingleStoreRemoteDatabase } from 'drizzle-orm/singlestore-proxy'; +import { drizzle as proxyDrizzle } from 'drizzle-orm/singlestore-proxy'; +import * as mysql2 from 'mysql2/promise'; +import { afterAll, beforeAll, beforeEach } from 'vitest'; +import { skipTests } from '~/common'; +import { createDockerDB, tests } from './singlestore-common'; + +const ENABLE_LOGGING = false; + +// eslint-disable-next-line drizzle-internal/require-entity-kind +class ServerSimulator { + constructor(private db: mysql2.Connection) {} + + async query(sql: string, params: any[], method: 'all' | 'execute') { + if (method === 'all') { + try { + const result = await this.db.query({ + sql, + values: params, + rowsAsArray: true, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }); + + return { data: result[0] as any }; + } catch (e: any) { + return { error: e }; + } + } else if (method === 'execute') { + try { + const result = await this.db.query({ + sql, + values: params, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }); + + return { data: result as any }; + } catch (e: any) { + return { error: e }; + } + } else { + return { error: 'Unknown method value' }; + } + } + + async migrations(queries: string[]) { + await this.db.query('START TRANSACTION'); + try { + for (const query of queries) { + await this.db.query(query); + } + await this.db.query('COMMIT'); + } catch (e) { + await this.db.query('ROLLBACK'); + throw e; + } + + return {}; + } +} + +let db: 
SingleStoreRemoteDatabase; +let client: mysql2.Connection; +let serverSimulator: ServerSimulator; + +beforeAll(async () => { + let connectionString; + if (process.env['SINGLESTORE_CONNECTION_STRING']) { + connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; + } else { + const { connectionString: conStr } = await createDockerDB(); + connectionString = conStr; + } + client = await retry(async () => { + client = await mysql2.createConnection(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + + await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); + await client.changeUser({ database: 'drizzle' }); + + serverSimulator = new ServerSimulator(client); + db = proxyDrizzle(async (sql, params, method) => { + try { + const response = await serverSimulator.query(sql, params, method); + + if (response.error !== undefined) { + throw response.error; + } + + return { rows: response.data }; + } catch (e: any) { + console.error('Error from singlestore proxy server:', e.message); + throw e; + } + }, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); +}); + +beforeEach((ctx) => { + ctx.singlestore = { + db, + }; +}); + +skipTests([ + 'select iterator w/ prepared statement', + 'select iterator', + 'nested transaction rollback', + 'nested transaction', + 'transaction rollback', + 'transaction', + 'transaction with options (set isolationLevel)', + 'migrator', +]); + +tests(); diff --git a/integration-tests/tests/singlestore/singlestore.test.ts b/integration-tests/tests/singlestore/singlestore.test.ts new file mode 100644 index 000000000..bfb1ee5b7 --- /dev/null +++ b/integration-tests/tests/singlestore/singlestore.test.ts @@ -0,0 +1,51 @@ +import retry from 'async-retry'; +import { drizzle } from 'drizzle-orm/singlestore'; +import type { SingleStoreDriverDatabase } from 
'drizzle-orm/singlestore'; +import * as mysql2 from 'mysql2/promise'; +import { afterAll, beforeAll, beforeEach } from 'vitest'; +import { createDockerDB, tests } from './singlestore-common'; + +const ENABLE_LOGGING = false; + +let db: SingleStoreDriverDatabase; +let client: mysql2.Connection; + +beforeAll(async () => { + let connectionString; + if (process.env['SINGLESTORE_CONNECTION_STRING']) { + connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; + } else { + const { connectionString: conStr } = await createDockerDB(); + connectionString = conStr; + } + client = await retry(async () => { + client = await mysql2.createConnection(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + + await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); + await client.changeUser({ database: 'drizzle' }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); +}); + +beforeEach((ctx) => { + ctx.singlestore = { + db, + }; +}); + +tests(); diff --git a/integration-tests/tests/sqlite/sqlite-common.ts b/integration-tests/tests/sqlite/sqlite-common.ts index 49c609941..be452bcf1 100644 --- a/integration-tests/tests/sqlite/sqlite-common.ts +++ b/integration-tests/tests/sqlite/sqlite-common.ts @@ -844,10 +844,7 @@ export function tests() { test('prepared statement reuse', async (ctx) => { const { db } = ctx.sqlite; - const stmt = db.insert(usersTable).values({ - verified: true, - name: sql.placeholder('name'), - }).prepare(); + const stmt = db.insert(usersTable).values({ name: sql.placeholder('name') }).prepare(); for (let i = 0; i < 10; i++) { await stmt.run({ name: `John ${i}` }); @@ -856,20 +853,41 @@ export function tests() { const result = await db.select({ id: usersTable.id, name: usersTable.name, + }).from(usersTable).all(); + + expect(result).toEqual([ + { id: 1, 
name: 'John 0' }, + { id: 2, name: 'John 1' }, + { id: 3, name: 'John 2' }, + { id: 4, name: 'John 3' }, + { id: 5, name: 'John 4' }, + { id: 6, name: 'John 5' }, + { id: 7, name: 'John 6' }, + { id: 8, name: 'John 7' }, + { id: 9, name: 'John 8' }, + { id: 10, name: 'John 9' }, + ]); + }); + + test('insert: placeholders on columns with encoder', async (ctx) => { + const { db } = ctx.sqlite; + + const stmt = db.insert(usersTable).values({ + name: 'John', + verified: sql.placeholder('verified'), + }).prepare(); + + await stmt.run({ verified: true }); + await stmt.run({ verified: false }); + + const result = await db.select({ + id: usersTable.id, verified: usersTable.verified, }).from(usersTable).all(); expect(result).toEqual([ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, + { id: 1, verified: true }, + { id: 2, verified: false }, ]); }); diff --git a/integration-tests/vitest.config.ts b/integration-tests/vitest.config.ts index 5187d2cfc..6cefab280 100644 --- a/integration-tests/vitest.config.ts +++ b/integration-tests/vitest.config.ts @@ -14,6 +14,7 @@ export default defineConfig({ 'tests/imports/**/*', 'tests/extensions/vectors/**/*', 'tests/version.test.ts', + 'tests/singlestore/**/*.test.ts', ], exclude: [ ...(process.env.SKIP_EXTERNAL_DB_TESTS diff --git a/package.json b/package.json index 3327aad18..9b5c6739f 100755 --- a/package.json +++ b/package.json @@ -8,7 +8,8 @@ "test": "turbo run test --color", "t": "pnpm test", "test:types": "turbo run test:types --color", - "lint": "concurrently -n eslint,dprint \"eslint --ext ts .\" \"dprint check --list-different\"" + 
"lint": "concurrently -n eslint,dprint \"eslint --ext ts .\" \"dprint check --list-different\"", + "lint:fix": "npx dprint fmt" }, "devDependencies": { "@arethetypeswrong/cli": "^0.15.3", @@ -41,5 +42,6 @@ "patchedDependencies": { "typescript@5.4.5": "patches/typescript@5.4.5.patch" } - } + }, + "packageManager": "pnpm@9.7.0" } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8cff3f864..5466249d6 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -45,7 +45,7 @@ importers: version: link:drizzle-orm/dist drizzle-orm-old: specifier: npm:drizzle-orm@^0.27.2 - version: drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20240524.0)(@libsql/client@0.6.0)(@neondatabase/serverless@0.9.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.10)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@10.0.0)(bun-types@1.0.3)(knex@3.1.0(better-sqlite3@10.0.0)(mysql2@3.9.8)(pg@8.11.5)(sqlite3@5.1.7))(kysely@0.27.3)(mysql2@3.9.8)(pg@8.11.5)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7) + version: drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20240524.0)(@libsql/client@0.5.6)(@neondatabase/serverless@0.9.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.10)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@9.6.0)(bun-types@1.0.3)(knex@2.5.1(better-sqlite3@9.6.0)(mysql2@3.11.0)(pg@8.11.5)(sqlite3@5.1.7))(kysely@0.25.0)(mysql2@3.11.0)(pg@8.11.5)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7) eslint: specifier: ^8.50.0 version: 8.50.0 @@ -89,6 +89,208 @@ importers: specifier: 5.4.5 version: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + drizzle-kit: + dependencies: + '@drizzle-team/brocli': + specifier: ^0.10.1 + version: 0.10.1 + '@esbuild-kit/esm-loader': + specifier: ^2.5.5 + version: 2.5.5 + esbuild: + specifier: ^0.19.7 + version: 0.19.12 + esbuild-register: + specifier: ^3.5.0 + version: 
3.5.0(esbuild@0.19.12) + devDependencies: + '@arethetypeswrong/cli': + specifier: ^0.15.3 + version: 0.15.3 + '@aws-sdk/client-rds-data': + specifier: ^3.556.0 + version: 3.583.0 + '@cloudflare/workers-types': + specifier: ^4.20230518.0 + version: 4.20240524.0 + '@electric-sql/pglite': + specifier: ^0.1.5 + version: 0.1.5 + '@hono/node-server': + specifier: ^1.9.0 + version: 1.12.0 + '@hono/zod-validator': + specifier: ^0.2.1 + version: 0.2.2(hono@4.5.0)(zod@3.23.7) + '@libsql/client': + specifier: ^0.4.2 + version: 0.4.3(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@neondatabase/serverless': + specifier: ^0.9.1 + version: 0.9.3 + '@originjs/vite-plugin-commonjs': + specifier: ^1.0.3 + version: 1.0.3 + '@planetscale/database': + specifier: ^1.16.0 + version: 1.18.0 + '@types/better-sqlite3': + specifier: ^7.6.4 + version: 7.6.10 + '@types/dockerode': + specifier: ^3.3.28 + version: 3.3.29 + '@types/glob': + specifier: ^8.1.0 + version: 8.1.0 + '@types/json-diff': + specifier: ^1.0.3 + version: 1.0.3 + '@types/minimatch': + specifier: ^5.1.2 + version: 5.1.2 + '@types/node': + specifier: ^18.11.15 + version: 18.19.33 + '@types/pg': + specifier: ^8.10.7 + version: 8.11.6 + '@types/pluralize': + specifier: ^0.0.33 + version: 0.0.33 + '@types/semver': + specifier: ^7.5.5 + version: 7.5.8 + '@types/uuid': + specifier: ^9.0.8 + version: 9.0.8 + '@types/ws': + specifier: ^8.5.10 + version: 8.5.11 + '@typescript-eslint/eslint-plugin': + specifier: ^7.2.0 + version: 7.16.1(@typescript-eslint/parser@7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/parser': + specifier: ^7.2.0 + version: 7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@vercel/postgres': + specifier: ^0.8.0 + version: 0.8.0 + ava: + specifier: ^5.1.0 + version: 5.3.0(@ava/typescript@5.0.0) + better-sqlite3: + specifier: ^9.4.3 + version: 
9.6.0 + camelcase: + specifier: ^7.0.1 + version: 7.0.1 + chalk: + specifier: ^5.2.0 + version: 5.3.0 + commander: + specifier: ^12.1.0 + version: 12.1.0 + dockerode: + specifier: ^3.3.4 + version: 3.3.5 + dotenv: + specifier: ^16.0.3 + version: 16.4.5 + drizzle-kit: + specifier: 0.21.2 + version: 0.21.2 + drizzle-orm: + specifier: workspace:./drizzle-orm/dist + version: link:drizzle-orm/dist + env-paths: + specifier: ^3.0.0 + version: 3.0.0 + esbuild-node-externals: + specifier: ^1.9.0 + version: 1.14.0(esbuild@0.19.12) + eslint: + specifier: ^8.57.0 + version: 8.57.0 + eslint-config-prettier: + specifier: ^9.1.0 + version: 9.1.0(eslint@8.57.0) + eslint-plugin-prettier: + specifier: ^5.1.3 + version: 5.2.1(eslint-config-prettier@9.1.0(eslint@8.57.0))(eslint@8.57.0)(prettier@2.8.8) + get-port: + specifier: ^6.1.2 + version: 6.1.2 + glob: + specifier: ^8.1.0 + version: 8.1.0 + hanji: + specifier: ^0.0.5 + version: 0.0.5 + hono: + specifier: ^4.1.5 + version: 4.5.0 + json-diff: + specifier: 1.0.6 + version: 1.0.6 + minimatch: + specifier: ^7.4.3 + version: 7.4.6 + mysql2: + specifier: 3.3.3 + version: 3.3.3 + node-fetch: + specifier: ^3.3.2 + version: 3.3.2 + pg: + specifier: ^8.11.5 + version: 8.11.5 + pluralize: + specifier: ^8.0.0 + version: 8.0.0 + postgres: + specifier: ^3.4.4 + version: 3.4.4 + prettier: + specifier: ^2.8.1 + version: 2.8.8 + semver: + specifier: ^7.5.4 + version: 7.6.2 + superjson: + specifier: ^2.2.1 + version: 2.2.1 + tsup: + specifier: ^8.0.2 + version: 8.1.2(postcss@8.4.39)(tsx@3.14.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(yaml@2.4.2) + tsx: + specifier: ^3.12.1 + version: 3.14.0 + typescript: + specifier: ^5.4.3 + version: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + uuid: + specifier: ^9.0.1 + version: 9.0.1 + vite-tsconfig-paths: + specifier: ^4.3.2 + version: 4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0)) + vitest: + specifier: 
^1.4.0 + version: 1.6.0(@types/node@18.19.33)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) + wrangler: + specifier: ^3.22.1 + version: 3.65.0(@cloudflare/workers-types@4.20240524.0)(bufferutil@4.0.8)(utf-8-validate@6.0.3) + ws: + specifier: ^8.16.0 + version: 8.17.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) + zod: + specifier: ^3.20.2 + version: 3.23.7 + zx: + specifier: ^7.2.2 + version: 7.2.2 + drizzle-orm: devDependencies: '@aws-sdk/client-rds-data': @@ -108,7 +310,7 @@ importers: version: 0.9.0 '@op-engineering/op-sqlite': specifier: ^2.0.16 - version: 2.0.22(react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1) + version: 2.0.22(react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1))(react@18.3.1) '@opentelemetry/api': specifier: ^1.4.1 version: 1.8.0 @@ -156,7 +358,7 @@ importers: version: 10.1.0 expo-sqlite: specifier: ^13.2.0 - version: 13.4.0(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) + version: 13.4.0(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)) knex: specifier: ^2.4.2 version: 2.5.1(better-sqlite3@8.7.0)(mysql2@3.3.3)(pg@8.11.5)(sqlite3@5.1.7) @@ -1689,6 +1891,40 @@ packages: '@balena/dockerignore@1.0.2': resolution: {integrity: sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==} + '@cloudflare/kv-asset-handler@0.3.4': + resolution: {integrity: sha512-YLPHc8yASwjNkmcDMQMY35yiWjoKAKnhUbPRszBRS0YgH+IXtsMp61j+yTcnCE3oO2DgP0U3iejLC8FTtKDC8Q==} + engines: {node: '>=16.13'} + + '@cloudflare/workerd-darwin-64@1.20240712.0': + resolution: {integrity: 
sha512-KB1vbOhr62BCAwVr3VaRcngzPeSCQ7zPA9VGrfwYXIxo0Y4zlW1z0EVtcewFSz5XXKr3BtNnJXxdjDPUNkguQw==} + engines: {node: '>=16'} + cpu: [x64] + os: [darwin] + + '@cloudflare/workerd-darwin-arm64@1.20240712.0': + resolution: {integrity: sha512-UDwFnCfQGFVCNxOeHxKNEc1ANQk/3OIiFWpVsxgZqJqU/22XM88JHxJW+YcBKsaUGUlpLyImaYUn2/rG+i+9UQ==} + engines: {node: '>=16'} + cpu: [arm64] + os: [darwin] + + '@cloudflare/workerd-linux-64@1.20240712.0': + resolution: {integrity: sha512-MxpMHSJcZRUL66TO7BEnEim9WgZ8wJEVOB1Rq7a/IF2hI4/8f+N+02PChh62NkBlWxDfTXAtZy0tyQMm0EGjHg==} + engines: {node: '>=16'} + cpu: [x64] + os: [linux] + + '@cloudflare/workerd-linux-arm64@1.20240712.0': + resolution: {integrity: sha512-DtLYZsFFFAMgn+6YCHoQS6nYY4nbdAtcAFa4PhWTjLJDbvQEn3IoK9Bi4ajCL7xG36FeuBdZliSbBiiv7CJjfQ==} + engines: {node: '>=16'} + cpu: [arm64] + os: [linux] + + '@cloudflare/workerd-windows-64@1.20240712.0': + resolution: {integrity: sha512-u8zoT9PQiiwxuz9npquLBFWrC/RlBWGGZ1aylarZNFlM4sFrRm+bRr6i+KtS+fltHIVXj3teuoKYytA1ppf9Yw==} + engines: {node: '>=16'} + cpu: [x64] + os: [win32] + '@cloudflare/workers-types@4.20240512.0': resolution: {integrity: sha512-o2yTEWg+YK/I1t/Me+dA0oarO0aCbjibp6wSeaw52DSE9tDyKJ7S+Qdyw/XsMrKn4t8kF6f/YOba+9O4MJfW9w==} @@ -1738,6 +1974,9 @@ packages: cpu: [x64] os: [win32] + '@drizzle-team/brocli@0.10.1': + resolution: {integrity: sha512-AHy0vjc+n/4w/8Mif+w86qpppHuF3AyXbcWW+R/W7GNA3F5/p2nuhlkCJaTXSLZheB4l1rtHzOfr9A7NwoR/Zg==} + '@drizzle-team/studio@0.0.5': resolution: {integrity: sha512-ps5qF0tMxWRVu+V5gvCRrQNqlY92aTnIKdq27gm9LZMSdaKYZt6AVvSK1dlUMzs6Rt0Jm80b+eWct6xShBKhIw==} @@ -1750,6 +1989,22 @@ packages: '@esbuild-kit/esm-loader@2.5.5': resolution: {integrity: sha512-Qwfvj/qoPbClxCRNuac1Du01r9gvNOT+pMYtJDapfB1eoGN1YlJ1BixLyL9WVENRx5RXgNLdfYdx/CuswlGhMw==} + '@esbuild-plugins/node-globals-polyfill@0.2.3': + resolution: {integrity: sha512-r3MIryXDeXDOZh7ih1l/yE9ZLORCd5e8vWg02azWRGj5SPTuoh69A2AIyn0Z31V/kHBfZ4HgWJ+OK3GTTwLmnw==} + peerDependencies: + esbuild: '*' + + 
'@esbuild-plugins/node-modules-polyfill@0.2.2': + resolution: {integrity: sha512-LXV7QsWJxRuMYvKbiznh+U1ilIop3g2TeKRzUxOG5X3YITc8JyyTa90BmLwqqv0YnX4v32CSlG+vsziZp9dMvA==} + peerDependencies: + esbuild: '*' + + '@esbuild/aix-ppc64@0.19.12': + resolution: {integrity: sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [aix] + '@esbuild/aix-ppc64@0.20.2': resolution: {integrity: sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==} engines: {node: '>=12'} @@ -1762,6 +2017,12 @@ packages: cpu: [ppc64] os: [aix] + '@esbuild/aix-ppc64@0.23.0': + resolution: {integrity: sha512-3sG8Zwa5fMcA9bgqB8AfWPQ+HFke6uD3h1s3RIwUNK8EG7a4buxvuFTs3j1IMs2NXAk9F30C/FF4vxRgQCcmoQ==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + '@esbuild/android-arm64@0.17.19': resolution: {integrity: sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA==} engines: {node: '>=12'} @@ -1774,6 +2035,12 @@ packages: cpu: [arm64] os: [android] + '@esbuild/android-arm64@0.19.12': + resolution: {integrity: sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + '@esbuild/android-arm64@0.20.2': resolution: {integrity: sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==} engines: {node: '>=12'} @@ -1786,6 +2053,12 @@ packages: cpu: [arm64] os: [android] + '@esbuild/android-arm64@0.23.0': + resolution: {integrity: sha512-EuHFUYkAVfU4qBdyivULuu03FhJO4IJN9PGuABGrFy4vUuzk91P2d+npxHcFdpUnfYKy0PuV+n6bKIpHOB3prQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + '@esbuild/android-arm@0.17.19': resolution: {integrity: sha512-rIKddzqhmav7MSmoFCmDIb6e2W57geRsM94gV2l38fzhXMwq7hZoClug9USI2pFRGL06f4IOPHHpFNOkWieR8A==} engines: {node: '>=12'} @@ -1798,6 +2071,12 @@ packages: cpu: [arm] os: 
[android] + '@esbuild/android-arm@0.19.12': + resolution: {integrity: sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + '@esbuild/android-arm@0.20.2': resolution: {integrity: sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==} engines: {node: '>=12'} @@ -1810,6 +2089,12 @@ packages: cpu: [arm] os: [android] + '@esbuild/android-arm@0.23.0': + resolution: {integrity: sha512-+KuOHTKKyIKgEEqKbGTK8W7mPp+hKinbMBeEnNzjJGyFcWsfrXjSTNluJHCY1RqhxFurdD8uNXQDei7qDlR6+g==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + '@esbuild/android-x64@0.17.19': resolution: {integrity: sha512-uUTTc4xGNDT7YSArp/zbtmbhO0uEEK9/ETW29Wk1thYUJBz3IVnvgEiEwEa9IeLyvnpKrWK64Utw2bgUmDveww==} engines: {node: '>=12'} @@ -1822,6 +2107,12 @@ packages: cpu: [x64] os: [android] + '@esbuild/android-x64@0.19.12': + resolution: {integrity: sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + '@esbuild/android-x64@0.20.2': resolution: {integrity: sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==} engines: {node: '>=12'} @@ -1834,6 +2125,12 @@ packages: cpu: [x64] os: [android] + '@esbuild/android-x64@0.23.0': + resolution: {integrity: sha512-WRrmKidLoKDl56LsbBMhzTTBxrsVwTKdNbKDalbEZr0tcsBgCLbEtoNthOW6PX942YiYq8HzEnb4yWQMLQuipQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + '@esbuild/darwin-arm64@0.17.19': resolution: {integrity: sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg==} engines: {node: '>=12'} @@ -1846,6 +2143,12 @@ packages: cpu: [arm64] os: [darwin] + '@esbuild/darwin-arm64@0.19.12': + resolution: {integrity: sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==} + engines: {node: '>=12'} + cpu: 
[arm64] + os: [darwin] + '@esbuild/darwin-arm64@0.20.2': resolution: {integrity: sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==} engines: {node: '>=12'} @@ -1858,6 +2161,12 @@ packages: cpu: [arm64] os: [darwin] + '@esbuild/darwin-arm64@0.23.0': + resolution: {integrity: sha512-YLntie/IdS31H54Ogdn+v50NuoWF5BDkEUFpiOChVa9UnKpftgwzZRrI4J132ETIi+D8n6xh9IviFV3eXdxfow==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + '@esbuild/darwin-x64@0.17.19': resolution: {integrity: sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw==} engines: {node: '>=12'} @@ -1870,6 +2179,12 @@ packages: cpu: [x64] os: [darwin] + '@esbuild/darwin-x64@0.19.12': + resolution: {integrity: sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + '@esbuild/darwin-x64@0.20.2': resolution: {integrity: sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==} engines: {node: '>=12'} @@ -1882,6 +2197,12 @@ packages: cpu: [x64] os: [darwin] + '@esbuild/darwin-x64@0.23.0': + resolution: {integrity: sha512-IMQ6eme4AfznElesHUPDZ+teuGwoRmVuuixu7sv92ZkdQcPbsNHzutd+rAfaBKo8YK3IrBEi9SLLKWJdEvJniQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + '@esbuild/freebsd-arm64@0.17.19': resolution: {integrity: sha512-pBwbc7DufluUeGdjSU5Si+P3SoMF5DQ/F/UmTSb8HXO80ZEAJmrykPyzo1IfNbAoaqw48YRpv8shwd1NoI0jcQ==} engines: {node: '>=12'} @@ -1894,6 +2215,12 @@ packages: cpu: [arm64] os: [freebsd] + '@esbuild/freebsd-arm64@0.19.12': + resolution: {integrity: sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + '@esbuild/freebsd-arm64@0.20.2': resolution: {integrity: sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==} engines: {node: '>=12'} 
@@ -1906,6 +2233,12 @@ packages: cpu: [arm64] os: [freebsd] + '@esbuild/freebsd-arm64@0.23.0': + resolution: {integrity: sha512-0muYWCng5vqaxobq6LB3YNtevDFSAZGlgtLoAc81PjUfiFz36n4KMpwhtAd4he8ToSI3TGyuhyx5xmiWNYZFyw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + '@esbuild/freebsd-x64@0.17.19': resolution: {integrity: sha512-4lu+n8Wk0XlajEhbEffdy2xy53dpR06SlzvhGByyg36qJw6Kpfk7cp45DR/62aPH9mtJRmIyrXAS5UWBrJT6TQ==} engines: {node: '>=12'} @@ -1918,6 +2251,12 @@ packages: cpu: [x64] os: [freebsd] + '@esbuild/freebsd-x64@0.19.12': + resolution: {integrity: sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + '@esbuild/freebsd-x64@0.20.2': resolution: {integrity: sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==} engines: {node: '>=12'} @@ -1930,6 +2269,12 @@ packages: cpu: [x64] os: [freebsd] + '@esbuild/freebsd-x64@0.23.0': + resolution: {integrity: sha512-XKDVu8IsD0/q3foBzsXGt/KjD/yTKBCIwOHE1XwiXmrRwrX6Hbnd5Eqn/WvDekddK21tfszBSrE/WMaZh+1buQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + '@esbuild/linux-arm64@0.17.19': resolution: {integrity: sha512-ct1Tg3WGwd3P+oZYqic+YZF4snNl2bsnMKRkb3ozHmnM0dGWuxcPTTntAF6bOP0Sp4x0PjSF+4uHQ1xvxfRKqg==} engines: {node: '>=12'} @@ -1942,6 +2287,12 @@ packages: cpu: [arm64] os: [linux] + '@esbuild/linux-arm64@0.19.12': + resolution: {integrity: sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + '@esbuild/linux-arm64@0.20.2': resolution: {integrity: sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==} engines: {node: '>=12'} @@ -1954,6 +2305,12 @@ packages: cpu: [arm64] os: [linux] + '@esbuild/linux-arm64@0.23.0': + resolution: {integrity: 
sha512-j1t5iG8jE7BhonbsEg5d9qOYcVZv/Rv6tghaXM/Ug9xahM0nX/H2gfu6X6z11QRTMT6+aywOMA8TDkhPo8aCGw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + '@esbuild/linux-arm@0.17.19': resolution: {integrity: sha512-cdmT3KxjlOQ/gZ2cjfrQOtmhG4HJs6hhvm3mWSRDPtZ/lP5oe8FWceS10JaSJC13GBd4eH/haHnqf7hhGNLerA==} engines: {node: '>=12'} @@ -1966,6 +2323,12 @@ packages: cpu: [arm] os: [linux] + '@esbuild/linux-arm@0.19.12': + resolution: {integrity: sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + '@esbuild/linux-arm@0.20.2': resolution: {integrity: sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==} engines: {node: '>=12'} @@ -1978,6 +2341,12 @@ packages: cpu: [arm] os: [linux] + '@esbuild/linux-arm@0.23.0': + resolution: {integrity: sha512-SEELSTEtOFu5LPykzA395Mc+54RMg1EUgXP+iw2SJ72+ooMwVsgfuwXo5Fn0wXNgWZsTVHwY2cg4Vi/bOD88qw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + '@esbuild/linux-ia32@0.17.19': resolution: {integrity: sha512-w4IRhSy1VbsNxHRQpeGCHEmibqdTUx61Vc38APcsRbuVgK0OPEnQ0YD39Brymn96mOx48Y2laBQGqgZ0j9w6SQ==} engines: {node: '>=12'} @@ -1990,6 +2359,12 @@ packages: cpu: [ia32] os: [linux] + '@esbuild/linux-ia32@0.19.12': + resolution: {integrity: sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + '@esbuild/linux-ia32@0.20.2': resolution: {integrity: sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==} engines: {node: '>=12'} @@ -2002,6 +2377,12 @@ packages: cpu: [ia32] os: [linux] + '@esbuild/linux-ia32@0.23.0': + resolution: {integrity: sha512-P7O5Tkh2NbgIm2R6x1zGJJsnacDzTFcRWZyTTMgFdVit6E98LTxO+v8LCCLWRvPrjdzXHx9FEOA8oAZPyApWUA==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + '@esbuild/linux-loong64@0.14.54': resolution: {integrity: 
sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw==} engines: {node: '>=12'} @@ -2020,6 +2401,12 @@ packages: cpu: [loong64] os: [linux] + '@esbuild/linux-loong64@0.19.12': + resolution: {integrity: sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + '@esbuild/linux-loong64@0.20.2': resolution: {integrity: sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==} engines: {node: '>=12'} @@ -2032,6 +2419,12 @@ packages: cpu: [loong64] os: [linux] + '@esbuild/linux-loong64@0.23.0': + resolution: {integrity: sha512-InQwepswq6urikQiIC/kkx412fqUZudBO4SYKu0N+tGhXRWUqAx+Q+341tFV6QdBifpjYgUndV1hhMq3WeJi7A==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + '@esbuild/linux-mips64el@0.17.19': resolution: {integrity: sha512-LKJltc4LVdMKHsrFe4MGNPp0hqDFA1Wpt3jE1gEyM3nKUvOiO//9PheZZHfYRfYl6AwdTH4aTcXSqBerX0ml4A==} engines: {node: '>=12'} @@ -2044,6 +2437,12 @@ packages: cpu: [mips64el] os: [linux] + '@esbuild/linux-mips64el@0.19.12': + resolution: {integrity: sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + '@esbuild/linux-mips64el@0.20.2': resolution: {integrity: sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==} engines: {node: '>=12'} @@ -2056,6 +2455,12 @@ packages: cpu: [mips64el] os: [linux] + '@esbuild/linux-mips64el@0.23.0': + resolution: {integrity: sha512-J9rflLtqdYrxHv2FqXE2i1ELgNjT+JFURt/uDMoPQLcjWQA5wDKgQA4t/dTqGa88ZVECKaD0TctwsUfHbVoi4w==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + '@esbuild/linux-ppc64@0.17.19': resolution: {integrity: sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg==} engines: {node: '>=12'} @@ -2068,6 +2473,12 @@ packages: cpu: [ppc64] 
os: [linux] + '@esbuild/linux-ppc64@0.19.12': + resolution: {integrity: sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + '@esbuild/linux-ppc64@0.20.2': resolution: {integrity: sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==} engines: {node: '>=12'} @@ -2080,6 +2491,12 @@ packages: cpu: [ppc64] os: [linux] + '@esbuild/linux-ppc64@0.23.0': + resolution: {integrity: sha512-cShCXtEOVc5GxU0fM+dsFD10qZ5UpcQ8AM22bYj0u/yaAykWnqXJDpd77ublcX6vdDsWLuweeuSNZk4yUxZwtw==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + '@esbuild/linux-riscv64@0.17.19': resolution: {integrity: sha512-FC3nUAWhvFoutlhAkgHf8f5HwFWUL6bYdvLc/TTuxKlvLi3+pPzdZiFKSWz/PF30TB1K19SuCxDTI5KcqASJqA==} engines: {node: '>=12'} @@ -2092,6 +2509,12 @@ packages: cpu: [riscv64] os: [linux] + '@esbuild/linux-riscv64@0.19.12': + resolution: {integrity: sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + '@esbuild/linux-riscv64@0.20.2': resolution: {integrity: sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==} engines: {node: '>=12'} @@ -2104,6 +2527,12 @@ packages: cpu: [riscv64] os: [linux] + '@esbuild/linux-riscv64@0.23.0': + resolution: {integrity: sha512-HEtaN7Y5UB4tZPeQmgz/UhzoEyYftbMXrBCUjINGjh3uil+rB/QzzpMshz3cNUxqXN7Vr93zzVtpIDL99t9aRw==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + '@esbuild/linux-s390x@0.17.19': resolution: {integrity: sha512-IbFsFbxMWLuKEbH+7sTkKzL6NJmG2vRyy6K7JJo55w+8xDk7RElYn6xvXtDW8HCfoKBFK69f3pgBJSUSQPr+4Q==} engines: {node: '>=12'} @@ -2116,6 +2545,12 @@ packages: cpu: [s390x] os: [linux] + '@esbuild/linux-s390x@0.19.12': + resolution: {integrity: sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==} + engines: {node: 
'>=12'} + cpu: [s390x] + os: [linux] + '@esbuild/linux-s390x@0.20.2': resolution: {integrity: sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==} engines: {node: '>=12'} @@ -2128,6 +2563,12 @@ packages: cpu: [s390x] os: [linux] + '@esbuild/linux-s390x@0.23.0': + resolution: {integrity: sha512-WDi3+NVAuyjg/Wxi+o5KPqRbZY0QhI9TjrEEm+8dmpY9Xir8+HE/HNx2JoLckhKbFopW0RdO2D72w8trZOV+Wg==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + '@esbuild/linux-x64@0.17.19': resolution: {integrity: sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw==} engines: {node: '>=12'} @@ -2140,6 +2581,12 @@ packages: cpu: [x64] os: [linux] + '@esbuild/linux-x64@0.19.12': + resolution: {integrity: sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + '@esbuild/linux-x64@0.20.2': resolution: {integrity: sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==} engines: {node: '>=12'} @@ -2152,6 +2599,12 @@ packages: cpu: [x64] os: [linux] + '@esbuild/linux-x64@0.23.0': + resolution: {integrity: sha512-a3pMQhUEJkITgAw6e0bWA+F+vFtCciMjW/LPtoj99MhVt+Mfb6bbL9hu2wmTZgNd994qTAEw+U/r6k3qHWWaOQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + '@esbuild/netbsd-x64@0.17.19': resolution: {integrity: sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q==} engines: {node: '>=12'} @@ -2164,6 +2617,12 @@ packages: cpu: [x64] os: [netbsd] + '@esbuild/netbsd-x64@0.19.12': + resolution: {integrity: sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + '@esbuild/netbsd-x64@0.20.2': resolution: {integrity: sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==} engines: {node: '>=12'} @@ -2176,14 
+2635,32 @@ packages: cpu: [x64] os: [netbsd] + '@esbuild/netbsd-x64@0.23.0': + resolution: {integrity: sha512-cRK+YDem7lFTs2Q5nEv/HHc4LnrfBCbH5+JHu6wm2eP+d8OZNoSMYgPZJq78vqQ9g+9+nMuIsAO7skzphRXHyw==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.23.0': + resolution: {integrity: sha512-suXjq53gERueVWu0OKxzWqk7NxiUWSUlrxoZK7usiF50C6ipColGR5qie2496iKGYNLhDZkPxBI3erbnYkU0rQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + '@esbuild/openbsd-x64@0.17.19': resolution: {integrity: sha512-cnq5brJYrSZ2CF6c35eCmviIN3k3RczmHz8eYaVlNasVqsNY+JKohZU5MKmaOI+KkllCdzOKKdPs762VCPC20g==} engines: {node: '>=12'} cpu: [x64] os: [openbsd] - '@esbuild/openbsd-x64@0.18.20': - resolution: {integrity: sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==} + '@esbuild/openbsd-x64@0.18.20': + resolution: {integrity: sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.19.12': + resolution: {integrity: sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==} engines: {node: '>=12'} cpu: [x64] os: [openbsd] @@ -2200,6 +2677,12 @@ packages: cpu: [x64] os: [openbsd] + '@esbuild/openbsd-x64@0.23.0': + resolution: {integrity: sha512-6p3nHpby0DM/v15IFKMjAaayFhqnXV52aEmv1whZHX56pdkK+MEaLoQWj+H42ssFarP1PcomVhbsR4pkz09qBg==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + '@esbuild/sunos-x64@0.17.19': resolution: {integrity: sha512-vCRT7yP3zX+bKWFeP/zdS6SqdWB8OIpaRq/mbXQxTGHnIxspRtigpkUcDMlSCOejlHowLqII7K2JKevwyRP2rg==} engines: {node: '>=12'} @@ -2212,6 +2695,12 @@ packages: cpu: [x64] os: [sunos] + '@esbuild/sunos-x64@0.19.12': + resolution: {integrity: sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + '@esbuild/sunos-x64@0.20.2': 
resolution: {integrity: sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==} engines: {node: '>=12'} @@ -2224,6 +2713,12 @@ packages: cpu: [x64] os: [sunos] + '@esbuild/sunos-x64@0.23.0': + resolution: {integrity: sha512-BFelBGfrBwk6LVrmFzCq1u1dZbG4zy/Kp93w2+y83Q5UGYF1d8sCzeLI9NXjKyujjBBniQa8R8PzLFAUrSM9OA==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + '@esbuild/win32-arm64@0.17.19': resolution: {integrity: sha512-yYx+8jwowUstVdorcMdNlzklLYhPxjniHWFKgRqH7IFlUEa0Umu3KuYplf1HUZZ422e3NU9F4LGb+4O0Kdcaag==} engines: {node: '>=12'} @@ -2236,6 +2731,12 @@ packages: cpu: [arm64] os: [win32] + '@esbuild/win32-arm64@0.19.12': + resolution: {integrity: sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + '@esbuild/win32-arm64@0.20.2': resolution: {integrity: sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==} engines: {node: '>=12'} @@ -2248,6 +2749,12 @@ packages: cpu: [arm64] os: [win32] + '@esbuild/win32-arm64@0.23.0': + resolution: {integrity: sha512-lY6AC8p4Cnb7xYHuIxQ6iYPe6MfO2CC43XXKo9nBXDb35krYt7KGhQnOkRGar5psxYkircpCqfbNDB4uJbS2jQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + '@esbuild/win32-ia32@0.17.19': resolution: {integrity: sha512-eggDKanJszUtCdlVs0RB+h35wNlb5v4TWEkq4vZcmVt5u/HiDZrTXe2bWFQUez3RgNHwx/x4sk5++4NSSicKkw==} engines: {node: '>=12'} @@ -2260,6 +2767,12 @@ packages: cpu: [ia32] os: [win32] + '@esbuild/win32-ia32@0.19.12': + resolution: {integrity: sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + '@esbuild/win32-ia32@0.20.2': resolution: {integrity: sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==} engines: {node: '>=12'} @@ -2272,6 +2785,12 @@ packages: cpu: [ia32] os: [win32] + 
'@esbuild/win32-ia32@0.23.0': + resolution: {integrity: sha512-7L1bHlOTcO4ByvI7OXVI5pNN6HSu6pUQq9yodga8izeuB1KcT2UkHaH6118QJwopExPn0rMHIseCTx1CRo/uNA==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + '@esbuild/win32-x64@0.17.19': resolution: {integrity: sha512-lAhycmKnVOuRYNtRtatQR1LPQf2oYCkRGkSFnseDAKPl8lu5SOsK/e1sXe5a0Pc5kHIHe6P2I/ilntNv2xf3cA==} engines: {node: '>=12'} @@ -2284,6 +2803,12 @@ packages: cpu: [x64] os: [win32] + '@esbuild/win32-x64@0.19.12': + resolution: {integrity: sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + '@esbuild/win32-x64@0.20.2': resolution: {integrity: sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==} engines: {node: '>=12'} @@ -2296,12 +2821,22 @@ packages: cpu: [x64] os: [win32] + '@esbuild/win32-x64@0.23.0': + resolution: {integrity: sha512-Arm+WgUFLUATuoxCJcahGuk6Yj9Pzxd6l11Zb/2aAuv5kWWvvfhLFo2fni4uSK5vzlUdCGZ/BdV5tH8klj8p8g==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + '@eslint-community/eslint-utils@4.4.0': resolution: {integrity: sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} peerDependencies: eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + '@eslint-community/regexpp@4.11.0': + resolution: {integrity: sha512-G/M/tIiMrTAxEWRfLfQJMmGNX28IxBg4PBz8XqQhqUHLFI6TL2htpIB1iQCj144V5ee/JaKyT9/WZ0MGZWfA7A==} + engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} + '@eslint-community/regexpp@4.9.0': resolution: {integrity: sha512-zJmuCWj2VLBt4c25CfBIbMZLGLyhkvs7LznyVX5HfpzeocThgIj5XQK4L+g3U36mMcx8bPMhGyPpwCATamC4jQ==} engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} @@ -2314,6 +2849,10 @@ packages: resolution: {integrity: sha512-yZzuIG+jnVu6hNSzFEN07e8BxF3uAzYtQb6uDkaYZLo6oYZDCq454c5kB8zxnzfCYyP4MIuyBn10L0DqwujTmA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + 
'@eslint/eslintrc@2.1.4': + resolution: {integrity: sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + '@eslint/eslintrc@3.1.0': resolution: {integrity: sha512-4Bfj15dVJdoy3RfZmmo86RK1Fwzn6SstsvK9JS+BaVKqC6QQQQyXekNaC+g+LKNgkQ+2VhGAzm6hO40AhMR3zQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -2326,6 +2865,13 @@ packages: resolution: {integrity: sha512-Kn7K8dx/5U6+cT1yEhpX1w4PCSg0M+XyRILPgvwcEBjerFWCwQj5sbr3/VmxqV0JGHCBCzyd6LxypEuehypY1w==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + '@eslint/js@8.57.0': + resolution: {integrity: sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + '@ewoudenberg/difflib@0.1.0': + resolution: {integrity: sha512-OU5P5mJyD3OoWYMWY+yIgwvgNS9cFAU10f+DDuvtogcWQOoJIsQ4Hy2McSfUfhKjq8L0FuWVb4Rt7kgA+XK86A==} + '@expo/bunyan@4.0.0': resolution: {integrity: sha512-Ydf4LidRB/EBI+YrB+cVLqIseiRfjUI/AeHBgjGMtq3GroraDu81OV7zqophRgupngoL3iS3JUMDMnxO7g39qA==} engines: {'0': node >=0.10.0} @@ -2415,6 +2961,16 @@ packages: '@hapi/topo@5.1.0': resolution: {integrity: sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==} + '@hono/node-server@1.12.0': + resolution: {integrity: sha512-e6oHjNiErRxsZRZBmc2KucuvY3btlO/XPncIpP2X75bRdTilF9GLjm3NHvKKunpJbbJJj31/FoPTksTf8djAVw==} + engines: {node: '>=18.14.1'} + + '@hono/zod-validator@0.2.2': + resolution: {integrity: sha512-dSDxaPV70Py8wuIU2QNpoVEIOSzSXZ/6/B/h4xA7eOMz7+AarKTSGV8E6QwrdcCbBLkpqfJ4Q2TmBO0eP1tCBQ==} + peerDependencies: + hono: '>=3.9.0' + zod: ^3.19.1 + '@humanwhocodes/config-array@0.11.11': resolution: {integrity: sha512-N2brEuAadi0CcdeMXUkhbZB84eskAc8MEX1By6qEchoVywSgXPIjou4rYsl0V3Hj0ZnuGycGCjdNgockbzeWNA==} engines: {node: '>=10.10.0'} @@ -2423,6 +2979,11 @@ packages: resolution: {integrity: 
sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ==} engines: {node: '>=10.10.0'} + '@humanwhocodes/config-array@0.11.14': + resolution: {integrity: sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==} + engines: {node: '>=10.10.0'} + deprecated: Use @eslint/config-array instead + '@humanwhocodes/module-importer@1.0.1': resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} engines: {node: '>=12.22'} @@ -2433,6 +2994,10 @@ packages: '@humanwhocodes/object-schema@2.0.1': resolution: {integrity: sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw==} + '@humanwhocodes/object-schema@2.0.3': + resolution: {integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==} + deprecated: Use @eslint/object-schema instead + '@iarna/toml@2.2.5': resolution: {integrity: sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==} @@ -2513,23 +3078,33 @@ packages: '@jridgewell/trace-mapping@0.3.9': resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + '@libsql/client@0.4.3': + resolution: {integrity: sha512-AUYKnSPqAsFBVWBvmtrb4dG3pQlvTKT92eztAest9wQU2iJkabH8WzHLDb3dKFWKql7/kiCqvBQUVpozDwhekQ==} + '@libsql/client@0.5.6': resolution: {integrity: sha512-UBjmDoxz75Z2sHdP+ETCROpeLA/77VMesiff8R4UWK1rnaWbh6/YoCLDILMJL3Rh0udQeKxjL8MjXthqohax+g==} - '@libsql/client@0.6.0': - resolution: {integrity: sha512-qhQzTG/y2IEVbL3+9PULDvlQFWJ/RnjFXECr/Nc3nRngGiiMysDaOV5VUzYk7DulUX98EA4wi+z3FspKrUplUA==} + '@libsql/core@0.4.3': + resolution: {integrity: sha512-r28iYBtaLBW9RRgXPFh6cGCsVI/rwRlOzSOpAu/1PVTm6EJ3t233pUf97jETVHU0vjdr1d8VvV6fKAvJkokqCw==} '@libsql/core@0.5.6': resolution: {integrity: 
sha512-3vicUAydq6jPth410n4AsHHm1n2psTwvkSf94nfJlSXutGSZsl0updn2N/mJBgqUHkbuFoWZtlMifF0SwBj1xQ==} - '@libsql/core@0.6.0': - resolution: {integrity: sha512-affAB8vSqQwqI9NBDJ5uJCVaHoOAS2pOpbv1kWConh1SBbmJBnHHd4KG73RAJ2sgd2+NbT9WA+XJBqxgp28YSw==} + '@libsql/darwin-arm64@0.2.0': + resolution: {integrity: sha512-+qyT2W/n5CFH1YZWv2mxW4Fsoo4dX9Z9M/nvbQqZ7H84J8hVegvVAsIGYzcK8xAeMEcpU5yGKB1Y9NoDY4hOSQ==} + cpu: [arm64] + os: [darwin] '@libsql/darwin-arm64@0.3.18': resolution: {integrity: sha512-Zt49dt+cwhPCkuoWgvjbQd4ckNfCJR5xzIAyhgHl3CBZqZaEuaXTOGKLNQT7bnFRPuQcdLt5PBT1cenKu2N6pA==} cpu: [arm64] os: [darwin] + '@libsql/darwin-x64@0.2.0': + resolution: {integrity: sha512-hwmO2mF1n8oDHKFrUju6Jv+n9iFtTf5JUK+xlnIE3Td0ZwGC/O1R/Z/btZTd9nD+vsvakC8SJT7/Q6YlWIkhEw==} + cpu: [x64] + os: [darwin] + '@libsql/darwin-x64@0.3.18': resolution: {integrity: sha512-faq6HUGDaNaueeqPei5cypHaD/hhazUyfHo094CXiEeRZq6ZKtNl5PHdlr8jE/Uw8USNpVVQaLdnvSgKcpRPHw==} cpu: [x64] @@ -2538,38 +3113,57 @@ packages: '@libsql/hrana-client@0.5.6': resolution: {integrity: sha512-mjQoAmejZ1atG+M3YR2ZW+rg6ceBByH/S/h17ZoYZkqbWrvohFhXyz2LFxj++ARMoY9m6w3RJJIRdJdmnEUlFg==} - '@libsql/hrana-client@0.6.0': - resolution: {integrity: sha512-k+fqzdjqg3IvWfKmVJK5StsbjeTcyNAXFelUbXbGNz3yH1gEVT9mZ6kmhsIXP30ZSyVV0AE1Gi25p82mxC9hwg==} - '@libsql/isomorphic-fetch@0.1.12': resolution: {integrity: sha512-MRo4UcmjAGAa3ac56LoD5OE13m2p0lu0VEtZC2NZMcogM/jc5fU9YtMQ3qbPjFJ+u2BBjFZgMPkQaLS1dlMhpg==} - '@libsql/isomorphic-fetch@0.2.1': - resolution: {integrity: sha512-Sv07QP1Aw8A5OOrmKgRUBKe2fFhF2hpGJhtHe3d1aRnTESZCGkn//0zDycMKTGamVWb3oLYRroOsCV8Ukes9GA==} - '@libsql/isomorphic-ws@0.1.5': resolution: {integrity: sha512-DtLWIH29onUYR00i0GlQ3UdcTRC6EP4u9w/h9LxpUZJWRMARk6dQwZ6Jkd+QdwVpuAOrdxt18v0K2uIYR3fwFg==} + '@libsql/linux-arm64-gnu@0.2.0': + resolution: {integrity: sha512-1w2lPXIYtnBaK5t/Ej5E8x7lPiE+jP3KATI/W4yei5Z/ONJh7jQW5PJ7sYU95vTME3hWEM1FXN6kvzcpFAte7w==} + cpu: [arm64] + os: [linux] + '@libsql/linux-arm64-gnu@0.3.18': resolution: 
{integrity: sha512-5m9xtDAhoyLSV54tho9uQ2ZIDeJWc0vU3Xpe/VK4+6bpURISs23qNhXiCrZnnq3oV0hFlBfcIgQUIATmb6jD2A==} cpu: [arm64] os: [linux] + '@libsql/linux-arm64-musl@0.2.0': + resolution: {integrity: sha512-lkblBEJ7xuNiWNjP8DDq0rqoWccszfkUS7Efh5EjJ+GDWdCBVfh08mPofIZg0fZVLWQCY3j+VZCG1qZfATBizg==} + cpu: [arm64] + os: [linux] + '@libsql/linux-arm64-musl@0.3.18': resolution: {integrity: sha512-oYD5+oM2gPEalp+EoR5DVQBRtdGjLsocjsRbQs5O2m4WOBJKER7VUfDYZHsifLGZoBSc11Yo6s9IR9rjGWy20w==} cpu: [arm64] os: [linux] + '@libsql/linux-x64-gnu@0.2.0': + resolution: {integrity: sha512-+x/d289KeJydwOhhqSxKT+6MSQTCfLltzOpTzPccsvdt5fxg8CBi+gfvEJ4/XW23Sa+9bc7zodFP0i6MOlxX7w==} + cpu: [x64] + os: [linux] + '@libsql/linux-x64-gnu@0.3.18': resolution: {integrity: sha512-QDSSP60nS8KIldGE7H3bpEflQHiL1erwED6huoVJdmDFxsyDJX2CYdWUWW8Za0ZUOvUbnEWAOyMhp6j1dBbZqw==} cpu: [x64] os: [linux] + '@libsql/linux-x64-musl@0.2.0': + resolution: {integrity: sha512-5Xn0c5A6vKf9D1ASpgk7mef//FuY7t5Lktj/eiU4n3ryxG+6WTpqstTittJUgepVjcleLPYxIhQAYeYwTYH1IQ==} + cpu: [x64] + os: [linux] + '@libsql/linux-x64-musl@0.3.18': resolution: {integrity: sha512-5SXwTlaLCUPzxYyq+P0c7Ko7tcEjpd1X6RZKe1DuRFmJPg6f7j2+LrPEhMSIbqKcrl5ACUUAyoKmGZqNYwz23w==} cpu: [x64] os: [linux] + '@libsql/win32-x64-msvc@0.2.0': + resolution: {integrity: sha512-rpK+trBIpRST15m3cMYg5aPaX7kvCIottxY7jZPINkKAaScvfbn9yulU/iZUM9YtuK96Y1ZmvwyVIK/Y5DzoMQ==} + cpu: [x64] + os: [win32] + '@libsql/win32-x64-msvc@0.3.18': resolution: {integrity: sha512-9EEIHz+e8tTbx9TMkb8ByZnzxc0pYFirK1nSbqC6cFEST95fiY0NCfQ/zAzJxe90KckbjifX6BbO69eWIi3TAg==} cpu: [x64] @@ -2655,6 +3249,10 @@ packages: resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} + '@pkgr/core@0.1.1': + resolution: {integrity: sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==} + engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + '@planetscale/database@1.18.0': 
resolution: {integrity: sha512-t2XdOfrVgcF7AW791FtdPS27NyNqcE1SpoXgk3HpziousvUMsJi4Q6NL3JyOBpsMOrvk94749o8yyonvX5quPw==} engines: {node: '>=16'} @@ -2843,81 +3441,161 @@ packages: cpu: [arm] os: [android] + '@rollup/rollup-android-arm-eabi@4.18.1': + resolution: {integrity: sha512-lncuC4aHicncmbORnx+dUaAgzee9cm/PbIqgWz1PpXuwc+sa1Ct83tnqUDy/GFKleLiN7ZIeytM6KJ4cAn1SxA==} + cpu: [arm] + os: [android] + '@rollup/rollup-android-arm64@4.18.0': resolution: {integrity: sha512-avCea0RAP03lTsDhEyfy+hpfr85KfyTctMADqHVhLAF3MlIkq83CP8UfAHUssgXTYd+6er6PaAhx/QGv4L1EiA==} cpu: [arm64] os: [android] + '@rollup/rollup-android-arm64@4.18.1': + resolution: {integrity: sha512-F/tkdw0WSs4ojqz5Ovrw5r9odqzFjb5LIgHdHZG65dFI1lWTWRVy32KDJLKRISHgJvqUeUhdIvy43fX41znyDg==} + cpu: [arm64] + os: [android] + '@rollup/rollup-darwin-arm64@4.18.0': resolution: {integrity: sha512-IWfdwU7KDSm07Ty0PuA/W2JYoZ4iTj3TUQjkVsO/6U+4I1jN5lcR71ZEvRh52sDOERdnNhhHU57UITXz5jC1/w==} cpu: [arm64] os: [darwin] + '@rollup/rollup-darwin-arm64@4.18.1': + resolution: {integrity: sha512-vk+ma8iC1ebje/ahpxpnrfVQJibTMyHdWpOGZ3JpQ7Mgn/3QNHmPq7YwjZbIE7km73dH5M1e6MRRsnEBW7v5CQ==} + cpu: [arm64] + os: [darwin] + '@rollup/rollup-darwin-x64@4.18.0': resolution: {integrity: sha512-n2LMsUz7Ynu7DoQrSQkBf8iNrjOGyPLrdSg802vk6XT3FtsgX6JbE8IHRvposskFm9SNxzkLYGSq9QdpLYpRNA==} cpu: [x64] os: [darwin] + '@rollup/rollup-darwin-x64@4.18.1': + resolution: {integrity: sha512-IgpzXKauRe1Tafcej9STjSSuG0Ghu/xGYH+qG6JwsAUxXrnkvNHcq/NL6nz1+jzvWAnQkuAJ4uIwGB48K9OCGA==} + cpu: [x64] + os: [darwin] + '@rollup/rollup-linux-arm-gnueabihf@4.18.0': resolution: {integrity: sha512-C/zbRYRXFjWvz9Z4haRxcTdnkPt1BtCkz+7RtBSuNmKzMzp3ZxdM28Mpccn6pt28/UWUCTXa+b0Mx1k3g6NOMA==} cpu: [arm] os: [linux] + '@rollup/rollup-linux-arm-gnueabihf@4.18.1': + resolution: {integrity: sha512-P9bSiAUnSSM7EmyRK+e5wgpqai86QOSv8BwvkGjLwYuOpaeomiZWifEos517CwbG+aZl1T4clSE1YqqH2JRs+g==} + cpu: [arm] + os: [linux] + '@rollup/rollup-linux-arm-musleabihf@4.18.0': resolution: {integrity: 
sha512-l3m9ewPgjQSXrUMHg93vt0hYCGnrMOcUpTz6FLtbwljo2HluS4zTXFy2571YQbisTnfTKPZ01u/ukJdQTLGh9A==} cpu: [arm] os: [linux] + '@rollup/rollup-linux-arm-musleabihf@4.18.1': + resolution: {integrity: sha512-5RnjpACoxtS+aWOI1dURKno11d7krfpGDEn19jI8BuWmSBbUC4ytIADfROM1FZrFhQPSoP+KEa3NlEScznBTyQ==} + cpu: [arm] + os: [linux] + '@rollup/rollup-linux-arm64-gnu@4.18.0': resolution: {integrity: sha512-rJ5D47d8WD7J+7STKdCUAgmQk49xuFrRi9pZkWoRD1UeSMakbcepWXPF8ycChBoAqs1pb2wzvbY6Q33WmN2ftw==} cpu: [arm64] os: [linux] + '@rollup/rollup-linux-arm64-gnu@4.18.1': + resolution: {integrity: sha512-8mwmGD668m8WaGbthrEYZ9CBmPug2QPGWxhJxh/vCgBjro5o96gL04WLlg5BA233OCWLqERy4YUzX3bJGXaJgQ==} + cpu: [arm64] + os: [linux] + '@rollup/rollup-linux-arm64-musl@4.18.0': resolution: {integrity: sha512-be6Yx37b24ZwxQ+wOQXXLZqpq4jTckJhtGlWGZs68TgdKXJgw54lUUoFYrg6Zs/kjzAQwEwYbp8JxZVzZLRepQ==} cpu: [arm64] os: [linux] + '@rollup/rollup-linux-arm64-musl@4.18.1': + resolution: {integrity: sha512-dJX9u4r4bqInMGOAQoGYdwDP8lQiisWb9et+T84l2WXk41yEej8v2iGKodmdKimT8cTAYt0jFb+UEBxnPkbXEQ==} + cpu: [arm64] + os: [linux] + '@rollup/rollup-linux-powerpc64le-gnu@4.18.0': resolution: {integrity: sha512-hNVMQK+qrA9Todu9+wqrXOHxFiD5YmdEi3paj6vP02Kx1hjd2LLYR2eaN7DsEshg09+9uzWi2W18MJDlG0cxJA==} cpu: [ppc64] os: [linux] + '@rollup/rollup-linux-powerpc64le-gnu@4.18.1': + resolution: {integrity: sha512-V72cXdTl4EI0x6FNmho4D502sy7ed+LuVW6Ym8aI6DRQ9hQZdp5sj0a2usYOlqvFBNKQnLQGwmYnujo2HvjCxQ==} + cpu: [ppc64] + os: [linux] + '@rollup/rollup-linux-riscv64-gnu@4.18.0': resolution: {integrity: sha512-ROCM7i+m1NfdrsmvwSzoxp9HFtmKGHEqu5NNDiZWQtXLA8S5HBCkVvKAxJ8U+CVctHwV2Gb5VUaK7UAkzhDjlg==} cpu: [riscv64] os: [linux] + '@rollup/rollup-linux-riscv64-gnu@4.18.1': + resolution: {integrity: sha512-f+pJih7sxoKmbjghrM2RkWo2WHUW8UbfxIQiWo5yeCaCM0TveMEuAzKJte4QskBp1TIinpnRcxkquY+4WuY/tg==} + cpu: [riscv64] + os: [linux] + '@rollup/rollup-linux-s390x-gnu@4.18.0': resolution: {integrity: 
sha512-0UyyRHyDN42QL+NbqevXIIUnKA47A+45WyasO+y2bGJ1mhQrfrtXUpTxCOrfxCR4esV3/RLYyucGVPiUsO8xjg==} cpu: [s390x] os: [linux] + '@rollup/rollup-linux-s390x-gnu@4.18.1': + resolution: {integrity: sha512-qb1hMMT3Fr/Qz1OKovCuUM11MUNLUuHeBC2DPPAWUYYUAOFWaxInaTwTQmc7Fl5La7DShTEpmYwgdt2hG+4TEg==} + cpu: [s390x] + os: [linux] + '@rollup/rollup-linux-x64-gnu@4.18.0': resolution: {integrity: sha512-xuglR2rBVHA5UsI8h8UbX4VJ470PtGCf5Vpswh7p2ukaqBGFTnsfzxUBetoWBWymHMxbIG0Cmx7Y9qDZzr648w==} cpu: [x64] os: [linux] + '@rollup/rollup-linux-x64-gnu@4.18.1': + resolution: {integrity: sha512-7O5u/p6oKUFYjRbZkL2FLbwsyoJAjyeXHCU3O4ndvzg2OFO2GinFPSJFGbiwFDaCFc+k7gs9CF243PwdPQFh5g==} + cpu: [x64] + os: [linux] + '@rollup/rollup-linux-x64-musl@4.18.0': resolution: {integrity: sha512-LKaqQL9osY/ir2geuLVvRRs+utWUNilzdE90TpyoX0eNqPzWjRm14oMEE+YLve4k/NAqCdPkGYDaDF5Sw+xBfg==} cpu: [x64] os: [linux] + '@rollup/rollup-linux-x64-musl@4.18.1': + resolution: {integrity: sha512-pDLkYITdYrH/9Cv/Vlj8HppDuLMDUBmgsM0+N+xLtFd18aXgM9Nyqupb/Uw+HeidhfYg2lD6CXvz6CjoVOaKjQ==} + cpu: [x64] + os: [linux] + '@rollup/rollup-win32-arm64-msvc@4.18.0': resolution: {integrity: sha512-7J6TkZQFGo9qBKH0pk2cEVSRhJbL6MtfWxth7Y5YmZs57Pi+4x6c2dStAUvaQkHQLnEQv1jzBUW43GvZW8OFqA==} cpu: [arm64] os: [win32] + '@rollup/rollup-win32-arm64-msvc@4.18.1': + resolution: {integrity: sha512-W2ZNI323O/8pJdBGil1oCauuCzmVd9lDmWBBqxYZcOqWD6aWqJtVBQ1dFrF4dYpZPks6F+xCZHfzG5hYlSHZ6g==} + cpu: [arm64] + os: [win32] + '@rollup/rollup-win32-ia32-msvc@4.18.0': resolution: {integrity: sha512-Txjh+IxBPbkUB9+SXZMpv+b/vnTEtFyfWZgJ6iyCmt2tdx0OF5WhFowLmnh8ENGNpfUlUZkdI//4IEmhwPieNg==} cpu: [ia32] os: [win32] + '@rollup/rollup-win32-ia32-msvc@4.18.1': + resolution: {integrity: sha512-ELfEX1/+eGZYMaCIbK4jqLxO1gyTSOIlZr6pbC4SRYFaSIDVKOnZNMdoZ+ON0mrFDp4+H5MhwNC1H/AhE3zQLg==} + cpu: [ia32] + os: [win32] + '@rollup/rollup-win32-x64-msvc@4.18.0': resolution: {integrity: 
sha512-UOo5FdvOL0+eIVTgS4tIdbW+TtnBLWg1YBCcU2KWM7nuNwRz9bksDX1bekJJCpu25N1DVWaCwnT39dVQxzqS8g==} cpu: [x64] os: [win32] + '@rollup/rollup-win32-x64-msvc@4.18.1': + resolution: {integrity: sha512-yjk2MAkQmoaPYCSu35RLJ62+dz358nE83VfTePJRp8CG7aMg25mEJYpXFiD+NcevhX8LxD5OP5tktPXnXN7GDw==} + cpu: [x64] + os: [win32] + '@segment/loosely-validate-event@2.0.0': resolution: {integrity: sha512-ZMCSfztDBqwotkl848ODgVcAmN4OItEWDCkshcKz0/W6gGSQayuuCtWV/MlodFivAZD793d6UgANd6wCXUfrIw==} @@ -3342,6 +4020,9 @@ packages: '@types/fs-extra@11.0.4': resolution: {integrity: sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==} + '@types/glob@8.1.0': + resolution: {integrity: sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w==} + '@types/http-errors@2.0.4': resolution: {integrity: sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==} @@ -3354,6 +4035,9 @@ packages: '@types/istanbul-reports@3.0.4': resolution: {integrity: sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==} + '@types/json-diff@1.0.3': + resolution: {integrity: sha512-Qvxm8fpRMv/1zZR3sQWImeRK2mBYJji20xF51Fq9Gt//Ed18u0x6/FNLogLS1xhfUWTEmDyqveJqn95ltB6Kvw==} + '@types/json-schema@7.0.13': resolution: {integrity: sha512-RbSSoHliUbnXj3ny0CNFOoxrIDV6SUGyStHsvDqosw6CkdPV8TtWGlfecuK4ToyMEAql6pzNxgCFKanovUzlgQ==} @@ -3366,6 +4050,9 @@ packages: '@types/mime@1.3.5': resolution: {integrity: sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==} + '@types/minimatch@5.1.2': + resolution: {integrity: sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==} + '@types/minimist@1.2.2': resolution: {integrity: sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==} @@ -3396,6 +4083,9 @@ packages: '@types/pg@8.6.6': resolution: {integrity: 
sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw==} + '@types/pluralize@0.0.33': + resolution: {integrity: sha512-JOqsl+ZoCpP4e8TDke9W79FDcSgPAR0l6pixx2JHkhnRjvShyYiAYw2LVsnA7K08Y6DeOnaU6ujmENO4os/cYg==} + '@types/prop-types@15.7.12': resolution: {integrity: sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==} @@ -3414,8 +4104,8 @@ packages: '@types/retry@0.12.5': resolution: {integrity: sha512-3xSjTp3v03X/lSQLkczaN9UIEwJMoMCA1+Nb5HfbJEQWogdeQIyVtTvxPXDQjZ5zws8rFQfVfRdz03ARihPJgw==} - '@types/semver@7.5.3': - resolution: {integrity: sha512-OxepLK9EuNEIPxWNME+C6WwbRAOOI2o2BaQEGzz5Lu2e4Z5eDnEo+/aVEDMIXywoJitJ7xWd641wrGLZdtwRyw==} + '@types/semver@7.5.8': + resolution: {integrity: sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==} '@types/send@0.17.4': resolution: {integrity: sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==} @@ -3438,8 +4128,8 @@ packages: '@types/which@3.0.0': resolution: {integrity: sha512-ASCxdbsrwNfSMXALlC3Decif9rwDMu+80KGp5zI2RLRotfMsTv7fHL8W8VDp24wymzDyIFudhUeSCugrgRFfHQ==} - '@types/ws@8.5.4': - resolution: {integrity: sha512-zdQDHKUgcX/zBc4GrwsE/7dVdAD8JR4EuiAXiiUhhfyIJXXb2+PrGshFyeXWQPMmmZ2XxgaqclgpIC7eTXc1mg==} + '@types/ws@8.5.11': + resolution: {integrity: sha512-4+q7P5h3SpJxaBft0Dzpbr6lmMaqh0Jr2tbhJZ/luAwvD7ohSCniYkwz/pLxuT2h0EOa6QADgJj1Ko+TzRfZ+w==} '@types/yargs-parser@21.0.3': resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==} @@ -3461,6 +4151,17 @@ packages: typescript: optional: true + '@typescript-eslint/eslint-plugin@7.16.1': + resolution: {integrity: sha512-SxdPak/5bO0EnGktV05+Hq8oatjAYVY3Zh2bye9pGZy6+jwyR3LG3YKkV4YatlsgqXP28BTeVm9pqwJM96vf2A==} + engines: {node: ^18.18.0 || >=20.0.0} + peerDependencies: + '@typescript-eslint/parser': ^7.0.0 + eslint: ^8.56.0 + typescript: '*' + 
peerDependenciesMeta: + typescript: + optional: true + '@typescript-eslint/experimental-utils@5.62.0': resolution: {integrity: sha512-RTXpeB3eMkpoclG3ZHft6vG/Z30azNHuqY6wKPBHlVMZFuEvrtlEDe8gMqDb+SO+9hjC/pLekeSCryf9vMZlCw==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -3487,6 +4188,16 @@ packages: typescript: optional: true + '@typescript-eslint/parser@7.16.1': + resolution: {integrity: sha512-u+1Qx86jfGQ5i4JjK33/FnawZRpsLxRnKzGE6EABZ40KxVT/vWsiZFEBBHjFOljmmV3MBYOHEKi0Jm9hbAOClA==} + engines: {node: ^18.18.0 || >=20.0.0} + peerDependencies: + eslint: ^8.56.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + '@typescript-eslint/rule-tester@6.10.0': resolution: {integrity: sha512-I0ZY+9ei73dlOuXwIYWsn/r/ue26Ygf4yEJPxeJRPI06YWDawmR1FI1dXL6ChAWVrmBQRvWep/1PxnV41zfcMA==} engines: {node: ^16.0.0 || >=18.0.0} @@ -3506,6 +4217,10 @@ packages: resolution: {integrity: sha512-wOlo0QnEou9cHO2TdkJmzF7DFGvAKEnB82PuPNHpT8ZKKaZu6Bm63ugOTn9fXNJtvuDPanBc78lGUGGytJoVzQ==} engines: {node: ^16.0.0 || >=18.0.0} + '@typescript-eslint/scope-manager@7.16.1': + resolution: {integrity: sha512-nYpyv6ALte18gbMz323RM+vpFpTjfNdyakbf3nsLvF43uF9KeNC289SUEW3QLZ1xPtyINJ1dIsZOuWuSRIWygw==} + engines: {node: ^18.18.0 || >=20.0.0} + '@typescript-eslint/type-utils@6.7.3': resolution: {integrity: sha512-Fc68K0aTDrKIBvLnKTZ5Pf3MXK495YErrbHb1R6aTpfK5OdSFj0rVN7ib6Tx6ePrZ2gsjLqr0s98NG7l96KSQw==} engines: {node: ^16.0.0 || >=18.0.0} @@ -3516,6 +4231,16 @@ packages: typescript: optional: true + '@typescript-eslint/type-utils@7.16.1': + resolution: {integrity: sha512-rbu/H2MWXN4SkjIIyWcmYBjlp55VT+1G3duFOIukTNFxr9PI35pLc2ydwAfejCEitCv4uztA07q0QWanOHC7dA==} + engines: {node: ^18.18.0 || >=20.0.0} + peerDependencies: + eslint: ^8.56.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + '@typescript-eslint/types@5.62.0': resolution: {integrity: sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ==} engines: {node: 
^12.22.0 || ^14.17.0 || >=16.0.0} @@ -3528,6 +4253,10 @@ packages: resolution: {integrity: sha512-4g+de6roB2NFcfkZb439tigpAMnvEIg3rIjWQ+EM7IBaYt/CdJt6em9BJ4h4UpdgaBWdmx2iWsafHTrqmgIPNw==} engines: {node: ^16.0.0 || >=18.0.0} + '@typescript-eslint/types@7.16.1': + resolution: {integrity: sha512-AQn9XqCzUXd4bAVEsAXM/Izk11Wx2u4H3BAfQVhSfzfDOm/wAON9nP7J5rpkCxts7E5TELmN845xTUCQrD1xIQ==} + engines: {node: ^18.18.0 || >=20.0.0} + '@typescript-eslint/typescript-estree@5.62.0': resolution: {integrity: sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -3555,6 +4284,15 @@ packages: typescript: optional: true + '@typescript-eslint/typescript-estree@7.16.1': + resolution: {integrity: sha512-0vFPk8tMjj6apaAZ1HlwM8w7jbghC8jc1aRNJG5vN8Ym5miyhTQGMqU++kuBFDNKe9NcPeZ6x0zfSzV8xC1UlQ==} + engines: {node: ^18.18.0 || >=20.0.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + '@typescript-eslint/utils@5.62.0': resolution: {integrity: sha512-n8oxjeb5aIbPFEtmQxQYOLI0i9n5ySBEY/ZEHHZqKQSFnxio1rv6dthascc9dLuwrL0RC5mPCxB7vnAVGAYWAQ==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -3573,6 +4311,12 @@ packages: peerDependencies: eslint: ^7.0.0 || ^8.0.0 + '@typescript-eslint/utils@7.16.1': + resolution: {integrity: sha512-WrFM8nzCowV0he0RlkotGDujx78xudsxnGMBHI88l5J8wEhED6yBwaSLP99ygfrzAjsQvcYQ94quDwI0d7E1fA==} + engines: {node: ^18.18.0 || >=20.0.0} + peerDependencies: + eslint: ^8.56.0 + '@typescript-eslint/visitor-keys@5.62.0': resolution: {integrity: sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -3585,6 +4329,10 @@ packages: resolution: {integrity: sha512-HEVXkU9IB+nk9o63CeICMHxFWbHWr3E1mpilIQBe9+7L/lH97rleFLVtYsfnWB+JVMaiFnEaxvknvmIzX+CqVg==} engines: {node: ^16.0.0 || >=18.0.0} + '@typescript-eslint/visitor-keys@7.16.1': + resolution: 
{integrity: sha512-Qlzzx4sE4u3FsHTPQAAQFJFNOuqtuY0LFrZHwQ8IHK705XxBiWOFkfKRWu6niB7hwfgnwIpO4jTC75ozW1PHWg==} + engines: {node: ^18.18.0 || >=20.0.0} + '@typescript/analyze-trace@0.10.1': resolution: {integrity: sha512-RnlSOPh14QbopGCApgkSx5UBgGda5MX1cHqp2fsqfiDyCwGL/m1jaeB9fzu7didVS81LQqGZZuxFBcg8YU8EVw==} hasBin: true @@ -3827,6 +4575,9 @@ packages: resolution: {integrity: sha512-tLkvA81vQG/XqE2mjDkGQHoOINtMHtysSnemrmoGe6PydDPMRbVugqyk4A6V/WDWEfm3l+0d8anA9r8cv/5Jaw==} engines: {node: '>=12'} + as-table@1.0.55: + resolution: {integrity: sha512-xvsWESUJn0JN421Xb9MQw6AsMHRCUknCe0Wjlxvjud80mU4E6hQf1A6NzQKcYNmYw62MfzEtXc+badstZP3JpQ==} + asap@2.0.6: resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} @@ -3879,6 +4630,10 @@ packages: resolution: {integrity: sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==} engines: {node: '>= 0.4'} + aws-ssl-profiles@1.1.1: + resolution: {integrity: sha512-+H+kuK34PfMaI9PNU/NSjBKL5hh/KDM9J72kwYeYEm0A8B1AC4fuCy3qsjnA7lxklgyXsB68yn8Z2xoZEjgwCQ==} + engines: {node: '>= 6.0.0'} + axios@1.6.8: resolution: {integrity: sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ==} @@ -3924,12 +4679,12 @@ packages: resolution: {integrity: sha512-aVNobHnJqLiUelTaHat9DZ1qM2w0C0Eym4LPI/3JxOnSokGVdsl1T1kN7TFvsEAD8G47A6VKQ0TVHqbBnYMJlQ==} engines: {node: '>=12.0.0'} - better-sqlite3@10.0.0: - resolution: {integrity: sha512-rOz0JY8bt9oMgrFssP7GnvA5R3yln73y/NizzWqy3WlFth8Ux8+g4r/N9fjX97nn4X1YX6MTER2doNpTu5pqiA==} - better-sqlite3@8.7.0: resolution: {integrity: sha512-99jZU4le+f3G6aIl6PmmV0cxUIWqKieHxsiF7G34CVFiE+/UabpYqkU0NJIkY/96mQKikHeBjtR27vFfs5JpEw==} + better-sqlite3@9.6.0: + resolution: {integrity: sha512-yR5HATnqeYNVnkaUTf4bOP2dJSnyhP4puJN/QPRyx4YkBEEUxib422n2XzPqDEHjQQqazoYoADdAm5vE15+dAQ==} + big-integer@1.6.52: resolution: {integrity: 
sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==} engines: {node: '>=0.6'} @@ -3944,6 +4699,9 @@ packages: bl@4.1.0: resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} + blake3-wasm@2.1.5: + resolution: {integrity: sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==} + blueimp-md5@2.19.0: resolution: {integrity: sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w==} @@ -4032,6 +4790,12 @@ packages: peerDependencies: esbuild: '>=0.17' + bundle-require@5.0.0: + resolution: {integrity: sha512-GuziW3fSSmopcx4KRymQEJVbZUfqlCqcq7dvs6TYwKRZiegK/2buMxQTPs6MGlNv50wms1699qYO54R8XfRX4w==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + peerDependencies: + esbuild: '>=0.18' + busboy@1.6.0: resolution: {integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==} engines: {node: '>=10.16.0'} @@ -4098,6 +4862,9 @@ packages: caniuse-lite@1.0.30001624: resolution: {integrity: sha512-0dWnQG87UevOCPYaOR49CBcLBwoZLpws+k6W37nLjWUhumP1Isusj0p2u+3KhjNloRWK9OKMgjBBzPujQHw4nA==} + capnp-ts@0.7.0: + resolution: {integrity: sha512-XKxXAC3HVPv7r674zP0VC3RTXz+/JKhfyw94ljvF80yynK6VkTnqE3jMuN8b3dUVmmc43TjyxjW4KTsmB3c86g==} + cardinal@2.1.1: resolution: {integrity: sha512-JSr5eOgoEymtYHBjNWyjrMqet9Am2miJhlfKNdqLp6zoeAh0KN5dRAcxlecj5mAJrmQomgiOBj35xHLrFjqBpw==} hasBin: true @@ -4136,6 +4903,10 @@ packages: resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} engines: {node: '>= 8.10.0'} + chokidar@3.6.0: + resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} + engines: {node: '>= 8.10.0'} + chownr@1.1.4: resolution: {integrity: 
sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} @@ -4254,6 +5025,10 @@ packages: colorette@2.0.19: resolution: {integrity: sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==} + colors@1.4.0: + resolution: {integrity: sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==} + engines: {node: '>=0.1.90'} + combined-stream@1.0.8: resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} engines: {node: '>= 0.8'} @@ -4269,6 +5044,10 @@ packages: resolution: {integrity: sha512-9HMlXtt/BNoYr8ooyjjNRdIilOTkVJXB+GhxMTtOKwk0R4j4lS4NpjuqmRxroBfnfTSHQIHQB7wryHhXarNjmQ==} engines: {node: '>=16'} + commander@12.1.0: + resolution: {integrity: sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==} + engines: {node: '>=18'} + commander@2.20.3: resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} @@ -4320,6 +5099,10 @@ packages: resolution: {integrity: sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ==} engines: {node: '>= 0.10.0'} + consola@3.2.3: + resolution: {integrity: sha512-I5qxpzLv+sJhTVEoLYNcTW+bThDCPsit0vLNKShZx6rLtpilNpmmeTPaeqJb9ZE9dV3DGaeby6Vuhrw38WjeyQ==} + engines: {node: ^14.18.0 || >=16.10.0} + console-control-strings@1.1.0: resolution: {integrity: sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==} @@ -4341,10 +5124,18 @@ packages: cookie-signature@1.0.6: resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==} + cookie@0.5.0: + resolution: {integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==} + engines: {node: '>= 0.6'} + cookie@0.6.0: resolution: 
{integrity: sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==} engines: {node: '>= 0.6'} + copy-anything@3.0.5: + resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==} + engines: {node: '>=12.13'} + core-js-compat@3.37.1: resolution: {integrity: sha512-9TNiImhKvQqSUkOvk/mMRZzOANTiEVC7WaBNhHcKM7x+/5E1l5NvsysR19zuDQScE8k+kfQXWRN3AtS/eOSHpg==} @@ -4415,6 +5206,9 @@ packages: dag-map@1.0.2: resolution: {integrity: sha512-+LSAiGFwQ9dRnRdOeaj7g47ZFJcOUPukAP8J3A3fuZ1g9Y44BG+P1sgApjLXTQPOzC4+7S9Wr8kXsfpINM4jpw==} + data-uri-to-buffer@2.0.2: + resolution: {integrity: sha512-ND9qDTLc6diwj+Xe5cdAgVTbLVdXbtxTJRXRhli8Mowuaan+0EJOtdqJ0QCHNSSPyoXGx9HX2/VMnKeC34AChA==} + data-uri-to-buffer@4.0.1: resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} engines: {node: '>= 12'} @@ -4435,6 +5229,9 @@ packages: resolution: {integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==} engines: {node: '>=0.11'} + date-fns@3.6.0: + resolution: {integrity: sha512-fRHTG8g/Gif+kSh50gaGEdToemgfj74aRX3swtiouboip5JDLAyDE9F11nHMIcvOaXeOC6D7SpNhi7uFyB7Uww==} + date-time@3.1.0: resolution: {integrity: sha512-uqCUKXE5q1PNBXjPqvwhwJf9SwMoAHBgWJ6DcrnS5o+W2JOiIILl0JEdVD8SGujrNS02GGxgwAg2PN2zONgtjg==} engines: {node: '>=6'} @@ -4467,6 +5264,15 @@ packages: supports-color: optional: true + debug@4.3.5: + resolution: {integrity: sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + decamelize@1.2.0: resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==} engines: {node: '>=0.10.0'} @@ -4513,6 +5319,9 @@ packages: resolution: {integrity: 
sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==} engines: {node: '>= 0.4'} + defu@6.1.4: + resolution: {integrity: sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==} + del@6.1.1: resolution: {integrity: sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==} engines: {node: '>=10'} @@ -4615,6 +5424,10 @@ packages: resolution: {integrity: sha512-Rba5VW1O2JfJlwVBeZ8Zwt2E2us5oZ08PQBDiVSGlug53TOc8hzXjblZFuF+dnll9/RQEHrkzBmJFgqTvn5Rxg==} hasBin: true + drizzle-kit@0.21.2: + resolution: {integrity: sha512-U87IhZyCt/9d0ZT/Na3KFJVY31tSxtTx/n9UMcWFpW/5c2Ede39xiCG5efNV/0iimsv97UIRtDI0ldLBW5lbcg==} + hasBin: true + drizzle-orm@0.27.2: resolution: {integrity: sha512-ZvBvceff+JlgP7FxHKe0zOU9CkZ4RcOtibumIrqfYzDGuOeF0YUY0F9iMqYpRM7pxnLRfC+oO7rWOUH3T5oFQA==} peerDependencies: @@ -4724,6 +5537,10 @@ packages: resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==} engines: {node: '>=6'} + env-paths@3.0.0: + resolution: {integrity: sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + envinfo@7.13.0: resolution: {integrity: sha512-cvcaMr7KqXVh4nyzGTVqTum+gAiL265x5jUWQIDLq//zOGbW+gSW/C+OWLleY/rs9Qole6AZLMXPbtIFQbqu+Q==} engines: {node: '>=4'} @@ -4883,14 +5700,20 @@ packages: cpu: [x64] os: [netbsd] + esbuild-node-externals@1.14.0: + resolution: {integrity: sha512-jMWnTlCII3cLEjR5+u0JRSTJuP+MgbjEHKfwSIAI41NgLQ0ZjfzjchlbEn0r7v2u5gCBMSEYvYlkO7GDG8gG3A==} + engines: {node: '>=12'} + peerDependencies: + esbuild: 0.12 - 0.23 + esbuild-openbsd-64@0.14.54: resolution: {integrity: sha512-Qyk7ikT2o7Wu76UsvvDS5q0amJvmRzDyVlL0qf5VLsLchjCa1+IAvd8kTBgUxD7VBUUVgItLkk609ZHUc1oCaw==} engines: {node: '>=12'} cpu: [x64] os: [openbsd] - esbuild-register@3.4.2: - resolution: {integrity: 
sha512-kG/XyTDyz6+YDuyfB9ZoSIOOmgyFCH+xPRtsCa8W85HLRV5Csp+o3jWVbOSHgSLfyLc5DmP+KFDNwty4mEjC+Q==} + esbuild-register@3.5.0: + resolution: {integrity: sha512-+4G/XmakeBAsvJuDugJvtyF1x+XJT4FMocynNpxrvEBViirpfUn2PgNpCHedfWhF4WokNsO/OvMKrmJOIJsI5A==} peerDependencies: esbuild: '>=0.12 <1' @@ -4933,6 +5756,11 @@ packages: engines: {node: '>=12'} hasBin: true + esbuild@0.19.12: + resolution: {integrity: sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==} + engines: {node: '>=12'} + hasBin: true + esbuild@0.20.2: resolution: {integrity: sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==} engines: {node: '>=12'} @@ -4943,6 +5771,11 @@ packages: engines: {node: '>=12'} hasBin: true + esbuild@0.23.0: + resolution: {integrity: sha512-1lvV17H2bMYda/WaFb2jLPeHU3zml2k4/yagNMG8Q/YtfMjCwEUZa2eXXMgZTVSL5q1n4H7sQ0X6CdJDqqeCFA==} + engines: {node: '>=18'} + hasBin: true + escalade@3.1.1: resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} engines: {node: '>=6'} @@ -4970,6 +5803,12 @@ packages: resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} engines: {node: '>=12'} + eslint-config-prettier@9.1.0: + resolution: {integrity: sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==} + hasBin: true + peerDependencies: + eslint: '>=7.0.0' + eslint-import-resolver-node@0.3.9: resolution: {integrity: sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==} @@ -5007,6 +5846,20 @@ packages: eslint-plugin-no-instanceof@1.0.1: resolution: {integrity: sha512-zlqQ7EsfzbRO68uI+p8FIE7zYB4njs+nNbkNjSb5QmLi2et67zQLqSeaao5U9SpnlZTTJC87nS2oyHo2ACtajw==} + eslint-plugin-prettier@5.2.1: + resolution: {integrity: 
sha512-gH3iR3g4JfF+yYPaJYkN7jEl9QbweL/YfkoRlNnuIEHEz1vHVlCmWOS+eGGiRuzHQXdJFCOTxRgvju9b8VUmrw==} + engines: {node: ^14.18.0 || >=16.0.0} + peerDependencies: + '@types/eslint': '>=8.0.0' + eslint: '>=8.0.0' + eslint-config-prettier: '*' + prettier: '>=3.0.0' + peerDependenciesMeta: + '@types/eslint': + optional: true + eslint-config-prettier: + optional: true + eslint-plugin-unicorn@48.0.1: resolution: {integrity: sha512-FW+4r20myG/DqFcCSzoumaddKBicIPeFnTrifon2mWIzlfyvzwyqZjqVP7m4Cqr/ZYisS2aiLghkUWaPg6vtCw==} engines: {node: '>=16'} @@ -5053,6 +5906,11 @@ packages: engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} hasBin: true + eslint@8.57.0: + resolution: {integrity: sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + hasBin: true + esm@3.2.25: resolution: {integrity: sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==} engines: {node: '>=6'} @@ -5086,6 +5944,9 @@ packages: resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} engines: {node: '>=4.0'} + estree-walker@0.6.1: + resolution: {integrity: sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==} + estree-walker@2.0.2: resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} @@ -5129,6 +5990,10 @@ packages: resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} engines: {node: '>=16.17'} + exit-hook@2.2.1: + resolution: {integrity: sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw==} + engines: {node: '>=6'} + exit@0.1.2: resolution: {integrity: sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==} engines: {node: '>= 0.8.0'} @@ -5440,10 +6305,17 
@@ packages: resolution: {integrity: sha512-x5UJKlgeUiNT8nyo/AcnwLnZuZNcSjSw0kogRB+Whd1fjjFq4B1hySFxSFWWSn4mIBzg3sRNUDFYc4g5gjPoLg==} engines: {node: '>=4'} + get-port@6.1.2: + resolution: {integrity: sha512-BrGGraKm2uPqurfGVj/z97/zv8dPleC6x9JBNRTrDNtCkkRF4rPwrQXFgL7+I+q8QSdU4ntLQX2D7KIxSy8nGw==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + get-port@7.1.0: resolution: {integrity: sha512-QB9NKEeDg3xxVwCCwJQ9+xycaz6pBB6iQ76wiWMl1927n0Kir6alPiP+yuiICLLU4jpMe08dXfpebuQppFA2zw==} engines: {node: '>=16'} + get-source@2.0.12: + resolution: {integrity: sha512-X5+4+iD+HoSeEED+uwrQ07BOQr0kEDFMVqqpBuI+RaZBpBpHCuXxo70bjar6f0b0u/DQJsJ7ssurpP0V60Az+w==} + get-stream@4.1.0: resolution: {integrity: sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==} engines: {node: '>=6'} @@ -5485,10 +6357,8 @@ packages: resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} engines: {node: '>=10.13.0'} - glob@10.2.2: - resolution: {integrity: sha512-Xsa0BcxIC6th9UwNjZkhrMtNo/MnyRL8jGCP+uEwhA5oFOCY1f2s1/oNKY47xQ0Bg5nkjsfAEIej1VeH62bDDQ==} - engines: {node: '>=16 || 14 >=14.17'} - hasBin: true + glob-to-regexp@0.4.1: + resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==} glob@10.3.10: resolution: {integrity: sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g==} @@ -5506,13 +6376,16 @@ packages: glob@7.1.6: resolution: {integrity: sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==} + deprecated: Glob versions prior to v9 are no longer supported glob@7.2.3: resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + deprecated: Glob versions prior to v9 are no longer supported glob@8.1.0: resolution: {integrity: 
sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==} engines: {node: '>=12'} + deprecated: Glob versions prior to v9 are no longer supported globals@11.12.0: resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} @@ -5638,6 +6511,10 @@ packages: resolution: {integrity: sha512-S9cREGPJIAK437RhroOf1PGlJPIlt5itl69OmQ6onPLo5pdCbSHGL8v4uAKxrdHjcTyuoyvKPqWm5jv0dGkdFA==} engines: {node: '>=16.0.0'} + hono@4.5.0: + resolution: {integrity: sha512-ZbezypZfn4odyApjCCv+Fw5OgweBqRLA/EsMyc4FUknFvBJcBIKhHy4sqmD1rWpBc/3wUlaQ6tqOPjk36R1ckg==} + engines: {node: '>=16.0.0'} + hosted-git-info@2.8.9: resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} @@ -5731,6 +6608,7 @@ packages: inflight@1.0.6: resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. 
inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} @@ -5971,6 +6849,10 @@ packages: is-weakref@1.0.2: resolution: {integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==} + is-what@4.1.16: + resolution: {integrity: sha512-ZhMwEosbFJkA0YhFnNDgTM4ZxDRsS6HqTo7qsZM08fehyRYIYa0yHu5R6mgo1n/8MgaPBXiPimPD77baVFYg+A==} + engines: {node: '>=12.13'} + is-wsl@1.1.0: resolution: {integrity: sha512-gfygJYZ2gLTDlmbWMI0CE2MwnFzSN/2SZfkMlItC4K/JBlsWVDB0bO6XhqcY13YXE7iMcAJnzTCJjPiTeJJ0Mw==} engines: {node: '>=4'} @@ -5992,10 +6874,6 @@ packages: resolution: {integrity: sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==} engines: {node: '>=0.10.0'} - jackspeak@2.1.0: - resolution: {integrity: sha512-DiEwVPqsieUzZBNxQ2cxznmFzfg/AMgJUjYw5xl6rSmCxAQXECcbSdwcLM6Ds6T09+SBfSNCGPhYUoQ96P4h7A==} - engines: {node: '>=14'} - jackspeak@2.3.6: resolution: {integrity: sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==} engines: {node: '>=14'} @@ -6111,6 +6989,10 @@ packages: resolution: {integrity: sha512-cVnggDrVkAAA3OvFfHpFEhOnmcsUpleEKq4d4O8sQWWSH40MBrWstKigVB1kGrgLWzuom+7rRdaCsnBD6VyObQ==} hasBin: true + json-diff@1.0.6: + resolution: {integrity: sha512-tcFIPRdlc35YkYdGxcamJjllUhXWv4n2rK9oJ2RsAzV4FBkuV4ojKEDgcZ+kpKxDmJKv+PFK65+1tVVOnSeEqA==} + hasBin: true + json-parse-better-errors@1.0.2: resolution: {integrity: sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==} @@ -6198,42 +7080,10 @@ packages: tedious: optional: true - knex@3.1.0: - resolution: {integrity: sha512-GLoII6hR0c4ti243gMs5/1Rb3B+AjwMOfjYm97pu0FOQa7JH56hgBxYf5WK2525ceSbBY1cjeZ9yk99GPMB6Kw==} - engines: {node: '>=16'} - hasBin: true - peerDependencies: - better-sqlite3: '*' - mysql: '*' - mysql2: '*' - pg: '*' - pg-native: '*' - sqlite3: '*' - tedious: '*' - 
peerDependenciesMeta: - better-sqlite3: - optional: true - mysql: - optional: true - mysql2: - optional: true - pg: - optional: true - pg-native: - optional: true - sqlite3: - optional: true - tedious: - optional: true - kysely@0.25.0: resolution: {integrity: sha512-srn0efIMu5IoEBk0tBmtGnoUss4uwvxtbFQWG/U2MosfqIace1l43IFP1PmEpHRDp+Z79xIcKEqmHH3dAvQdQA==} engines: {node: '>=14.0.0'} - kysely@0.27.3: - resolution: {integrity: sha512-lG03Ru+XyOJFsjH3OMY6R/9U38IjDPfnOfDgO3ynhbDr+Dz8fak+X6L62vqu3iybQnj+lG84OttBuU9KY3L9kA==} - engines: {node: '>=14.0.0'} - leven@3.1.0: resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} engines: {node: '>=6'} @@ -6242,6 +7092,11 @@ packages: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} + libsql@0.2.0: + resolution: {integrity: sha512-ELBRqhpJx5Dap0187zKQnntZyk4EjlDHSrjIVL8t+fQ5e8IxbQTeYgZgigMjB1EvrETdkm0Y0VxBGhzPQ+t0Jg==} + cpu: [x64, arm64] + os: [darwin, linux, win32] + libsql@0.3.18: resolution: {integrity: sha512-lvhKr7WV3NLWRbXkjn/MeKqXOAqWKU0PX9QYrvDh7fneukapj+iUQ4qgJASrQyxcCrEsClXCQiiK5W6OoYPAlA==} os: [darwin, linux, win32] @@ -6363,6 +7218,10 @@ packages: resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} engines: {node: '>=10'} + lilconfig@3.1.2: + resolution: {integrity: sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow==} + engines: {node: '>=14'} + lines-and-columns@1.2.4: resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} @@ -6457,6 +7316,9 @@ packages: lru-queue@0.1.0: resolution: {integrity: sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ==} + magic-string@0.25.9: + resolution: {integrity: 
sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==} + magic-string@0.30.10: resolution: {integrity: sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==} @@ -6636,6 +7498,11 @@ packages: engines: {node: '>=4.0.0'} hasBin: true + mime@3.0.0: + resolution: {integrity: sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==} + engines: {node: '>=10.0.0'} + hasBin: true + mimic-fn@1.2.0: resolution: {integrity: sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==} engines: {node: '>=4'} @@ -6656,6 +7523,11 @@ packages: resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} engines: {node: '>=4'} + miniflare@3.20240712.0: + resolution: {integrity: sha512-zVbsMX2phvJS1uTPmjK6CvVBq4ON2UkmvTw9IMfNPACsWJmHEdsBDxsYEG1vKAduJdI5gULLuJf7qpFxByDhGw==} + engines: {node: '>=16.13'} + hasBin: true + minimatch@3.1.2: resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} @@ -6667,10 +7539,6 @@ packages: resolution: {integrity: sha512-sBz8G/YjVniEz6lKPNpKxXwazJe4c19fEfV2GDMX6AjFz+MX9uDWIZW8XreVhkFW3fkIdTv/gxWr/Kks5FFAVw==} engines: {node: '>=10'} - minimatch@9.0.1: - resolution: {integrity: sha512-0jWhJpD/MdhPXwPuiRkCbfYfSKp2qnn2eOc279qI7f+osl/l+prKSrvhg157zSYvx/1nmgn2NqdT6k2Z7zSH9w==} - engines: {node: '>=16 || 14 >=14.17'} - minimatch@9.0.4: resolution: {integrity: sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==} engines: {node: '>=16 || 14 >=14.17'} @@ -6750,16 +7618,20 @@ packages: ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + mustache@4.2.0: + resolution: {integrity: 
sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==} + hasBin: true + mv@2.1.1: resolution: {integrity: sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg==} engines: {node: '>=0.8.0'} - mysql2@3.3.3: - resolution: {integrity: sha512-MxDQJztArk4JFX1PKVjDhIXRzAmVJfuqZrVU+my6NeYBAA/XZRaDw5q7vga8TNvgyy3Lv3rivBFBBuJFbsdjaw==} + mysql2@3.11.0: + resolution: {integrity: sha512-J9phbsXGvTOcRVPR95YedzVSxJecpW5A5+cQ57rhHIFXteTP10HCs+VBjS7DHIKfEaI1zQ5tlVrquCd64A6YvA==} engines: {node: '>= 8.0'} - mysql2@3.9.8: - resolution: {integrity: sha512-+5JKNjPuks1FNMoy9TYpl77f+5frbTklz7eb3XDwbpsERRLEeXiW2PDEkakYF50UuKU2qwfGnyXpKYvukv8mGA==} + mysql2@3.3.3: + resolution: {integrity: sha512-MxDQJztArk4JFX1PKVjDhIXRzAmVJfuqZrVU+my6NeYBAA/XZRaDw5q7vga8TNvgyy3Lv3rivBFBBuJFbsdjaw==} engines: {node: '>= 8.0'} mz@2.7.0: @@ -6833,6 +7705,9 @@ packages: resolution: {integrity: sha512-E2WEOVsgs7O16zsURJ/eH8BqhF029wGpEOnv7Urwdo2wmQanOACwJQh0devF9D9RhoZru0+9JXIS0dBXIAz+lA==} engines: {node: '>=18'} + node-fetch-native@1.6.4: + resolution: {integrity: sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ==} + node-fetch@2.7.0: resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} engines: {node: 4.x || >=6.0.0} @@ -7168,13 +8043,12 @@ packages: resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} engines: {node: '>=16 || 14 >=14.18'} - path-scurry@1.7.0: - resolution: {integrity: sha512-UkZUeDjczjYRE495+9thsgcVgsaCPkaw80slmfVFgllxY+IO8ubTsOpFVjDPROBqJdHfVPUFRHPBV/WciOVfWg==} - engines: {node: '>=16 || 14 >=14.17'} - path-to-regexp@0.1.7: resolution: {integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==} + path-to-regexp@6.2.2: + resolution: {integrity: 
sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw==} + path-type@4.0.0: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} engines: {node: '>=8'} @@ -7194,9 +8068,6 @@ packages: pg-connection-string@2.6.1: resolution: {integrity: sha512-w6ZzNu6oMmIzEAYVw+RLK0+nqHPt8K3ZnknKi+g48Ak2pr3dtljJW3o+D/n2zzCG07Zoe9VOX3aiKpj+BN0pjg==} - pg-connection-string@2.6.2: - resolution: {integrity: sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA==} - pg-connection-string@2.6.4: resolution: {integrity: sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA==} @@ -7301,6 +8172,24 @@ packages: ts-node: optional: true + postcss-load-config@6.0.1: + resolution: {integrity: sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==} + engines: {node: '>= 18'} + peerDependencies: + jiti: '>=1.21.0' + postcss: '>=8.0.9' + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + jiti: + optional: true + postcss: + optional: true + tsx: + optional: true + yaml: + optional: true + postcss@8.4.38: resolution: {integrity: sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==} engines: {node: ^10 || ^12 || >=14} @@ -7360,6 +8249,15 @@ packages: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} engines: {node: '>= 0.8.0'} + prettier-linter-helpers@1.0.0: + resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==} + engines: {node: '>=6.0.0'} + + prettier@2.8.8: + resolution: {integrity: sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==} + engines: {node: '>=10.13.0'} + hasBin: true + prettier@3.0.3: resolution: {integrity: 
sha512-L/4pUDMxcNa8R/EthV08Zt42WBO4h1rarVtK0K+QJG0X187OLo7l699jWw0GKuwzkPQ//jMFA/8Xm6Fh3J/DAg==} engines: {node: '>=14'} @@ -7381,6 +8279,9 @@ packages: resolution: {integrity: sha512-ASJqOugUF1bbzI35STMBUpZqdfYKlJugy6JBziGi2EE+AL5JPJGSzvpeVXojxrr0ViUYoToUjb5kjSEGf7Y83Q==} engines: {node: '>=14.16'} + printable-characters@1.0.42: + resolution: {integrity: sha512-dKp+C4iXWK4vVYZmYSd0KBH5F/h1HoZRsbJ82AVKRO3PEo8L4lBS/vLwhVtpwwuYcoIsVY+1JYKR268yn480uQ==} + prisma@5.14.0: resolution: {integrity: sha512-gCNZco7y5XtjrnQYeDJTiVZmT/ncqCr5RY1/Cf8X2wgLRmyh9ayPAGBNziI4qEE4S6SxCH5omQLVo9lmURaJ/Q==} engines: {node: '>=16.13'} @@ -7704,6 +8605,16 @@ packages: engines: {node: '>=14'} hasBin: true + rollup-plugin-inject@3.0.2: + resolution: {integrity: sha512-ptg9PQwzs3orn4jkgXJ74bfs5vYz1NCZlSQMBUA0wKcGp5i5pA1AO3fOUEte8enhGUC+iapTCzEWw2jEFFUO/w==} + deprecated: This package has been deprecated and is no longer maintained. Please use @rollup/plugin-inject. + + rollup-plugin-node-polyfills@0.2.1: + resolution: {integrity: sha512-4kCrKPTJ6sK4/gLL/U5QzVT8cxJcofO0OU74tnB19F40cmuAKSzH5/siithxlofFEjwvw1YAhPmbvGNA6jEroA==} + + rollup-pluginutils@2.8.2: + resolution: {integrity: sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==} + rollup@3.20.7: resolution: {integrity: sha512-P7E2zezKSLhWnTz46XxjSmInrbOCiul1yf+kJccMxT56vxjHwCbDfoLbiqFgu+WQoo9ij2PkraYaBstgB2prBA==} engines: {node: '>=14.18.0', npm: '>=8.0.0'} @@ -7719,6 +8630,11 @@ packages: engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true + rollup@4.18.1: + resolution: {integrity: sha512-Elx2UT8lzxxOXMpy5HWQGZqkrQOtrVDDa/bm9l10+U4rQnVzbL/LgZ4NOM1MPIDyHk69W4InuYDF5dzRh4Kw1A==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} @@ -7774,21 +8690,6 @@ packages: resolution: {integrity: 
sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true - semver@7.5.1: - resolution: {integrity: sha512-Wvss5ivl8TMRZXXESstBA4uR5iXgEN/VC5/sOcuXdVLzcdkz4HWetIoRfG5gb5X+ij/G9rw9YoGn3QoQ8OCSpw==} - engines: {node: '>=10'} - hasBin: true - - semver@7.5.4: - resolution: {integrity: sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==} - engines: {node: '>=10'} - hasBin: true - - semver@7.6.1: - resolution: {integrity: sha512-f/vbBsu+fOiYt+lmwZV0rVwJScl46HppnOA1ZvIuBWKOTlllpyJ3bfVax76/OrhCH38dyxoDIA8K7uB963IYgA==} - engines: {node: '>=10'} - hasBin: true - semver@7.6.2: resolution: {integrity: sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==} engines: {node: '>=10'} @@ -7958,6 +8859,10 @@ packages: resolution: {integrity: sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==} engines: {node: '>= 8'} + sourcemap-codec@1.4.8: + resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==} + deprecated: Please use @jridgewell/sourcemap-codec instead + spawn-command@0.0.2: resolution: {integrity: sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ==} @@ -8034,6 +8939,9 @@ packages: resolution: {integrity: sha512-KJP1OCML99+8fhOHxwwzyWrlUuVX5GQ0ZpJTd1DFXhdkrvg1szxfHhawXUZ3g9TkXORQd4/WG68jMlQZ2p8wlg==} engines: {node: '>=6'} + stacktracey@2.1.8: + resolution: {integrity: sha512-Kpij9riA+UNg7TnphqjH7/CzctQ/owJGNbFkfEeve4Z4uxT5+JapVLFXcsurIfN34gnTWZNJ/f7NMG0E8JDzTw==} + statuses@1.5.0: resolution: {integrity: sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==} engines: {node: '>= 0.6'} @@ -8045,6 +8953,10 @@ packages: std-env@3.7.0: resolution: {integrity: sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==} 
+ stoppable@1.1.0: + resolution: {integrity: sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==} + engines: {node: '>=4', npm: '>=6'} + stream-buffers@2.2.0: resolution: {integrity: sha512-uyQK/mx5QjHun80FLJTfaWE7JtwfRMKBLkMne6udYOmvH0CawotVa7TfgYHzAnpphn4+TweIx1QKMnRIbipmUg==} engines: {node: '>= 0.10.0'} @@ -8145,6 +9057,11 @@ packages: engines: {node: '>=8'} hasBin: true + sucrase@3.35.0: + resolution: {integrity: sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==} + engines: {node: '>=16 || 14 >=14.17'} + hasBin: true + sudo-prompt@8.2.5: resolution: {integrity: sha512-rlBo3HU/1zAJUrkY6jNxDOC9eVYliG6nS4JA8u8KAshITd07tafMc/Br7xQwCSseXwJ2iCcHCE8SNWX3q8Z+kw==} @@ -8154,6 +9071,10 @@ packages: sudo-prompt@9.2.1: resolution: {integrity: sha512-Mu7R0g4ig9TUuGSxJavny5Rv0egCEtpZRNMrZaYS1vxkiIxGiGUwoezU3LazIQ+KE04hTrTfNPgxU5gzi7F5Pw==} + superjson@2.2.1: + resolution: {integrity: sha512-8iGv75BYOa0xRJHK5vRLEjE2H/i4lulTjzpUXic3Eg8akftYjkmQDa8JARQ42rlczXyFR3IeRoeFCc7RxHsYZA==} + engines: {node: '>=16'} + supertap@3.0.1: resolution: {integrity: sha512-u1ZpIBCawJnO+0QePsEiOknOfCRq0yERxiAchT0i4li0WHNUJbf0evXXSXOcCAR4M8iMDoajXYmstm/qO81Isw==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -8182,6 +9103,10 @@ packages: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} + synckit@0.9.1: + resolution: {integrity: sha512-7gr8p9TQP6RAHusBOSLs46F4564ZrjV8xFmw5zCmgmhGUcw2hxsShhJ6CEiHQMgPDwAQ1fWHPM0ypc4RMAig4A==} + engines: {node: ^14.18.0 || >=16.0.0} + tar-fs@2.0.1: resolution: {integrity: sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==} @@ -8335,6 +9260,12 @@ packages: peerDependencies: typescript: '>=4.2.0' + ts-api-utils@1.3.0: + resolution: {integrity: 
sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==} + engines: {node: '>=16'} + peerDependencies: + typescript: '>=4.2.0' + ts-expose-internals-conditionally@1.0.0-empty.0: resolution: {integrity: sha512-F8m9NOF6ZhdOClDVdlM8gj3fDCav4ZIFSs/EI3ksQbAAXVSCN/Jh5OCJDDZWBuBy9psFc6jULGDlPwjMYMhJDw==} @@ -8390,6 +9321,25 @@ packages: typescript: optional: true + tsup@8.1.2: + resolution: {integrity: sha512-Gzw/PXSX/z0aYMNmkcI54bKKFVFJQbLne+EqTJZeQ3lNT3QpumjtMU4rl+ZwTTp8oRF3ahMbEAxT2sZPJLFSrg==} + engines: {node: '>=18'} + hasBin: true + peerDependencies: + '@microsoft/api-extractor': ^7.36.0 + '@swc/core': ^1 + postcss: ^8.4.12 + typescript: '>=4.5.0' + peerDependenciesMeta: + '@microsoft/api-extractor': + optional: true + '@swc/core': + optional: true + postcss: + optional: true + typescript: + optional: true + tsutils@3.21.0: resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} engines: {node: '>= 6'} @@ -8570,6 +9520,13 @@ packages: resolution: {integrity: sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==} engines: {node: '>=14.0'} + undici@5.28.4: + resolution: {integrity: sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==} + engines: {node: '>=14.0'} + + unenv-nightly@1.10.0-1717606461.a117952: + resolution: {integrity: sha512-u3TfBX02WzbHTpaEfWEKwDijDSFAHcgXkayUZ+MVDrjhLFvgAJzFGTSTmwlEhwWi2exyRQey23ah9wELMM6etg==} + unicode-canonical-property-names-ecmascript@2.0.0: resolution: {integrity: sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==} engines: {node: '>=4'} @@ -8882,6 +9839,21 @@ packages: wordwrap@1.0.0: resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} + workerd@1.20240712.0: + resolution: {integrity: 
sha512-hdIHZif82hBDy9YnMtcmDGgbLU5f2P2aGpi/X8EKhTSLDppVUGrkY3XB536J4jGjA2D5dS0FUEXCl5bAJEed8Q==} + engines: {node: '>=16'} + hasBin: true + + wrangler@3.65.0: + resolution: {integrity: sha512-IDy4ttyJZssazAd5CXHw4NWeZFGxngdNF5m2ogltdT3CV7uHfCvPVdMcr4uNMpRZd0toHmAE3LtQeXxDFFp88A==} + engines: {node: '>=16.17.0'} + hasBin: true + peerDependencies: + '@cloudflare/workers-types': ^4.20240712.0 + peerDependenciesMeta: + '@cloudflare/workers-types': + optional: true + wrap-ansi@6.2.0: resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} engines: {node: '>=8'} @@ -8951,6 +9923,18 @@ packages: utf-8-validate: optional: true + ws@8.18.0: + resolution: {integrity: sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + xcode@3.0.1: resolution: {integrity: sha512-kCz5k7J7XbJtjABOvkc5lJmkiDh8VhjVCGNiqdKCscmVpdVUpEAyXv1xmCLkQJ5dsHqx3IPO4XW+NTDhU/fatA==} engines: {node: '>=10.0.0'} @@ -8975,6 +9959,9 @@ packages: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} engines: {node: '>=0.4'} + xxhash-wasm@1.0.2: + resolution: {integrity: sha512-ibF0Or+FivM9lNrg+HGJfVX8WJqgo+kCLDc4vx6xMeTce7Aj+DLttKbxxRR/gNLSAelRc1omAPlJ77N/Jem07A==} + y18n@4.0.3: resolution: {integrity: sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==} @@ -9033,6 +10020,9 @@ packages: resolution: {integrity: sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==} engines: {node: '>=12.20'} + youch@3.3.3: + resolution: {integrity: sha512-qSFXUk3UZBLfggAW3dJKg0BMblG5biqSF8M34E06o5CSsZtH92u9Hqmj2RzGiHDi64fhe83+4tENFP2DB6t6ZA==} + zod@3.21.4: resolution: {integrity: 
sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw==} @@ -9217,10 +10207,10 @@ snapshots: dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sso-oidc': 3.583.0 - '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/client-sts': 3.583.0 '@aws-sdk/core': 3.582.0 - '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0) '@aws-sdk/middleware-host-header': 3.577.0 '@aws-sdk/middleware-logger': 3.577.0 '@aws-sdk/middleware-recursion-detection': 3.577.0 @@ -9304,13 +10294,13 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sso-oidc@3.583.0': + '@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)': dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/client-sts': 3.583.0 '@aws-sdk/core': 3.582.0 - '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0) '@aws-sdk/middleware-host-header': 3.577.0 '@aws-sdk/middleware-logger': 3.577.0 '@aws-sdk/middleware-recursion-detection': 3.577.0 @@ -9347,6 +10337,7 @@ snapshots: '@smithy/util-utf8': 3.0.0 tslib: 2.6.2 transitivePeerDependencies: + - '@aws-sdk/client-sts' - aws-crt '@aws-sdk/client-sso@3.478.0': @@ -9613,13 +10604,13 @@ snapshots: - '@aws-sdk/client-sso-oidc' - aws-crt - '@aws-sdk/client-sts@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)': + '@aws-sdk/client-sts@3.583.0': dependencies: '@aws-crypto/sha256-browser': 3.0.0 
'@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sso-oidc': 3.583.0 + '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) '@aws-sdk/core': 3.582.0 - '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0) '@aws-sdk/middleware-host-header': 3.577.0 '@aws-sdk/middleware-logger': 3.577.0 '@aws-sdk/middleware-recursion-detection': 3.577.0 @@ -9656,7 +10647,6 @@ snapshots: '@smithy/util-utf8': 3.0.0 tslib: 2.6.2 transitivePeerDependencies: - - '@aws-sdk/client-sso-oidc' - aws-crt '@aws-sdk/core@3.477.0': @@ -9809,12 +10799,12 @@ snapshots: - '@aws-sdk/client-sso-oidc' - aws-crt - '@aws-sdk/credential-provider-ini@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0)': + '@aws-sdk/credential-provider-ini@3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0)': dependencies: - '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/client-sts': 3.583.0 '@aws-sdk/credential-provider-env': 3.577.0 '@aws-sdk/credential-provider-process': 3.577.0 - '@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) '@aws-sdk/credential-provider-web-identity': 3.577.0(@aws-sdk/client-sts@3.583.0) '@aws-sdk/types': 3.577.0 '@smithy/credential-provider-imds': 3.0.0 @@ -9899,13 +10889,13 @@ snapshots: - '@aws-sdk/client-sts' - aws-crt - '@aws-sdk/credential-provider-node@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0)': + '@aws-sdk/credential-provider-node@3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0)': dependencies: '@aws-sdk/credential-provider-env': 3.577.0 '@aws-sdk/credential-provider-http': 3.582.0 
- '@aws-sdk/credential-provider-ini': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/credential-provider-ini': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0) '@aws-sdk/credential-provider-process': 3.577.0 - '@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) '@aws-sdk/credential-provider-web-identity': 3.577.0(@aws-sdk/client-sts@3.583.0) '@aws-sdk/types': 3.577.0 '@smithy/credential-provider-imds': 3.0.0 @@ -9980,10 +10970,10 @@ snapshots: - '@aws-sdk/client-sso-oidc' - aws-crt - '@aws-sdk/credential-provider-sso@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)': + '@aws-sdk/credential-provider-sso@3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))': dependencies: '@aws-sdk/client-sso': 3.583.0 - '@aws-sdk/token-providers': 3.577.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/token-providers': 3.577.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) '@aws-sdk/types': 3.577.0 '@smithy/property-provider': 3.0.0 '@smithy/shared-ini-file-loader': 3.0.0 @@ -10018,7 +11008,7 @@ snapshots: '@aws-sdk/credential-provider-web-identity@3.577.0(@aws-sdk/client-sts@3.583.0)': dependencies: - '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/client-sts': 3.583.0 '@aws-sdk/types': 3.577.0 '@smithy/property-provider': 3.0.0 '@smithy/types': 3.0.0 @@ -10219,16 +11209,16 @@ snapshots: '@aws-sdk/token-providers@3.568.0(@aws-sdk/client-sso-oidc@3.583.0)': dependencies: - '@aws-sdk/client-sso-oidc': 3.583.0 + '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) '@aws-sdk/types': 3.567.0 '@smithy/property-provider': 2.2.0 '@smithy/shared-ini-file-loader': 2.4.0 '@smithy/types': 2.12.0 tslib: 2.6.2 - '@aws-sdk/token-providers@3.577.0(@aws-sdk/client-sso-oidc@3.583.0)': + 
'@aws-sdk/token-providers@3.577.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))': dependencies: - '@aws-sdk/client-sso-oidc': 3.583.0 + '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) '@aws-sdk/types': 3.577.0 '@smithy/property-provider': 3.0.0 '@smithy/shared-ini-file-loader': 3.0.0 @@ -10354,7 +11344,7 @@ snapshots: '@babel/traverse': 7.24.6 '@babel/types': 7.24.6 convert-source-map: 2.0.0 - debug: 4.3.4 + debug: 4.3.5 gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 @@ -10415,7 +11405,7 @@ snapshots: '@babel/core': 7.24.6 '@babel/helper-compilation-targets': 7.24.6 '@babel/helper-plugin-utils': 7.24.6 - debug: 4.3.4 + debug: 4.3.5 lodash.debounce: 4.0.8 resolve: 1.22.8 transitivePeerDependencies: @@ -11287,7 +12277,7 @@ snapshots: '@babel/helper-split-export-declaration': 7.24.6 '@babel/parser': 7.24.6 '@babel/types': 7.24.6 - debug: 4.3.4 + debug: 4.3.5 globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -11317,11 +12307,29 @@ snapshots: '@balena/dockerignore@1.0.2': {} - '@cloudflare/workers-types@4.20240512.0': {} + '@cloudflare/kv-asset-handler@0.3.4': + dependencies: + mime: 3.0.0 - '@cloudflare/workers-types@4.20240524.0': + '@cloudflare/workerd-darwin-64@1.20240712.0': + optional: true + + '@cloudflare/workerd-darwin-arm64@1.20240712.0': + optional: true + + '@cloudflare/workerd-linux-64@1.20240712.0': + optional: true + + '@cloudflare/workerd-linux-arm64@1.20240712.0': + optional: true + + '@cloudflare/workerd-windows-64@1.20240712.0': optional: true + '@cloudflare/workers-types@4.20240512.0': {} + + '@cloudflare/workers-types@4.20240524.0': {} + '@colors/colors@1.5.0': optional: true @@ -11350,6 +12358,8 @@ snapshots: '@dprint/win32-x64@0.46.3': optional: true + '@drizzle-team/brocli@0.10.1': {} + '@drizzle-team/studio@0.0.5': {} '@electric-sql/pglite@0.1.5': {} @@ -11364,132 +12374,208 @@ snapshots: '@esbuild-kit/core-utils': 3.1.0 get-tsconfig: 4.7.5 + 
'@esbuild-plugins/node-globals-polyfill@0.2.3(esbuild@0.17.19)': + dependencies: + esbuild: 0.17.19 + + '@esbuild-plugins/node-modules-polyfill@0.2.2(esbuild@0.17.19)': + dependencies: + esbuild: 0.17.19 + escape-string-regexp: 4.0.0 + rollup-plugin-node-polyfills: 0.2.1 + + '@esbuild/aix-ppc64@0.19.12': + optional: true + '@esbuild/aix-ppc64@0.20.2': optional: true '@esbuild/aix-ppc64@0.21.5': optional: true + '@esbuild/aix-ppc64@0.23.0': + optional: true + '@esbuild/android-arm64@0.17.19': optional: true '@esbuild/android-arm64@0.18.20': optional: true + '@esbuild/android-arm64@0.19.12': + optional: true + '@esbuild/android-arm64@0.20.2': optional: true '@esbuild/android-arm64@0.21.5': optional: true + '@esbuild/android-arm64@0.23.0': + optional: true + '@esbuild/android-arm@0.17.19': optional: true '@esbuild/android-arm@0.18.20': optional: true + '@esbuild/android-arm@0.19.12': + optional: true + '@esbuild/android-arm@0.20.2': optional: true '@esbuild/android-arm@0.21.5': optional: true + '@esbuild/android-arm@0.23.0': + optional: true + '@esbuild/android-x64@0.17.19': optional: true '@esbuild/android-x64@0.18.20': optional: true + '@esbuild/android-x64@0.19.12': + optional: true + '@esbuild/android-x64@0.20.2': optional: true '@esbuild/android-x64@0.21.5': optional: true + '@esbuild/android-x64@0.23.0': + optional: true + '@esbuild/darwin-arm64@0.17.19': optional: true '@esbuild/darwin-arm64@0.18.20': optional: true + '@esbuild/darwin-arm64@0.19.12': + optional: true + '@esbuild/darwin-arm64@0.20.2': optional: true '@esbuild/darwin-arm64@0.21.5': optional: true + '@esbuild/darwin-arm64@0.23.0': + optional: true + '@esbuild/darwin-x64@0.17.19': optional: true '@esbuild/darwin-x64@0.18.20': optional: true + '@esbuild/darwin-x64@0.19.12': + optional: true + '@esbuild/darwin-x64@0.20.2': optional: true '@esbuild/darwin-x64@0.21.5': optional: true + '@esbuild/darwin-x64@0.23.0': + optional: true + '@esbuild/freebsd-arm64@0.17.19': optional: true 
'@esbuild/freebsd-arm64@0.18.20': optional: true + '@esbuild/freebsd-arm64@0.19.12': + optional: true + '@esbuild/freebsd-arm64@0.20.2': optional: true '@esbuild/freebsd-arm64@0.21.5': optional: true + '@esbuild/freebsd-arm64@0.23.0': + optional: true + '@esbuild/freebsd-x64@0.17.19': optional: true '@esbuild/freebsd-x64@0.18.20': optional: true + '@esbuild/freebsd-x64@0.19.12': + optional: true + '@esbuild/freebsd-x64@0.20.2': optional: true '@esbuild/freebsd-x64@0.21.5': optional: true + '@esbuild/freebsd-x64@0.23.0': + optional: true + '@esbuild/linux-arm64@0.17.19': optional: true '@esbuild/linux-arm64@0.18.20': optional: true + '@esbuild/linux-arm64@0.19.12': + optional: true + '@esbuild/linux-arm64@0.20.2': optional: true '@esbuild/linux-arm64@0.21.5': optional: true + '@esbuild/linux-arm64@0.23.0': + optional: true + '@esbuild/linux-arm@0.17.19': optional: true '@esbuild/linux-arm@0.18.20': optional: true + '@esbuild/linux-arm@0.19.12': + optional: true + '@esbuild/linux-arm@0.20.2': optional: true '@esbuild/linux-arm@0.21.5': optional: true + '@esbuild/linux-arm@0.23.0': + optional: true + '@esbuild/linux-ia32@0.17.19': optional: true '@esbuild/linux-ia32@0.18.20': optional: true + '@esbuild/linux-ia32@0.19.12': + optional: true + '@esbuild/linux-ia32@0.20.2': optional: true '@esbuild/linux-ia32@0.21.5': optional: true + '@esbuild/linux-ia32@0.23.0': + optional: true + '@esbuild/linux-loong64@0.14.54': optional: true @@ -11499,144 +12585,219 @@ snapshots: '@esbuild/linux-loong64@0.18.20': optional: true + '@esbuild/linux-loong64@0.19.12': + optional: true + '@esbuild/linux-loong64@0.20.2': optional: true '@esbuild/linux-loong64@0.21.5': optional: true + '@esbuild/linux-loong64@0.23.0': + optional: true + '@esbuild/linux-mips64el@0.17.19': optional: true '@esbuild/linux-mips64el@0.18.20': optional: true + '@esbuild/linux-mips64el@0.19.12': + optional: true + '@esbuild/linux-mips64el@0.20.2': optional: true '@esbuild/linux-mips64el@0.21.5': optional: true + 
'@esbuild/linux-mips64el@0.23.0': + optional: true + '@esbuild/linux-ppc64@0.17.19': optional: true '@esbuild/linux-ppc64@0.18.20': optional: true + '@esbuild/linux-ppc64@0.19.12': + optional: true + '@esbuild/linux-ppc64@0.20.2': optional: true '@esbuild/linux-ppc64@0.21.5': optional: true + '@esbuild/linux-ppc64@0.23.0': + optional: true + '@esbuild/linux-riscv64@0.17.19': optional: true '@esbuild/linux-riscv64@0.18.20': optional: true + '@esbuild/linux-riscv64@0.19.12': + optional: true + '@esbuild/linux-riscv64@0.20.2': optional: true '@esbuild/linux-riscv64@0.21.5': optional: true + '@esbuild/linux-riscv64@0.23.0': + optional: true + '@esbuild/linux-s390x@0.17.19': optional: true '@esbuild/linux-s390x@0.18.20': optional: true + '@esbuild/linux-s390x@0.19.12': + optional: true + '@esbuild/linux-s390x@0.20.2': optional: true '@esbuild/linux-s390x@0.21.5': optional: true + '@esbuild/linux-s390x@0.23.0': + optional: true + '@esbuild/linux-x64@0.17.19': optional: true '@esbuild/linux-x64@0.18.20': optional: true + '@esbuild/linux-x64@0.19.12': + optional: true + '@esbuild/linux-x64@0.20.2': optional: true '@esbuild/linux-x64@0.21.5': optional: true + '@esbuild/linux-x64@0.23.0': + optional: true + '@esbuild/netbsd-x64@0.17.19': optional: true '@esbuild/netbsd-x64@0.18.20': optional: true + '@esbuild/netbsd-x64@0.19.12': + optional: true + '@esbuild/netbsd-x64@0.20.2': optional: true '@esbuild/netbsd-x64@0.21.5': optional: true + '@esbuild/netbsd-x64@0.23.0': + optional: true + + '@esbuild/openbsd-arm64@0.23.0': + optional: true + '@esbuild/openbsd-x64@0.17.19': optional: true '@esbuild/openbsd-x64@0.18.20': optional: true + '@esbuild/openbsd-x64@0.19.12': + optional: true + '@esbuild/openbsd-x64@0.20.2': optional: true '@esbuild/openbsd-x64@0.21.5': optional: true + '@esbuild/openbsd-x64@0.23.0': + optional: true + '@esbuild/sunos-x64@0.17.19': optional: true '@esbuild/sunos-x64@0.18.20': optional: true + '@esbuild/sunos-x64@0.19.12': + optional: true + 
'@esbuild/sunos-x64@0.20.2': optional: true '@esbuild/sunos-x64@0.21.5': optional: true + '@esbuild/sunos-x64@0.23.0': + optional: true + '@esbuild/win32-arm64@0.17.19': optional: true '@esbuild/win32-arm64@0.18.20': optional: true + '@esbuild/win32-arm64@0.19.12': + optional: true + '@esbuild/win32-arm64@0.20.2': optional: true '@esbuild/win32-arm64@0.21.5': optional: true + '@esbuild/win32-arm64@0.23.0': + optional: true + '@esbuild/win32-ia32@0.17.19': optional: true '@esbuild/win32-ia32@0.18.20': optional: true + '@esbuild/win32-ia32@0.19.12': + optional: true + '@esbuild/win32-ia32@0.20.2': optional: true '@esbuild/win32-ia32@0.21.5': optional: true + '@esbuild/win32-ia32@0.23.0': + optional: true + '@esbuild/win32-x64@0.17.19': optional: true '@esbuild/win32-x64@0.18.20': optional: true + '@esbuild/win32-x64@0.19.12': + optional: true + '@esbuild/win32-x64@0.20.2': optional: true '@esbuild/win32-x64@0.21.5': optional: true + '@esbuild/win32-x64@0.23.0': + optional: true + '@eslint-community/eslint-utils@4.4.0(eslint@8.50.0)': dependencies: eslint: 8.50.0 @@ -11647,6 +12808,13 @@ snapshots: eslint: 8.53.0 eslint-visitor-keys: 3.4.3 + '@eslint-community/eslint-utils@4.4.0(eslint@8.57.0)': + dependencies: + eslint: 8.57.0 + eslint-visitor-keys: 3.4.3 + + '@eslint-community/regexpp@4.11.0': {} + '@eslint-community/regexpp@4.9.0': {} '@eslint/eslintrc@2.1.2': @@ -11655,7 +12823,7 @@ snapshots: debug: 4.3.4 espree: 9.6.1 globals: 13.22.0 - ignore: 5.2.4 + ignore: 5.3.1 import-fresh: 3.3.0 js-yaml: 4.1.0 minimatch: 3.1.2 @@ -11669,7 +12837,7 @@ snapshots: debug: 4.3.4 espree: 9.6.1 globals: 13.22.0 - ignore: 5.2.4 + ignore: 5.3.1 import-fresh: 3.3.0 js-yaml: 4.1.0 minimatch: 3.1.2 @@ -11677,10 +12845,24 @@ snapshots: transitivePeerDependencies: - supports-color - '@eslint/eslintrc@3.1.0': + '@eslint/eslintrc@2.1.4': dependencies: ajv: 6.12.6 debug: 4.3.4 + espree: 9.6.1 + globals: 13.22.0 + ignore: 5.3.1 + import-fresh: 3.3.0 + js-yaml: 4.1.0 + minimatch: 3.1.2 + 
strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + + '@eslint/eslintrc@3.1.0': + dependencies: + ajv: 6.12.6 + debug: 4.3.5 espree: 10.0.1 globals: 14.0.0 ignore: 5.3.1 @@ -11695,6 +12877,12 @@ snapshots: '@eslint/js@8.53.0': {} + '@eslint/js@8.57.0': {} + + '@ewoudenberg/difflib@0.1.0': + dependencies: + heap: 0.2.7 + '@expo/bunyan@4.0.0': dependencies: uuid: 8.3.2 @@ -11702,7 +12890,7 @@ snapshots: mv: 2.1.1 safe-json-stringify: 1.2.0 - '@expo/cli@0.18.13(bufferutil@4.0.8)(encoding@0.1.13)(expo-modules-autolinking@1.11.1)(utf-8-validate@6.0.3)': + '@expo/cli@0.18.13(bufferutil@4.0.8)(encoding@0.1.13)(expo-modules-autolinking@1.11.1)': dependencies: '@babel/runtime': 7.24.6 '@expo/code-signing-certificates': 0.0.5 @@ -11720,7 +12908,7 @@ snapshots: '@expo/rudder-sdk-node': 1.1.1(encoding@0.1.13) '@expo/spawn-async': 1.7.2 '@expo/xcpretty': 4.3.1 - '@react-native/dev-middleware': 0.74.83(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@react-native/dev-middleware': 0.74.83(bufferutil@4.0.8)(encoding@0.1.13) '@urql/core': 2.3.6(graphql@15.8.0) '@urql/exchange-retry': 0.3.0(graphql@15.8.0) accepts: 1.3.8 @@ -11731,7 +12919,7 @@ snapshots: chalk: 4.1.2 ci-info: 3.9.0 connect: 3.7.0 - debug: 4.3.4 + debug: 4.3.5 env-editor: 0.4.2 fast-glob: 3.3.2 find-yarn-workspace-root: 2.0.0 @@ -11779,7 +12967,7 @@ snapshots: text-table: 0.2.0 url-join: 4.0.0 wrap-ansi: 7.0.0 - ws: 8.17.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) + ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) transitivePeerDependencies: - bufferutil - encoding @@ -11799,7 +12987,7 @@ snapshots: '@expo/plist': 0.1.3 '@expo/sdk-runtime-versions': 1.0.0 chalk: 4.1.2 - debug: 4.3.4 + debug: 4.3.5 find-up: 5.0.0 getenv: 1.0.0 glob: 7.1.6 @@ -11851,7 +13039,7 @@ snapshots: '@expo/env@0.3.0': dependencies: chalk: 4.1.2 - debug: 4.3.4 + debug: 4.3.5 dotenv: 16.4.5 dotenv-expand: 11.0.6 getenv: 1.0.0 @@ -11890,7 +13078,7 @@ snapshots: '@expo/json-file': 8.3.3 '@expo/spawn-async': 
1.7.2 chalk: 4.1.2 - debug: 4.3.4 + debug: 4.3.5 find-yarn-workspace-root: 2.0.0 fs-extra: 9.1.0 getenv: 1.0.0 @@ -11936,7 +13124,7 @@ snapshots: '@expo/image-utils': 0.5.1(encoding@0.1.13) '@expo/json-file': 8.3.3 '@react-native/normalize-colors': 0.74.83 - debug: 4.3.4 + debug: 4.3.5 expo-modules-autolinking: 1.11.1 fs-extra: 9.1.0 resolve-from: 5.0.0 @@ -11998,6 +13186,13 @@ snapshots: dependencies: '@hapi/hoek': 9.3.0 + '@hono/node-server@1.12.0': {} + + '@hono/zod-validator@0.2.2(hono@4.5.0)(zod@3.23.7)': + dependencies: + hono: 4.5.0 + zod: 3.23.7 + '@humanwhocodes/config-array@0.11.11': dependencies: '@humanwhocodes/object-schema': 1.2.1 @@ -12014,12 +13209,22 @@ snapshots: transitivePeerDependencies: - supports-color + '@humanwhocodes/config-array@0.11.14': + dependencies: + '@humanwhocodes/object-schema': 2.0.3 + debug: 4.3.4 + minimatch: 3.1.2 + transitivePeerDependencies: + - supports-color + '@humanwhocodes/module-importer@1.0.1': {} '@humanwhocodes/object-schema@1.2.1': {} '@humanwhocodes/object-schema@2.0.1': {} + '@humanwhocodes/object-schema@2.0.3': {} + '@iarna/toml@2.2.5': {} '@isaacs/cliui@8.0.2': @@ -12123,40 +13328,46 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.4.15 - '@libsql/client@0.5.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': + '@libsql/client@0.4.3(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': dependencies: - '@libsql/core': 0.5.6 + '@libsql/core': 0.4.3 '@libsql/hrana-client': 0.5.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) js-base64: 3.7.7 - libsql: 0.3.18 + optionalDependencies: + libsql: 0.2.0 transitivePeerDependencies: - bufferutil - encoding - utf-8-validate - '@libsql/client@0.6.0': + '@libsql/client@0.5.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': dependencies: - '@libsql/core': 0.6.0 - '@libsql/hrana-client': 0.6.0 + '@libsql/core': 0.5.6 + '@libsql/hrana-client': 0.5.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) 
js-base64: 3.7.7 libsql: 0.3.18 transitivePeerDependencies: - bufferutil + - encoding - utf-8-validate - optional: true - '@libsql/core@0.5.6': + '@libsql/core@0.4.3': dependencies: js-base64: 3.7.7 - '@libsql/core@0.6.0': + '@libsql/core@0.5.6': dependencies: js-base64: 3.7.7 + + '@libsql/darwin-arm64@0.2.0': optional: true '@libsql/darwin-arm64@0.3.18': optional: true + '@libsql/darwin-x64@0.2.0': + optional: true + '@libsql/darwin-x64@0.3.18': optional: true @@ -12171,17 +13382,6 @@ snapshots: - encoding - utf-8-validate - '@libsql/hrana-client@0.6.0': - dependencies: - '@libsql/isomorphic-fetch': 0.2.1 - '@libsql/isomorphic-ws': 0.1.5(bufferutil@4.0.8)(utf-8-validate@6.0.3) - js-base64: 3.7.7 - node-fetch: 3.3.2 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - optional: true - '@libsql/isomorphic-fetch@0.1.12(encoding@0.1.13)': dependencies: '@types/node-fetch': 2.6.11 @@ -12189,29 +13389,41 @@ snapshots: transitivePeerDependencies: - encoding - '@libsql/isomorphic-fetch@0.2.1': - optional: true - '@libsql/isomorphic-ws@0.1.5(bufferutil@4.0.8)(utf-8-validate@6.0.3)': dependencies: - '@types/ws': 8.5.4 + '@types/ws': 8.5.11 ws: 8.17.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) transitivePeerDependencies: - bufferutil - utf-8-validate + '@libsql/linux-arm64-gnu@0.2.0': + optional: true + '@libsql/linux-arm64-gnu@0.3.18': optional: true + '@libsql/linux-arm64-musl@0.2.0': + optional: true + '@libsql/linux-arm64-musl@0.3.18': optional: true + '@libsql/linux-x64-gnu@0.2.0': + optional: true + '@libsql/linux-x64-gnu@0.3.18': optional: true + '@libsql/linux-x64-musl@0.2.0': + optional: true + '@libsql/linux-x64-musl@0.3.18': optional: true + '@libsql/win32-x64-msvc@0.2.0': + optional: true + '@libsql/win32-x64-msvc@0.3.18': optional: true @@ -12261,7 +13473,6 @@ snapshots: '@neondatabase/serverless@0.9.3': dependencies: '@types/pg': 8.11.6 - optional: true '@noble/hashes@1.4.0': {} @@ -12293,10 +13504,10 @@ snapshots: rimraf: 3.0.2 optional: true - 
'@op-engineering/op-sqlite@2.0.22(react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1)': + '@op-engineering/op-sqlite@2.0.22(react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1))(react@18.3.1)': dependencies: react: 18.3.1 - react-native: 0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3) + react-native: 0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1) '@opentelemetry/api@1.8.0': {} @@ -12311,6 +13522,8 @@ snapshots: '@pkgjs/parseargs@0.11.0': optional: true + '@pkgr/core@0.1.1': {} + '@planetscale/database@1.18.0': {} '@polka/url@1.0.0-next.25': {} @@ -12431,7 +13644,7 @@ snapshots: transitivePeerDependencies: - encoding - '@react-native-community/cli-server-api@13.6.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': + '@react-native-community/cli-server-api@13.6.6(bufferutil@4.0.8)(encoding@0.1.13)': dependencies: '@react-native-community/cli-debugger-ui': 13.6.6 '@react-native-community/cli-tools': 13.6.6(encoding@0.1.13) @@ -12441,7 +13654,7 @@ snapshots: nocache: 3.0.4 pretty-format: 26.6.2 serve-static: 1.15.0 - ws: 6.2.2(bufferutil@4.0.8)(utf-8-validate@6.0.3) + ws: 6.2.2(bufferutil@4.0.8) transitivePeerDependencies: - bufferutil - encoding @@ -12468,14 +13681,14 @@ snapshots: dependencies: joi: 17.13.1 - '@react-native-community/cli@13.6.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': + '@react-native-community/cli@13.6.6(bufferutil@4.0.8)(encoding@0.1.13)': dependencies: '@react-native-community/cli-clean': 13.6.6(encoding@0.1.13) '@react-native-community/cli-config': 13.6.6(encoding@0.1.13) 
'@react-native-community/cli-debugger-ui': 13.6.6 '@react-native-community/cli-doctor': 13.6.6(encoding@0.1.13) '@react-native-community/cli-hermes': 13.6.6(encoding@0.1.13) - '@react-native-community/cli-server-api': 13.6.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@react-native-community/cli-server-api': 13.6.6(bufferutil@4.0.8)(encoding@0.1.13) '@react-native-community/cli-tools': 13.6.6(encoding@0.1.13) '@react-native-community/cli-types': 13.6.6 chalk: 4.1.2 @@ -12564,16 +13777,16 @@ snapshots: transitivePeerDependencies: - supports-color - '@react-native/community-cli-plugin@0.74.83(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': + '@react-native/community-cli-plugin@0.74.83(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)': dependencies: - '@react-native-community/cli-server-api': 13.6.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@react-native-community/cli-server-api': 13.6.6(bufferutil@4.0.8)(encoding@0.1.13) '@react-native-community/cli-tools': 13.6.6(encoding@0.1.13) - '@react-native/dev-middleware': 0.74.83(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@react-native/dev-middleware': 0.74.83(bufferutil@4.0.8)(encoding@0.1.13) '@react-native/metro-babel-transformer': 0.74.83(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6)) chalk: 4.1.2 execa: 5.1.1 - metro: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - metro-config: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + metro: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13) + metro-config: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13) metro-core: 0.80.9 node-fetch: 2.7.0(encoding@0.1.13) querystring: 0.2.1 @@ -12588,7 +13801,7 @@ snapshots: '@react-native/debugger-frontend@0.74.83': {} - '@react-native/dev-middleware@0.74.83(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)': + 
'@react-native/dev-middleware@0.74.83(bufferutil@4.0.8)(encoding@0.1.13)': dependencies: '@isaacs/ttlcache': 1.4.1 '@react-native/debugger-frontend': 0.74.83 @@ -12602,7 +13815,7 @@ snapshots: selfsigned: 2.4.1 serve-static: 1.15.0 temp-dir: 2.0.0 - ws: 6.2.2(bufferutil@4.0.8)(utf-8-validate@6.0.3) + ws: 6.2.2(bufferutil@4.0.8) transitivePeerDependencies: - bufferutil - encoding @@ -12625,12 +13838,12 @@ snapshots: '@react-native/normalize-colors@0.74.83': {} - '@react-native/virtualized-lists@0.74.83(@types/react@18.3.1)(react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1)': + '@react-native/virtualized-lists@0.74.83(@types/react@18.3.1)(react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1))(react@18.3.1)': dependencies: invariant: 2.2.4 nullthrows: 1.1.1 react: 18.3.1 - react-native: 0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3) + react-native: 0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1) optionalDependencies: '@types/react': 18.3.1 @@ -12698,51 +13911,99 @@ snapshots: '@rollup/rollup-android-arm-eabi@4.18.0': optional: true + '@rollup/rollup-android-arm-eabi@4.18.1': + optional: true + '@rollup/rollup-android-arm64@4.18.0': optional: true + '@rollup/rollup-android-arm64@4.18.1': + optional: true + '@rollup/rollup-darwin-arm64@4.18.0': optional: true + '@rollup/rollup-darwin-arm64@4.18.1': + optional: true + '@rollup/rollup-darwin-x64@4.18.0': optional: true + '@rollup/rollup-darwin-x64@4.18.1': + optional: true + '@rollup/rollup-linux-arm-gnueabihf@4.18.0': optional: true + 
'@rollup/rollup-linux-arm-gnueabihf@4.18.1': + optional: true + '@rollup/rollup-linux-arm-musleabihf@4.18.0': optional: true + '@rollup/rollup-linux-arm-musleabihf@4.18.1': + optional: true + '@rollup/rollup-linux-arm64-gnu@4.18.0': optional: true + '@rollup/rollup-linux-arm64-gnu@4.18.1': + optional: true + '@rollup/rollup-linux-arm64-musl@4.18.0': optional: true + '@rollup/rollup-linux-arm64-musl@4.18.1': + optional: true + '@rollup/rollup-linux-powerpc64le-gnu@4.18.0': optional: true + '@rollup/rollup-linux-powerpc64le-gnu@4.18.1': + optional: true + '@rollup/rollup-linux-riscv64-gnu@4.18.0': optional: true + '@rollup/rollup-linux-riscv64-gnu@4.18.1': + optional: true + '@rollup/rollup-linux-s390x-gnu@4.18.0': optional: true + '@rollup/rollup-linux-s390x-gnu@4.18.1': + optional: true + '@rollup/rollup-linux-x64-gnu@4.18.0': optional: true + '@rollup/rollup-linux-x64-gnu@4.18.1': + optional: true + '@rollup/rollup-linux-x64-musl@4.18.0': optional: true + '@rollup/rollup-linux-x64-musl@4.18.1': + optional: true + '@rollup/rollup-win32-arm64-msvc@4.18.0': optional: true + '@rollup/rollup-win32-arm64-msvc@4.18.1': + optional: true + '@rollup/rollup-win32-ia32-msvc@4.18.0': optional: true + '@rollup/rollup-win32-ia32-msvc@4.18.1': + optional: true + '@rollup/rollup-win32-x64-msvc@4.18.0': optional: true + '@rollup/rollup-win32-x64-msvc@4.18.1': + optional: true + '@segment/loosely-validate-event@2.0.0': dependencies: component-type: 1.2.2 @@ -13390,6 +14651,11 @@ snapshots: '@types/jsonfile': 6.1.4 '@types/node': 20.12.12 + '@types/glob@8.1.0': + dependencies: + '@types/minimatch': 5.1.2 + '@types/node': 20.12.12 + '@types/http-errors@2.0.4': {} '@types/istanbul-lib-coverage@2.0.6': {} @@ -13402,6 +14668,8 @@ snapshots: dependencies: '@types/istanbul-lib-report': 3.0.3 + '@types/json-diff@1.0.3': {} + '@types/json-schema@7.0.13': {} '@types/json5@0.0.29': {} @@ -13412,6 +14680,8 @@ snapshots: '@types/mime@1.3.5': {} + '@types/minimatch@5.1.2': {} + 
'@types/minimist@1.2.2': {} '@types/node-fetch@2.6.11': @@ -13451,6 +14721,8 @@ snapshots: pg-protocol: 1.6.1 pg-types: 2.2.0 + '@types/pluralize@0.0.33': {} + '@types/prop-types@15.7.12': {} '@types/ps-tree@1.1.2': {} @@ -13466,7 +14738,7 @@ snapshots: '@types/retry@0.12.5': {} - '@types/semver@7.5.3': {} + '@types/semver@7.5.8': {} '@types/send@0.17.4': dependencies: @@ -13494,7 +14766,7 @@ snapshots: '@types/which@3.0.0': {} - '@types/ws@8.5.4': + '@types/ws@8.5.11': dependencies: '@types/node': 20.12.12 @@ -13521,13 +14793,31 @@ snapshots: graphemer: 1.4.0 ignore: 5.2.4 natural-compare: 1.4.0 - semver: 7.5.4 + semver: 7.6.2 ts-api-utils: 1.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) optionalDependencies: typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) transitivePeerDependencies: - supports-color + '@typescript-eslint/eslint-plugin@7.16.1(@typescript-eslint/parser@7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + dependencies: + '@eslint-community/regexpp': 4.11.0 + '@typescript-eslint/parser': 7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/scope-manager': 7.16.1 + '@typescript-eslint/type-utils': 7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/utils': 7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/visitor-keys': 7.16.1 + eslint: 8.57.0 + graphemer: 1.4.0 + ignore: 5.3.1 + natural-compare: 1.4.0 + ts-api-utils: 1.3.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + optionalDependencies: + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + transitivePeerDependencies: + - supports-color + '@typescript-eslint/experimental-utils@5.62.0(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: '@typescript-eslint/utils': 
5.62.0(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) @@ -13562,6 +14852,19 @@ snapshots: transitivePeerDependencies: - supports-color + '@typescript-eslint/parser@7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + dependencies: + '@typescript-eslint/scope-manager': 7.16.1 + '@typescript-eslint/types': 7.16.1 + '@typescript-eslint/typescript-estree': 7.16.1(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/visitor-keys': 7.16.1 + debug: 4.3.4 + eslint: 8.57.0 + optionalDependencies: + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + transitivePeerDependencies: + - supports-color + '@typescript-eslint/rule-tester@6.10.0(@eslint/eslintrc@3.1.0)(eslint@8.53.0)(typescript@5.2.2)': dependencies: '@eslint/eslintrc': 3.1.0 @@ -13570,7 +14873,7 @@ snapshots: ajv: 6.12.6 eslint: 8.53.0 lodash.merge: 4.6.2 - semver: 7.5.4 + semver: 7.6.2 transitivePeerDependencies: - supports-color - typescript @@ -13590,6 +14893,11 @@ snapshots: '@typescript-eslint/types': 6.7.3 '@typescript-eslint/visitor-keys': 6.7.3 + '@typescript-eslint/scope-manager@7.16.1': + dependencies: + '@typescript-eslint/types': 7.16.1 + '@typescript-eslint/visitor-keys': 7.16.1 + '@typescript-eslint/type-utils@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: '@typescript-eslint/typescript-estree': 6.7.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) @@ -13602,12 +14910,26 @@ snapshots: transitivePeerDependencies: - supports-color + '@typescript-eslint/type-utils@7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + dependencies: + '@typescript-eslint/typescript-estree': 7.16.1(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/utils': 7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + debug: 4.3.4 + eslint: 8.57.0 + ts-api-utils: 
1.3.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + optionalDependencies: + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + transitivePeerDependencies: + - supports-color + '@typescript-eslint/types@5.62.0': {} '@typescript-eslint/types@6.10.0': {} '@typescript-eslint/types@6.7.3': {} + '@typescript-eslint/types@7.16.1': {} + '@typescript-eslint/typescript-estree@5.62.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: '@typescript-eslint/types': 5.62.0 @@ -13615,7 +14937,7 @@ snapshots: debug: 4.3.4 globby: 11.1.0 is-glob: 4.0.3 - semver: 7.5.4 + semver: 7.6.2 tsutils: 3.21.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) optionalDependencies: typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) @@ -13629,7 +14951,7 @@ snapshots: debug: 4.3.4 globby: 11.1.0 is-glob: 4.0.3 - semver: 7.5.4 + semver: 7.6.2 ts-api-utils: 1.0.3(typescript@5.2.2) optionalDependencies: typescript: 5.2.2 @@ -13643,24 +14965,39 @@ snapshots: debug: 4.3.4 globby: 11.1.0 is-glob: 4.0.3 - semver: 7.5.4 + semver: 7.6.2 ts-api-utils: 1.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) optionalDependencies: typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) transitivePeerDependencies: - supports-color + '@typescript-eslint/typescript-estree@7.16.1(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + dependencies: + '@typescript-eslint/types': 7.16.1 + '@typescript-eslint/visitor-keys': 7.16.1 + debug: 4.3.4 + globby: 11.1.0 + is-glob: 4.0.3 + minimatch: 9.0.4 + semver: 7.6.2 + ts-api-utils: 1.3.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + optionalDependencies: + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + transitivePeerDependencies: + - supports-color + '@typescript-eslint/utils@5.62.0(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': dependencies: '@eslint-community/eslint-utils': 4.4.0(eslint@8.50.0) '@types/json-schema': 7.0.13 - '@types/semver': 7.5.3 + 
'@types/semver': 7.5.8 '@typescript-eslint/scope-manager': 5.62.0 '@typescript-eslint/types': 5.62.0 '@typescript-eslint/typescript-estree': 5.62.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) eslint: 8.50.0 eslint-scope: 5.1.1 - semver: 7.5.4 + semver: 7.6.2 transitivePeerDependencies: - supports-color - typescript @@ -13669,12 +15006,12 @@ snapshots: dependencies: '@eslint-community/eslint-utils': 4.4.0(eslint@8.53.0) '@types/json-schema': 7.0.13 - '@types/semver': 7.5.3 + '@types/semver': 7.5.8 '@typescript-eslint/scope-manager': 6.10.0 '@typescript-eslint/types': 6.10.0 '@typescript-eslint/typescript-estree': 6.10.0(typescript@5.2.2) eslint: 8.53.0 - semver: 7.5.4 + semver: 7.6.2 transitivePeerDependencies: - supports-color - typescript @@ -13683,12 +15020,23 @@ snapshots: dependencies: '@eslint-community/eslint-utils': 4.4.0(eslint@8.50.0) '@types/json-schema': 7.0.13 - '@types/semver': 7.5.3 + '@types/semver': 7.5.8 '@typescript-eslint/scope-manager': 6.7.3 '@typescript-eslint/types': 6.7.3 '@typescript-eslint/typescript-estree': 6.7.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) eslint: 8.50.0 - semver: 7.5.4 + semver: 7.6.2 + transitivePeerDependencies: + - supports-color + - typescript + + '@typescript-eslint/utils@7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + dependencies: + '@eslint-community/eslint-utils': 4.4.0(eslint@8.57.0) + '@typescript-eslint/scope-manager': 7.16.1 + '@typescript-eslint/types': 7.16.1 + '@typescript-eslint/typescript-estree': 7.16.1(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + eslint: 8.57.0 transitivePeerDependencies: - supports-color - typescript @@ -13708,6 +15056,11 @@ snapshots: '@typescript-eslint/types': 6.7.3 eslint-visitor-keys: 3.4.3 + '@typescript-eslint/visitor-keys@7.16.1': + dependencies: + '@typescript-eslint/types': 7.16.1 + eslint-visitor-keys: 3.4.3 + '@typescript/analyze-trace@0.10.1': dependencies: chalk: 4.1.2 @@ -13771,7 +15124,7 @@ 
snapshots: pathe: 1.1.2 picocolors: 1.0.1 sirv: 2.0.4 - vitest: 1.6.0(@types/node@20.12.12)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) + vitest: 1.6.0(@types/node@18.19.33)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) '@vitest/utils@1.6.0': dependencies: @@ -13984,6 +15337,10 @@ snapshots: arrify@3.0.0: {} + as-table@1.0.55: + dependencies: + printable-characters: 1.0.42 + asap@2.0.6: {} asn1@0.2.6: @@ -14068,6 +15425,9 @@ snapshots: dependencies: possible-typed-array-names: 1.0.0 + aws-ssl-profiles@1.1.1: + optional: true + axios@1.6.8: dependencies: follow-redirects: 1.15.6 @@ -14140,13 +15500,12 @@ snapshots: dependencies: open: 8.4.2 - better-sqlite3@10.0.0: + better-sqlite3@8.7.0: dependencies: bindings: 1.5.0 prebuild-install: 7.1.2 - optional: true - better-sqlite3@8.7.0: + better-sqlite3@9.6.0: dependencies: bindings: 1.5.0 prebuild-install: 7.1.2 @@ -14165,6 +15524,8 @@ snapshots: inherits: 2.0.4 readable-stream: 3.6.2 + blake3-wasm@2.1.5: {} + blueimp-md5@2.19.0: {} body-parser@1.20.2: @@ -14255,7 +15616,7 @@ snapshots: builtins@5.1.0: dependencies: - semver: 7.6.1 + semver: 7.6.2 bun-types@0.6.14: {} @@ -14266,6 +15627,11 @@ snapshots: esbuild: 0.18.20 load-tsconfig: 0.2.5 + bundle-require@5.0.0(esbuild@0.23.0): + dependencies: + esbuild: 0.23.0 + load-tsconfig: 0.2.5 + busboy@1.6.0: dependencies: streamsearch: 1.1.0 @@ -14350,6 +15716,13 @@ snapshots: caniuse-lite@1.0.30001624: {} + capnp-ts@0.7.0: + dependencies: + debug: 4.3.4 + tslib: 2.6.2 + transitivePeerDependencies: + - supports-color + cardinal@2.1.1: dependencies: ansicolors: 0.3.2 @@ -14402,6 +15775,18 @@ snapshots: optionalDependencies: fsevents: 2.3.3 + chokidar@3.6.0: + dependencies: + anymatch: 3.1.3 + braces: 3.0.3 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.3 + chownr@1.1.4: {} chownr@2.0.0: {} @@ -14517,6 +15902,8 @@ snapshots: colorette@2.0.19: {} + colors@1.4.0: {} + 
combined-stream@1.0.8: dependencies: delayed-stream: 1.0.0 @@ -14527,6 +15914,8 @@ snapshots: commander@11.0.0: {} + commander@12.1.0: {} + commander@2.20.3: {} commander@4.1.1: {} @@ -14567,7 +15956,7 @@ snapshots: js-string-escape: 1.0.1 lodash: 4.17.21 md5-hex: 3.0.1 - semver: 7.5.1 + semver: 7.6.2 well-known-symbols: 2.0.0 concurrently@8.2.1: @@ -14593,6 +15982,8 @@ snapshots: transitivePeerDependencies: - supports-color + consola@3.2.3: {} + console-control-strings@1.1.0: optional: true @@ -14608,8 +15999,14 @@ snapshots: cookie-signature@1.0.6: {} + cookie@0.5.0: {} + cookie@0.6.0: {} + copy-anything@3.0.5: + dependencies: + is-what: 4.1.16 + core-js-compat@3.37.1: dependencies: browserslist: 4.23.0 @@ -14696,6 +16093,8 @@ snapshots: dag-map@1.0.2: {} + data-uri-to-buffer@2.0.2: {} + data-uri-to-buffer@4.0.1: {} data-view-buffer@1.0.1: @@ -14720,6 +16119,8 @@ snapshots: dependencies: '@babel/runtime': 7.22.10 + date-fns@3.6.0: {} + date-time@3.1.0: dependencies: time-zone: 1.0.0 @@ -14738,6 +16139,10 @@ snapshots: dependencies: ms: 2.1.2 + debug@4.3.5: + dependencies: + ms: 2.1.2 + decamelize@1.2.0: {} decompress-response@6.0.0: @@ -14782,6 +16187,8 @@ snapshots: has-property-descriptors: 1.0.2 object-keys: 1.1.1 + defu@6.1.4: {} + del@6.1.1: dependencies: globby: 11.1.0 @@ -14883,7 +16290,7 @@ snapshots: chalk: 5.3.0 commander: 9.5.0 esbuild: 0.18.20 - esbuild-register: 3.4.2(esbuild@0.18.20) + esbuild-register: 3.5.0(esbuild@0.18.20) glob: 8.1.0 hanji: 0.0.5 json-diff: 0.9.0 @@ -14892,11 +16299,25 @@ snapshots: transitivePeerDependencies: - supports-color - 
drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20240524.0)(@libsql/client@0.6.0)(@neondatabase/serverless@0.9.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.10)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@10.0.0)(bun-types@1.0.3)(knex@3.1.0(better-sqlite3@10.0.0)(mysql2@3.9.8)(pg@8.11.5)(sqlite3@5.1.7))(kysely@0.27.3)(mysql2@3.9.8)(pg@8.11.5)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7): + drizzle-kit@0.21.2: + dependencies: + '@esbuild-kit/esm-loader': 2.5.5 + commander: 9.5.0 + env-paths: 3.0.0 + esbuild: 0.19.12 + esbuild-register: 3.5.0(esbuild@0.19.12) + glob: 8.1.0 + hanji: 0.0.5 + json-diff: 0.9.0 + zod: 3.23.7 + transitivePeerDependencies: + - supports-color + + drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20240524.0)(@libsql/client@0.5.6)(@neondatabase/serverless@0.9.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.10)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@9.6.0)(bun-types@1.0.3)(knex@2.5.1(better-sqlite3@9.6.0)(mysql2@3.11.0)(pg@8.11.5)(sqlite3@5.1.7))(kysely@0.25.0)(mysql2@3.11.0)(pg@8.11.5)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7): optionalDependencies: '@aws-sdk/client-rds-data': 3.583.0 '@cloudflare/workers-types': 4.20240524.0 - '@libsql/client': 0.6.0 + '@libsql/client': 0.5.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) '@neondatabase/serverless': 0.9.3 '@opentelemetry/api': 1.8.0 '@planetscale/database': 1.18.0 @@ -14904,11 +16325,11 @@ snapshots: '@types/pg': 8.11.6 '@types/sql.js': 1.4.9 '@vercel/postgres': 0.8.0 - better-sqlite3: 10.0.0 + better-sqlite3: 9.6.0 bun-types: 1.0.3 - knex: 3.1.0(better-sqlite3@10.0.0)(mysql2@3.9.8)(pg@8.11.5)(sqlite3@5.1.7) - kysely: 0.27.3 - mysql2: 3.9.8 + knex: 2.5.1(better-sqlite3@9.6.0)(mysql2@3.11.0)(pg@8.11.5)(sqlite3@5.1.7) + kysely: 0.25.0 + mysql2: 3.11.0 pg: 8.11.5 
postgres: 3.4.4 sql.js: 1.10.3 @@ -14950,6 +16371,8 @@ snapshots: env-paths@2.2.1: optional: true + env-paths@3.0.0: {} + envinfo@7.13.0: {} eol@0.9.1: {} @@ -15162,16 +16585,29 @@ snapshots: esbuild-netbsd-64@0.14.54: optional: true + esbuild-node-externals@1.14.0(esbuild@0.19.12): + dependencies: + esbuild: 0.19.12 + find-up: 5.0.0 + tslib: 2.6.2 + esbuild-openbsd-64@0.14.54: optional: true - esbuild-register@3.4.2(esbuild@0.18.20): + esbuild-register@3.5.0(esbuild@0.18.20): dependencies: debug: 4.3.4 esbuild: 0.18.20 transitivePeerDependencies: - supports-color + esbuild-register@3.5.0(esbuild@0.19.12): + dependencies: + debug: 4.3.4 + esbuild: 0.19.12 + transitivePeerDependencies: + - supports-color + esbuild-sunos-64@0.14.54: optional: true @@ -15258,6 +16694,32 @@ snapshots: '@esbuild/win32-ia32': 0.18.20 '@esbuild/win32-x64': 0.18.20 + esbuild@0.19.12: + optionalDependencies: + '@esbuild/aix-ppc64': 0.19.12 + '@esbuild/android-arm': 0.19.12 + '@esbuild/android-arm64': 0.19.12 + '@esbuild/android-x64': 0.19.12 + '@esbuild/darwin-arm64': 0.19.12 + '@esbuild/darwin-x64': 0.19.12 + '@esbuild/freebsd-arm64': 0.19.12 + '@esbuild/freebsd-x64': 0.19.12 + '@esbuild/linux-arm': 0.19.12 + '@esbuild/linux-arm64': 0.19.12 + '@esbuild/linux-ia32': 0.19.12 + '@esbuild/linux-loong64': 0.19.12 + '@esbuild/linux-mips64el': 0.19.12 + '@esbuild/linux-ppc64': 0.19.12 + '@esbuild/linux-riscv64': 0.19.12 + '@esbuild/linux-s390x': 0.19.12 + '@esbuild/linux-x64': 0.19.12 + '@esbuild/netbsd-x64': 0.19.12 + '@esbuild/openbsd-x64': 0.19.12 + '@esbuild/sunos-x64': 0.19.12 + '@esbuild/win32-arm64': 0.19.12 + '@esbuild/win32-ia32': 0.19.12 + '@esbuild/win32-x64': 0.19.12 + esbuild@0.20.2: optionalDependencies: '@esbuild/aix-ppc64': 0.20.2 @@ -15310,6 +16772,33 @@ snapshots: '@esbuild/win32-ia32': 0.21.5 '@esbuild/win32-x64': 0.21.5 + esbuild@0.23.0: + optionalDependencies: + '@esbuild/aix-ppc64': 0.23.0 + '@esbuild/android-arm': 0.23.0 + '@esbuild/android-arm64': 0.23.0 + 
'@esbuild/android-x64': 0.23.0 + '@esbuild/darwin-arm64': 0.23.0 + '@esbuild/darwin-x64': 0.23.0 + '@esbuild/freebsd-arm64': 0.23.0 + '@esbuild/freebsd-x64': 0.23.0 + '@esbuild/linux-arm': 0.23.0 + '@esbuild/linux-arm64': 0.23.0 + '@esbuild/linux-ia32': 0.23.0 + '@esbuild/linux-loong64': 0.23.0 + '@esbuild/linux-mips64el': 0.23.0 + '@esbuild/linux-ppc64': 0.23.0 + '@esbuild/linux-riscv64': 0.23.0 + '@esbuild/linux-s390x': 0.23.0 + '@esbuild/linux-x64': 0.23.0 + '@esbuild/netbsd-x64': 0.23.0 + '@esbuild/openbsd-arm64': 0.23.0 + '@esbuild/openbsd-x64': 0.23.0 + '@esbuild/sunos-x64': 0.23.0 + '@esbuild/win32-arm64': 0.23.0 + '@esbuild/win32-ia32': 0.23.0 + '@esbuild/win32-x64': 0.23.0 + escalade@3.1.1: {} escalade@3.1.2: {} @@ -15324,6 +16813,10 @@ snapshots: escape-string-regexp@5.0.0: {} + eslint-config-prettier@9.1.0(eslint@8.57.0): + dependencies: + eslint: 8.57.0 + eslint-import-resolver-node@0.3.9: dependencies: debug: 3.2.7 @@ -15371,6 +16864,15 @@ snapshots: eslint-plugin-no-instanceof@1.0.1: {} + eslint-plugin-prettier@5.2.1(eslint-config-prettier@9.1.0(eslint@8.57.0))(eslint@8.57.0)(prettier@2.8.8): + dependencies: + eslint: 8.57.0 + prettier: 2.8.8 + prettier-linter-helpers: 1.0.0 + synckit: 0.9.1 + optionalDependencies: + eslint-config-prettier: 9.1.0(eslint@8.57.0) + eslint-plugin-unicorn@48.0.1(eslint@8.50.0): dependencies: '@babel/helper-validator-identifier': 7.22.5 @@ -15387,7 +16889,7 @@ snapshots: read-pkg-up: 7.0.1 regexp-tree: 0.1.27 regjsparser: 0.10.0 - semver: 7.5.4 + semver: 7.6.2 strip-indent: 3.0.0 eslint-plugin-unused-imports@3.0.0(@typescript-eslint/eslint-plugin@6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0): @@ -15413,15 +16915,58 @@ snapshots: eslint-visitor-keys@4.0.0: {} - eslint@8.50.0: + eslint@8.50.0: + dependencies: + '@eslint-community/eslint-utils': 4.4.0(eslint@8.50.0) + 
'@eslint-community/regexpp': 4.9.0 + '@eslint/eslintrc': 2.1.2 + '@eslint/js': 8.50.0 + '@humanwhocodes/config-array': 0.11.11 + '@humanwhocodes/module-importer': 1.0.1 + '@nodelib/fs.walk': 1.2.8 + ajv: 6.12.6 + chalk: 4.1.2 + cross-spawn: 7.0.3 + debug: 4.3.4 + doctrine: 3.0.0 + escape-string-regexp: 4.0.0 + eslint-scope: 7.2.2 + eslint-visitor-keys: 3.4.3 + espree: 9.6.1 + esquery: 1.5.0 + esutils: 2.0.3 + fast-deep-equal: 3.1.3 + file-entry-cache: 6.0.1 + find-up: 5.0.0 + glob-parent: 6.0.2 + globals: 13.22.0 + graphemer: 1.4.0 + ignore: 5.2.4 + imurmurhash: 0.1.4 + is-glob: 4.0.3 + is-path-inside: 3.0.3 + js-yaml: 4.1.0 + json-stable-stringify-without-jsonify: 1.0.1 + levn: 0.4.1 + lodash.merge: 4.6.2 + minimatch: 3.1.2 + natural-compare: 1.4.0 + optionator: 0.9.3 + strip-ansi: 6.0.1 + text-table: 0.2.0 + transitivePeerDependencies: + - supports-color + + eslint@8.53.0: dependencies: - '@eslint-community/eslint-utils': 4.4.0(eslint@8.50.0) + '@eslint-community/eslint-utils': 4.4.0(eslint@8.53.0) '@eslint-community/regexpp': 4.9.0 - '@eslint/eslintrc': 2.1.2 - '@eslint/js': 8.50.0 - '@humanwhocodes/config-array': 0.11.11 + '@eslint/eslintrc': 2.1.3 + '@eslint/js': 8.53.0 + '@humanwhocodes/config-array': 0.11.13 '@humanwhocodes/module-importer': 1.0.1 '@nodelib/fs.walk': 1.2.8 + '@ungap/structured-clone': 1.2.0 ajv: 6.12.6 chalk: 4.1.2 cross-spawn: 7.0.3 @@ -15455,13 +17000,13 @@ snapshots: transitivePeerDependencies: - supports-color - eslint@8.53.0: + eslint@8.57.0: dependencies: - '@eslint-community/eslint-utils': 4.4.0(eslint@8.53.0) + '@eslint-community/eslint-utils': 4.4.0(eslint@8.57.0) '@eslint-community/regexpp': 4.9.0 - '@eslint/eslintrc': 2.1.3 - '@eslint/js': 8.53.0 - '@humanwhocodes/config-array': 0.11.13 + '@eslint/eslintrc': 2.1.4 + '@eslint/js': 8.57.0 + '@humanwhocodes/config-array': 0.11.14 '@humanwhocodes/module-importer': 1.0.1 '@nodelib/fs.walk': 1.2.8 '@ungap/structured-clone': 1.2.0 @@ -15482,7 +17027,7 @@ snapshots: glob-parent: 6.0.2 
globals: 13.22.0 graphemer: 1.4.0 - ignore: 5.2.4 + ignore: 5.3.1 imurmurhash: 0.1.4 is-glob: 4.0.3 is-path-inside: 3.0.3 @@ -15526,6 +17071,8 @@ snapshots: estraverse@5.3.0: {} + estree-walker@0.6.1: {} + estree-walker@2.0.2: {} estree-walker@3.0.3: @@ -15601,39 +17148,41 @@ snapshots: signal-exit: 4.1.0 strip-final-newline: 3.0.0 + exit-hook@2.2.1: {} + exit@0.1.2: {} expand-template@2.0.3: {} - expo-asset@10.0.6(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): + expo-asset@10.0.6(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)): dependencies: '@react-native/assets-registry': 0.74.83 - expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) - expo-constants: 16.0.1(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) + expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13) + expo-constants: 16.0.1(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)) invariant: 2.2.4 md5-file: 3.2.3 transitivePeerDependencies: - supports-color - expo-constants@16.0.1(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): + expo-constants@16.0.1(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)): dependencies: '@expo/config': 9.0.2 - expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13) transitivePeerDependencies: - 
supports-color - expo-file-system@17.0.1(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): + expo-file-system@17.0.1(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)): dependencies: - expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13) - expo-font@12.0.5(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): + expo-font@12.0.5(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)): dependencies: - expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13) fontfaceobserver: 2.3.0 - expo-keep-awake@13.0.2(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): + expo-keep-awake@13.0.2(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)): dependencies: - expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13) expo-modules-autolinking@1.11.1: dependencies: @@ -15647,24 +17196,24 @@ snapshots: dependencies: invariant: 2.2.4 - 
expo-sqlite@13.4.0(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)): + expo-sqlite@13.4.0(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)): dependencies: '@expo/websql': 1.0.1 - expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + expo: 51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13) - expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3): + expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13): dependencies: '@babel/runtime': 7.24.6 - '@expo/cli': 0.18.13(bufferutil@4.0.8)(encoding@0.1.13)(expo-modules-autolinking@1.11.1)(utf-8-validate@6.0.3) + '@expo/cli': 0.18.13(bufferutil@4.0.8)(encoding@0.1.13)(expo-modules-autolinking@1.11.1) '@expo/config': 9.0.2 '@expo/config-plugins': 8.0.4 '@expo/metro-config': 0.18.4 '@expo/vector-icons': 14.0.2 babel-preset-expo: 11.0.6(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6)) - expo-asset: 10.0.6(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) - expo-file-system: 17.0.1(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) - expo-font: 12.0.5(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) - expo-keep-awake: 13.0.2(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3)) + expo-asset: 
10.0.6(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)) + expo-file-system: 17.0.1(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)) + expo-font: 12.0.5(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)) + expo-keep-awake: 13.0.2(expo@51.0.8(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)) expo-modules-autolinking: 1.11.1 expo-modules-core: 1.12.11 fbemitter: 3.0.0(encoding@0.1.13) @@ -16013,8 +17562,15 @@ snapshots: get-port@3.2.0: {} + get-port@6.1.2: {} + get-port@7.1.0: {} + get-source@2.0.12: + dependencies: + data-uri-to-buffer: 2.0.2 + source-map: 0.6.1 + get-stream@4.1.0: dependencies: pump: 3.0.0 @@ -16052,19 +17608,13 @@ snapshots: dependencies: is-glob: 4.0.3 - glob@10.2.2: - dependencies: - foreground-child: 3.1.1 - jackspeak: 2.1.0 - minimatch: 9.0.1 - minipass: 5.0.0 - path-scurry: 1.7.0 + glob-to-regexp@0.4.1: {} glob@10.3.10: dependencies: foreground-child: 3.1.1 jackspeak: 2.3.6 - minimatch: 9.0.1 + minimatch: 9.0.4 minipass: 5.0.0 path-scurry: 1.10.1 @@ -16133,7 +17683,7 @@ snapshots: array-union: 2.1.0 dir-glob: 3.0.1 fast-glob: 3.3.1 - ignore: 5.2.4 + ignore: 5.3.1 merge2: 1.4.1 slash: 3.0.0 @@ -16226,6 +17776,8 @@ snapshots: hono@4.0.1: {} + hono@4.5.0: {} + hosted-git-info@2.8.9: {} hosted-git-info@3.0.8: @@ -16519,6 +18071,8 @@ snapshots: dependencies: call-bind: 1.0.2 + is-what@4.1.16: {} + is-wsl@1.1.0: {} is-wsl@2.2.0: @@ -16533,12 +18087,6 @@ snapshots: isobject@3.0.1: {} - jackspeak@2.1.0: - dependencies: - cliui: 7.0.4 - optionalDependencies: - '@pkgjs/parseargs': 0.11.0 - jackspeak@2.3.6: dependencies: '@isaacs/cliui': 8.0.2 @@ -16688,6 +18236,12 @@ snapshots: difflib: 0.2.4 dreamopt: 0.8.0 + json-diff@1.0.6: + dependencies: + '@ewoudenberg/difflib': 0.1.0 + colors: 1.4.0 + dreamopt: 0.8.0 + 
json-parse-better-errors@1.0.2: {} json-parse-even-better-errors@2.3.1: {} @@ -16766,7 +18320,7 @@ snapshots: transitivePeerDependencies: - supports-color - knex@3.1.0(better-sqlite3@10.0.0)(mysql2@3.9.8)(pg@8.11.5)(sqlite3@5.1.7): + knex@2.5.1(better-sqlite3@9.6.0)(mysql2@3.11.0)(pg@8.11.5)(sqlite3@5.1.7): dependencies: colorette: 2.0.19 commander: 10.0.1 @@ -16777,14 +18331,14 @@ snapshots: getopts: 2.3.0 interpret: 2.2.0 lodash: 4.17.21 - pg-connection-string: 2.6.2 + pg-connection-string: 2.6.1 rechoir: 0.8.0 resolve-from: 5.0.0 tarn: 3.0.2 tildify: 2.0.0 optionalDependencies: - better-sqlite3: 10.0.0 - mysql2: 3.9.8 + better-sqlite3: 9.6.0 + mysql2: 3.11.0 pg: 8.11.5 sqlite3: 5.1.7 transitivePeerDependencies: @@ -16793,9 +18347,6 @@ snapshots: kysely@0.25.0: {} - kysely@0.27.3: - optional: true - leven@3.1.0: {} levn@0.4.1: @@ -16803,6 +18354,20 @@ snapshots: prelude-ls: 1.2.1 type-check: 0.4.0 + libsql@0.2.0: + dependencies: + '@neon-rs/load': 0.0.4 + detect-libc: 2.0.2 + optionalDependencies: + '@libsql/darwin-arm64': 0.2.0 + '@libsql/darwin-x64': 0.2.0 + '@libsql/linux-arm64-gnu': 0.2.0 + '@libsql/linux-arm64-musl': 0.2.0 + '@libsql/linux-x64-gnu': 0.2.0 + '@libsql/linux-x64-musl': 0.2.0 + '@libsql/win32-x64-msvc': 0.2.0 + optional: true + libsql@0.3.18: dependencies: '@neon-rs/load': 0.0.4 @@ -16904,6 +18469,8 @@ snapshots: lilconfig@2.1.0: {} + lilconfig@3.1.2: {} + lines-and-columns@1.2.4: {} load-json-file@7.0.1: {} @@ -16987,6 +18554,10 @@ snapshots: dependencies: es5-ext: 0.10.62 + magic-string@0.25.9: + dependencies: + sourcemap-codec: 1.4.8 + magic-string@0.30.10: dependencies: '@jridgewell/sourcemap-codec': 1.4.15 @@ -17118,12 +18689,12 @@ snapshots: metro-core: 0.80.9 rimraf: 3.0.2 - metro-config@0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3): + metro-config@0.80.9(bufferutil@4.0.8)(encoding@0.1.13): dependencies: connect: 3.7.0 cosmiconfig: 5.2.1 jest-validate: 29.7.0 - metro: 
0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + metro: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13) metro-cache: 0.80.9 metro-core: 0.80.9 metro-runtime: 0.80.9 @@ -17199,13 +18770,13 @@ snapshots: transitivePeerDependencies: - supports-color - metro-transform-worker@0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3): + metro-transform-worker@0.80.9(bufferutil@4.0.8)(encoding@0.1.13): dependencies: '@babel/core': 7.24.6 '@babel/generator': 7.24.6 '@babel/parser': 7.24.6 '@babel/types': 7.24.6 - metro: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + metro: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13) metro-babel-transformer: 0.80.9 metro-cache: 0.80.9 metro-cache-key: 0.80.9 @@ -17219,7 +18790,7 @@ snapshots: - supports-color - utf-8-validate - metro@0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3): + metro@0.80.9(bufferutil@4.0.8)(encoding@0.1.13): dependencies: '@babel/code-frame': 7.24.6 '@babel/core': 7.24.6 @@ -17245,7 +18816,7 @@ snapshots: metro-babel-transformer: 0.80.9 metro-cache: 0.80.9 metro-cache-key: 0.80.9 - metro-config: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + metro-config: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13) metro-core: 0.80.9 metro-file-map: 0.80.9 metro-resolver: 0.80.9 @@ -17253,7 +18824,7 @@ snapshots: metro-source-map: 0.80.9 metro-symbolicate: 0.80.9 metro-transform-plugins: 0.80.9 - metro-transform-worker: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + metro-transform-worker: 0.80.9(bufferutil@4.0.8)(encoding@0.1.13) mime-types: 2.1.35 node-fetch: 2.7.0(encoding@0.1.13) nullthrows: 1.1.1 @@ -17262,7 +18833,7 @@ snapshots: source-map: 0.5.7 strip-ansi: 6.0.1 throat: 5.0.0 - ws: 7.5.9(bufferutil@4.0.8)(utf-8-validate@6.0.3) + ws: 7.5.9(bufferutil@4.0.8) yargs: 17.7.2 transitivePeerDependencies: - bufferutil @@ -17290,6 +18861,8 @@ snapshots: mime@2.6.0: {} + mime@3.0.0: {} + mimic-fn@1.2.0: {} mimic-fn@2.1.0: {} @@ -17300,6 +18873,25 @@ 
snapshots: min-indent@1.0.1: {} + miniflare@3.20240712.0(bufferutil@4.0.8)(utf-8-validate@6.0.3): + dependencies: + '@cspotcode/source-map-support': 0.8.1 + acorn: 8.11.3 + acorn-walk: 8.3.2 + capnp-ts: 0.7.0 + exit-hook: 2.2.1 + glob-to-regexp: 0.4.1 + stoppable: 1.1.0 + undici: 5.28.4 + workerd: 1.20240712.0 + ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) + youch: 3.3.3 + zod: 3.23.7 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + minimatch@3.1.2: dependencies: brace-expansion: 1.1.11 @@ -17312,10 +18904,6 @@ snapshots: dependencies: brace-expansion: 2.0.1 - minimatch@9.0.1: - dependencies: - brace-expansion: 2.0.1 - minimatch@9.0.4: dependencies: brace-expansion: 2.0.1 @@ -17391,6 +18979,8 @@ snapshots: ms@2.1.3: {} + mustache@4.2.0: {} + mv@2.1.1: dependencies: mkdirp: 0.5.6 @@ -17398,8 +18988,9 @@ snapshots: rimraf: 2.4.5 optional: true - mysql2@3.3.3: + mysql2@3.11.0: dependencies: + aws-ssl-profiles: 1.1.1 denque: 2.1.0 generate-function: 2.3.1 iconv-lite: 0.6.3 @@ -17408,8 +18999,9 @@ snapshots: named-placeholders: 1.1.3 seq-queue: 0.0.5 sqlstring: 2.3.3 + optional: true - mysql2@3.9.8: + mysql2@3.3.3: dependencies: denque: 2.1.0 generate-function: 2.3.1 @@ -17419,7 +19011,6 @@ snapshots: named-placeholders: 1.1.3 seq-queue: 0.0.5 sqlstring: 2.3.3 - optional: true mz@2.7.0: dependencies: @@ -17459,7 +19050,7 @@ snapshots: node-abi@3.62.0: dependencies: - semver: 7.6.1 + semver: 7.6.2 node-abort-controller@3.1.1: {} @@ -17478,6 +19069,8 @@ snapshots: emojilib: 2.4.0 skin-tone: 2.0.0 + node-fetch-native@1.6.4: {} + node-fetch@2.7.0(encoding@0.1.13): dependencies: whatwg-url: 5.0.0 @@ -17572,7 +19165,7 @@ snapshots: dependencies: execa: 6.1.0 parse-package-name: 1.0.0 - semver: 7.6.1 + semver: 7.6.2 validate-npm-package-name: 4.0.0 nullthrows@1.1.1: {} @@ -17828,13 +19421,10 @@ snapshots: lru-cache: 10.2.2 minipass: 7.1.2 - path-scurry@1.7.0: - dependencies: - lru-cache: 9.1.2 - minipass: 5.0.0 - path-to-regexp@0.1.7: 
{} + path-to-regexp@6.2.2: {} + path-type@4.0.0: {} pathe@1.1.2: {} @@ -17850,9 +19440,6 @@ snapshots: pg-connection-string@2.6.1: {} - pg-connection-string@2.6.2: - optional: true - pg-connection-string@2.6.4: {} pg-int8@1.0.1: {} @@ -17948,6 +19535,14 @@ snapshots: postcss: 8.4.39 ts-node: 10.9.2(@types/node@20.12.12)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + postcss-load-config@6.0.1(postcss@8.4.39)(tsx@3.14.0)(yaml@2.4.2): + dependencies: + lilconfig: 3.1.2 + optionalDependencies: + postcss: 8.4.39 + tsx: 3.14.0 + yaml: 2.4.2 + postcss@8.4.38: dependencies: nanoid: 3.3.7 @@ -18003,6 +19598,12 @@ snapshots: prelude-ls@1.2.1: {} + prettier-linter-helpers@1.0.0: + dependencies: + fast-diff: 1.3.0 + + prettier@2.8.8: {} + prettier@3.0.3: {} pretty-bytes@5.6.0: {} @@ -18024,6 +19625,8 @@ snapshots: dependencies: parse-ms: 3.0.0 + printable-characters@1.0.42: {} + prisma@5.14.0: dependencies: '@prisma/engines': 5.14.0 @@ -18114,10 +19717,10 @@ snapshots: minimist: 1.2.8 strip-json-comments: 2.0.1 - react-devtools-core@5.2.0(bufferutil@4.0.8)(utf-8-validate@6.0.3): + react-devtools-core@5.2.0(bufferutil@4.0.8): dependencies: shell-quote: 1.8.1 - ws: 7.5.9(bufferutil@4.0.8)(utf-8-validate@6.0.3) + ws: 7.5.9(bufferutil@4.0.8) transitivePeerDependencies: - bufferutil - utf-8-validate @@ -18130,19 +19733,19 @@ snapshots: react-is@18.3.1: {} - react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3): + react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1): dependencies: '@jest/create-cache-key-function': 29.7.0 - '@react-native-community/cli': 13.6.6(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@react-native-community/cli': 13.6.6(bufferutil@4.0.8)(encoding@0.1.13) '@react-native-community/cli-platform-android': 
13.6.6(encoding@0.1.13) '@react-native-community/cli-platform-ios': 13.6.6(encoding@0.1.13) '@react-native/assets-registry': 0.74.83 '@react-native/codegen': 0.74.83(@babel/preset-env@7.24.6(@babel/core@7.24.6)) - '@react-native/community-cli-plugin': 0.74.83(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.3) + '@react-native/community-cli-plugin': 0.74.83(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(bufferutil@4.0.8)(encoding@0.1.13) '@react-native/gradle-plugin': 0.74.83 '@react-native/js-polyfills': 0.74.83 '@react-native/normalize-colors': 0.74.83 - '@react-native/virtualized-lists': 0.74.83(@types/react@18.3.1)(react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1) + '@react-native/virtualized-lists': 0.74.83(@types/react@18.3.1)(react-native@0.74.1(@babel/core@7.24.6)(@babel/preset-env@7.24.6(@babel/core@7.24.6))(@types/react@18.3.1)(bufferutil@4.0.8)(encoding@0.1.13)(react@18.3.1))(react@18.3.1) abort-controller: 3.0.0 anser: 1.4.10 ansi-regex: 5.0.1 @@ -18161,14 +19764,14 @@ snapshots: pretty-format: 26.6.2 promise: 8.3.0 react: 18.3.1 - react-devtools-core: 5.2.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) + react-devtools-core: 5.2.0(bufferutil@4.0.8) react-refresh: 0.14.2 react-shallow-renderer: 16.15.0(react@18.3.1) regenerator-runtime: 0.13.11 scheduler: 0.24.0-canary-efb381bbf-20230505 stacktrace-parser: 0.1.10 whatwg-fetch: 3.6.20 - ws: 6.2.2(bufferutil@4.0.8)(utf-8-validate@6.0.3) + ws: 6.2.2(bufferutil@4.0.8) yargs: 17.7.2 optionalDependencies: '@types/react': 18.3.1 @@ -18397,7 +20000,21 @@ snapshots: rimraf@5.0.0: dependencies: - glob: 10.2.2 + glob: 10.4.1 + + rollup-plugin-inject@3.0.2: + dependencies: + estree-walker: 0.6.1 + magic-string: 0.25.9 + rollup-pluginutils: 2.8.2 + + rollup-plugin-node-polyfills@0.2.1: + 
dependencies: + rollup-plugin-inject: 3.0.2 + + rollup-pluginutils@2.8.2: + dependencies: + estree-walker: 0.6.1 rollup@3.20.7: optionalDependencies: @@ -18429,6 +20046,28 @@ snapshots: '@rollup/rollup-win32-x64-msvc': 4.18.0 fsevents: 2.3.3 + rollup@4.18.1: + dependencies: + '@types/estree': 1.0.5 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.18.1 + '@rollup/rollup-android-arm64': 4.18.1 + '@rollup/rollup-darwin-arm64': 4.18.1 + '@rollup/rollup-darwin-x64': 4.18.1 + '@rollup/rollup-linux-arm-gnueabihf': 4.18.1 + '@rollup/rollup-linux-arm-musleabihf': 4.18.1 + '@rollup/rollup-linux-arm64-gnu': 4.18.1 + '@rollup/rollup-linux-arm64-musl': 4.18.1 + '@rollup/rollup-linux-powerpc64le-gnu': 4.18.1 + '@rollup/rollup-linux-riscv64-gnu': 4.18.1 + '@rollup/rollup-linux-s390x-gnu': 4.18.1 + '@rollup/rollup-linux-x64-gnu': 4.18.1 + '@rollup/rollup-linux-x64-musl': 4.18.1 + '@rollup/rollup-win32-arm64-msvc': 4.18.1 + '@rollup/rollup-win32-ia32-msvc': 4.18.1 + '@rollup/rollup-win32-x64-msvc': 4.18.1 + fsevents: 2.3.3 + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 @@ -18491,16 +20130,6 @@ snapshots: semver@6.3.1: {} - semver@7.5.1: - dependencies: - lru-cache: 6.0.0 - - semver@7.5.4: - dependencies: - lru-cache: 6.0.0 - - semver@7.6.1: {} - semver@7.6.2: {} send@0.18.0: @@ -18685,6 +20314,8 @@ snapshots: dependencies: whatwg-url: 7.1.0 + sourcemap-codec@1.4.8: {} + spawn-command@0.0.2: {} spdx-correct@3.2.0: @@ -18776,12 +20407,19 @@ snapshots: dependencies: type-fest: 0.7.1 + stacktracey@2.1.8: + dependencies: + as-table: 1.0.55 + get-source: 2.0.12 + statuses@1.5.0: {} statuses@2.0.1: {} std-env@3.7.0: {} + stoppable@1.1.0: {} + stream-buffers@2.2.0: {} stream-combiner@0.0.4: @@ -18893,12 +20531,26 @@ snapshots: pirates: 4.0.6 ts-interface-checker: 0.1.13 + sucrase@3.35.0: + dependencies: + '@jridgewell/gen-mapping': 0.3.5 + commander: 4.1.1 + glob: 10.4.1 + lines-and-columns: 1.2.4 + mz: 2.7.0 + pirates: 4.0.6 + ts-interface-checker: 0.1.13 + 
sudo-prompt@8.2.5: {} sudo-prompt@9.1.1: {} sudo-prompt@9.2.1: {} + superjson@2.2.1: + dependencies: + copy-anything: 3.0.5 + supertap@3.0.1: dependencies: indent-string: 5.0.0 @@ -18930,6 +20582,11 @@ snapshots: supports-preserve-symlinks-flag@1.0.0: {} + synckit@0.9.1: + dependencies: + '@pkgr/core': 0.1.1 + tslib: 2.6.2 + tar-fs@2.0.1: dependencies: chownr: 1.1.4 @@ -19088,6 +20745,10 @@ snapshots: dependencies: typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + ts-api-utils@1.3.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): + dependencies: + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + ts-expose-internals-conditionally@1.0.0-empty.0: {} ts-interface-checker@0.1.13: {} @@ -19148,6 +20809,32 @@ snapshots: - supports-color - ts-node + tsup@8.1.2(postcss@8.4.39)(tsx@3.14.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(yaml@2.4.2): + dependencies: + bundle-require: 5.0.0(esbuild@0.23.0) + cac: 6.7.14 + chokidar: 3.6.0 + consola: 3.2.3 + debug: 4.3.5 + esbuild: 0.23.0 + execa: 5.1.1 + globby: 11.1.0 + joycon: 3.1.1 + postcss-load-config: 6.0.1(postcss@8.4.39)(tsx@3.14.0)(yaml@2.4.2) + resolve-from: 5.0.0 + rollup: 4.18.1 + source-map: 0.8.0-beta.0 + sucrase: 3.35.0 + tree-kill: 1.2.2 + optionalDependencies: + postcss: 8.4.39 + typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + transitivePeerDependencies: + - jiti + - supports-color + - tsx + - yaml + tsutils@3.21.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): dependencies: tslib: 1.14.1 @@ -19332,6 +21019,19 @@ snapshots: dependencies: '@fastify/busboy': 2.1.1 + undici@5.28.4: + dependencies: + '@fastify/busboy': 2.1.1 + + unenv-nightly@1.10.0-1717606461.a117952: + dependencies: + consola: 3.2.3 + defu: 6.1.4 + mime: 3.0.0 + node-fetch-native: 1.6.4 + pathe: 1.1.2 + ufo: 1.5.3 + unicode-canonical-property-names-ecmascript@2.0.0: {} unicode-emoji-modifier-base@1.0.0: {} @@ -19458,6 +21158,23 @@ snapshots: - supports-color - terser + 
vite-node@1.6.0(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0): + dependencies: + cac: 6.7.14 + debug: 4.3.4 + pathe: 1.1.2 + picocolors: 1.0.1 + vite: 5.3.3(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0) + transitivePeerDependencies: + - '@types/node' + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + vite-node@1.6.0(@types/node@20.10.1)(lightningcss@1.25.1)(terser@5.31.0): dependencies: cac: 6.7.14 @@ -19503,6 +21220,17 @@ snapshots: - supports-color - typescript + vite-tsconfig-paths@4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0)): + dependencies: + debug: 4.3.4 + globrex: 0.1.2 + tsconfck: 3.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + optionalDependencies: + vite: 5.3.3(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0) + transitivePeerDependencies: + - supports-color + - typescript + vite-tsconfig-paths@4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0)): dependencies: debug: 4.3.4 @@ -19525,6 +21253,17 @@ snapshots: lightningcss: 1.25.1 terser: 5.31.0 + vite@5.2.12(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0): + dependencies: + esbuild: 0.20.2 + postcss: 8.4.38 + rollup: 4.18.0 + optionalDependencies: + '@types/node': 18.19.33 + fsevents: 2.3.3 + lightningcss: 1.25.1 + terser: 5.31.0 + vite@5.2.12(@types/node@20.10.1)(lightningcss@1.25.1)(terser@5.31.0): dependencies: esbuild: 0.20.2 @@ -19558,6 +21297,17 @@ snapshots: lightningcss: 1.25.1 terser: 5.31.0 + vite@5.3.3(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0): + dependencies: + esbuild: 0.21.5 + postcss: 8.4.39 + rollup: 4.18.0 + optionalDependencies: + '@types/node': 18.19.33 + fsevents: 2.3.3 + lightningcss: 1.25.1 + terser: 5.31.0 + vite@5.3.3(@types/node@20.10.1)(lightningcss@1.25.1)(terser@5.31.0): dependencies: esbuild: 0.21.5 
@@ -19614,6 +21364,40 @@ snapshots: - supports-color - terser + vitest@1.6.0(@types/node@18.19.33)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0): + dependencies: + '@vitest/expect': 1.6.0 + '@vitest/runner': 1.6.0 + '@vitest/snapshot': 1.6.0 + '@vitest/spy': 1.6.0 + '@vitest/utils': 1.6.0 + acorn-walk: 8.3.2 + chai: 4.4.1 + debug: 4.3.4 + execa: 8.0.1 + local-pkg: 0.5.0 + magic-string: 0.30.10 + pathe: 1.1.2 + picocolors: 1.0.0 + std-env: 3.7.0 + strip-literal: 2.1.0 + tinybench: 2.8.0 + tinypool: 0.8.4 + vite: 5.2.12(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0) + vite-node: 1.6.0(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0) + why-is-node-running: 2.2.2 + optionalDependencies: + '@types/node': 18.19.33 + '@vitest/ui': 1.6.0(vitest@1.6.0) + transitivePeerDependencies: + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + vitest@1.6.0(@types/node@20.10.1)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0): dependencies: '@vitest/expect': 1.6.0 @@ -19775,6 +21559,40 @@ snapshots: wordwrap@1.0.0: {} + workerd@1.20240712.0: + optionalDependencies: + '@cloudflare/workerd-darwin-64': 1.20240712.0 + '@cloudflare/workerd-darwin-arm64': 1.20240712.0 + '@cloudflare/workerd-linux-64': 1.20240712.0 + '@cloudflare/workerd-linux-arm64': 1.20240712.0 + '@cloudflare/workerd-windows-64': 1.20240712.0 + + wrangler@3.65.0(@cloudflare/workers-types@4.20240524.0)(bufferutil@4.0.8)(utf-8-validate@6.0.3): + dependencies: + '@cloudflare/kv-asset-handler': 0.3.4 + '@esbuild-plugins/node-globals-polyfill': 0.2.3(esbuild@0.17.19) + '@esbuild-plugins/node-modules-polyfill': 0.2.2(esbuild@0.17.19) + blake3-wasm: 2.1.5 + chokidar: 3.5.3 + date-fns: 3.6.0 + esbuild: 0.17.19 + miniflare: 3.20240712.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) + nanoid: 3.3.7 + path-to-regexp: 6.2.2 + resolve: 1.22.8 + resolve.exports: 2.0.2 + selfsigned: 2.4.1 + source-map: 0.6.1 + unenv: unenv-nightly@1.10.0-1717606461.a117952 + xxhash-wasm: 
1.0.2 + optionalDependencies: + '@cloudflare/workers-types': 4.20240524.0 + fsevents: 2.3.3 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + wrap-ansi@6.2.0: dependencies: ansi-styles: 4.3.0 @@ -19806,17 +21624,15 @@ snapshots: imurmurhash: 0.1.4 signal-exit: 4.0.2 - ws@6.2.2(bufferutil@4.0.8)(utf-8-validate@6.0.3): + ws@6.2.2(bufferutil@4.0.8): dependencies: async-limiter: 1.0.1 optionalDependencies: bufferutil: 4.0.8 - utf-8-validate: 6.0.3 - ws@7.5.9(bufferutil@4.0.8)(utf-8-validate@6.0.3): + ws@7.5.9(bufferutil@4.0.8): optionalDependencies: bufferutil: 4.0.8 - utf-8-validate: 6.0.3 ws@8.14.2(bufferutil@4.0.8)(utf-8-validate@6.0.3): optionalDependencies: @@ -19828,6 +21644,11 @@ snapshots: bufferutil: 4.0.8 utf-8-validate: 6.0.3 + ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.3): + optionalDependencies: + bufferutil: 4.0.8 + utf-8-validate: 6.0.3 + xcode@3.0.1: dependencies: simple-plist: 1.3.1 @@ -19846,6 +21667,8 @@ snapshots: xtend@4.0.2: {} + xxhash-wasm@1.0.2: {} + y18n@4.0.3: {} y18n@5.0.8: {} @@ -19907,6 +21730,12 @@ snapshots: yocto-queue@1.0.0: {} + youch@3.3.3: + dependencies: + cookie: 0.5.0 + mustache: 4.2.0 + stacktracey: 2.1.8 + zod@3.21.4: {} zod@3.23.7: {} @@ -19927,4 +21756,4 @@ snapshots: ps-tree: 1.2.0 webpod: 0.0.2 which: 3.0.1 - yaml: 2.4.2 + yaml: 2.4.2 \ No newline at end of file diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 1c10dc1ec..7cf196659 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -1,5 +1,6 @@ packages: - drizzle-orm + - drizzle-kit - drizzle-zod - drizzle-typebox - drizzle-valibot