diff --git a/eslint.config.mjs b/eslint.config.mjs index 4fe2260ec..7fa89d1e9 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -1,4 +1,5 @@ import typescriptEslint from "@typescript-eslint/eslint-plugin" +import pluginReact from "eslint-plugin-react" import globals from "globals" import tsParser from "@typescript-eslint/parser" import path from "node:path" @@ -76,7 +77,16 @@ export default [ "testSetup.ts", ], }, + { + settings: { + react: { + version: "^18.2.0", + }, + }, + }, js.configs.recommended, + pluginReact.configs.flat.recommended, + pluginReact.configs.flat["jsx-runtime"], ...compat.extends( "plugin:@typescript-eslint/eslint-recommended", "plugin:@typescript-eslint/recommended" diff --git a/examples/react-counter/package.json b/examples/react-counter/package.json index ab49f1fc1..c8107de92 100644 --- a/examples/react-counter/package.json +++ b/examples/react-counter/package.json @@ -2,7 +2,7 @@ "name": "@automerge/automerge-repo-demo-counter", "repository": "https://github.com/automerge/automerge-repo/tree/master/examples/react-counter", "private": true, - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "type": "module", "scripts": { "dev": "vite --open", @@ -15,7 +15,7 @@ "@automerge/automerge-repo-network-websocket": "workspace:*", "@automerge/automerge-repo-react-hooks": "workspace:*", "@automerge/automerge-repo-storage-indexeddb": "workspace:*", - "react": "^18.2.0", - "react-dom": "^18.2.0" + "react": "^18.3.0", + "react-dom": "^18.3.0" } } diff --git a/examples/react-counter/src/main.tsx b/examples/react-counter/src/main.tsx index 0c46b1fbc..c2291b978 100644 --- a/examples/react-counter/src/main.tsx +++ b/examples/react-counter/src/main.tsx @@ -38,7 +38,7 @@ declare global { const rootDocUrl = `${document.location.hash.substring(1)}` let handle if (isValidAutomergeUrl(rootDocUrl)) { - handle = repo.find(rootDocUrl) + handle = await repo.find(rootDocUrl) } else { handle = repo.create<{ count: number }>({ count: 0 }) } diff --git 
a/examples/react-todo/package.json b/examples/react-todo/package.json index a71c798a3..c5af40207 100644 --- a/examples/react-todo/package.json +++ b/examples/react-todo/package.json @@ -2,7 +2,7 @@ "name": "@automerge/automerge-repo-demo-todo", "repository": "https://github.com/automerge/automerge-repo/tree/master/examples/react-todo", "private": true, - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "type": "module", "scripts": { "dev": "vite --open", @@ -15,12 +15,12 @@ "@automerge/automerge-repo-network-websocket": "workspace:*", "@automerge/automerge-repo-react-hooks": "workspace:*", "@automerge/automerge-repo-storage-indexeddb": "workspace:*", - "@ibm/plex": "^6.1.1", "autoprefixer": "^10.4.13", "classnames": "^2.3.2", "postcss": "^8.4.21", - "react": "^18.2.0", - "react-dom": "^18.2.0" + "react": "^18.3.0", + "react-dom": "^18.3.0", + "react-error-boundary": "^5.0.0" }, "devDependencies": { "tailwindcss": "^3.2.4" diff --git a/examples/react-todo/src/App.tsx b/examples/react-todo/src/App.tsx index 3e197de08..367eb18fb 100644 --- a/examples/react-todo/src/App.tsx +++ b/examples/react-todo/src/App.tsx @@ -1,7 +1,7 @@ import { AutomergeUrl } from "@automerge/automerge-repo" import { useDocument, useRepo } from "@automerge/automerge-repo-react-hooks" import cx from "classnames" -import { useRef, useState } from "react" +import { Suspense, useRef, useState } from "react" import { Todo } from "./Todo.js" import { ExtendedArray, Filter, State, TodoData } from "./types.js" @@ -28,7 +28,7 @@ export function App({ url }: { url: AutomergeUrl }) { if (!state) return [] return state.todos.filter(async url => { if (filter === Filter.all) return true - const todo = await repo.find(url).doc() + const todo = (await repo.find(url)).doc() if (filter === Filter.completed) return todo.completed if (filter === Filter.incomplete) return !todo.completed return false @@ -38,7 +38,7 @@ export function App({ url }: { url: AutomergeUrl }) { const destroyCompleted = async () 
=> { if (!state) return for (const url of await getFilteredTodos(Filter.completed)) { - const todo = await repo.find(url).doc() + const todo = (await repo.find(url)).doc() if (todo.completed) destroy(url) } } @@ -89,16 +89,18 @@ export function App({ url }: { url: AutomergeUrl }) { {/* todos */}
-
    - {state.todos.map(url => ( - destroy(url)} - filter={filter} - /> - ))} -
+ Loading todo items...}> +
    + {state.todos.map(url => ( + destroy(url)} + filter={filter} + /> + ))} +
+
{/* footer tools */} diff --git a/examples/react-todo/src/main.tsx b/examples/react-todo/src/main.tsx index 0dc510fde..38e03fddb 100644 --- a/examples/react-todo/src/main.tsx +++ b/examples/react-todo/src/main.tsx @@ -3,7 +3,8 @@ import { BroadcastChannelNetworkAdapter } from "@automerge/automerge-repo-networ import { WebSocketClientAdapter } from "@automerge/automerge-repo-network-websocket" import { RepoContext } from "@automerge/automerge-repo-react-hooks" import { IndexedDBStorageAdapter } from "@automerge/automerge-repo-storage-indexeddb" -import React from "react" +import React, { Suspense } from "react" +import { ErrorBoundary } from "react-error-boundary" import ReactDOM from "react-dom/client" import { App } from "./App.js" import { State } from "./types.js" @@ -27,7 +28,7 @@ declare global { const rootDocUrl = `${document.location.hash.substring(1)}` let handle if (isValidAutomergeUrl(rootDocUrl)) { - handle = repo.find(rootDocUrl) + handle = await repo.find(rootDocUrl) } else { handle = repo.create({ todos: [] }) } @@ -38,7 +39,11 @@ window.repo = repo ReactDOM.createRoot(document.getElementById("root") as HTMLElement).render( - + Something went wrong}> + Loading...}> + + + ) diff --git a/examples/react-todo/tailwind.config.cjs b/examples/react-todo/tailwind.config.cjs index bfc8638db..3779ae418 100644 --- a/examples/react-todo/tailwind.config.cjs +++ b/examples/react-todo/tailwind.config.cjs @@ -1,22 +1,10 @@ /** @type {import('tailwindcss').Config} */ const colors = require("tailwindcss/colors") -const emoji = "Segoe UI Emoji" -const mono = "IBM Plex Mono" -const sans = "IBM Plex Sans" -const condensed = "IBM Plex Sans Condensed" -const serif = "IBM Plex Serif" - module.exports = { content: ["./**/*.{html,tsx}"], theme: { extend: { - fontFamily: { - mono: [mono, emoji, "monospace"], - sans: [sans, emoji, "sans-serif"], - condensed: [condensed, emoji, "sans-serif"], - serif: [serif, emoji, "serif"], - }, zIndex: {}, colors: { primary: colors.blue, diff 
--git a/examples/react-use-awareness/package.json b/examples/react-use-awareness/package.json index 9232fec90..a7caf1ad1 100644 --- a/examples/react-use-awareness/package.json +++ b/examples/react-use-awareness/package.json @@ -1,6 +1,6 @@ { "name": "automerge-use-awareness-example-project", - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "private": true, "repository": "https://github.com/automerge/automerge-repo/tree/master/examples/react-use-awareness", "type": "module", @@ -14,8 +14,8 @@ "@automerge/automerge-repo-react-hooks": "workspace:*", "@automerge/automerge-repo-storage-indexeddb": "workspace:*", "eventemitter3": "^5.0.1", - "react": "^18.2.0", - "react-dom": "^18.2.0", + "react": "^18.3.0", + "react-dom": "^18.3.0", "react-usestateref": "^1.0.8", "uuid": "^9.0.0" }, diff --git a/examples/svelte-counter/package.json b/examples/svelte-counter/package.json index 88bfc0c61..81e4d4812 100644 --- a/examples/svelte-counter/package.json +++ b/examples/svelte-counter/package.json @@ -2,7 +2,7 @@ "name": "@automerge/automerge-repo-demo-counter-svelte", "repository": "https://github.com/automerge/automerge-repo/tree/master/examples/svelte-counter", "private": true, - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "type": "module", "scripts": { "dev": "vite", diff --git a/examples/sync-server/index.js b/examples/sync-server/index.js index 235607c67..941eabffc 100644 --- a/examples/sync-server/index.js +++ b/examples/sync-server/index.js @@ -5,8 +5,35 @@ import { WebSocketServer } from "ws" import { Repo } from "@automerge/automerge-repo" import { WebSocketServerAdapter } from "@automerge/automerge-repo-network-websocket" import { NodeFSStorageAdapter } from "@automerge/automerge-repo-storage-nodefs" +import { default as Prometheus } from "prom-client" import os from "os" +const registry = new Prometheus.Registry() +Prometheus.collectDefaultMetrics({ register: registry }) + +const buckets = Prometheus.linearBuckets(0, 1000, 60) + +const 
metrics = { + docLoaded: new Prometheus.Histogram({ + name: "automerge_repo_doc_loaded_duration_millis", + help: "Duration of loading a document", + buckets, + registers: [registry], + }), + receiveSyncMessage: new Prometheus.Histogram({ + name: "automerge_repo_receive_sync_message_duration_millis", + help: "Duration of receiving a sync message", + buckets, + registers: [registry], + }), + numOps: new Prometheus.Histogram({ + name: "automerge_repo_num_ops", + help: "Number of operations in a document", + buckets: Prometheus.exponentialBuckets(1, 2, 20), + registers: [registry], + }), +} + export class Server { /** @type WebSocketServer */ #socket @@ -45,12 +72,25 @@ export class Server { } const serverRepo = new Repo(config) + // Observe metrics for prometheus and also log the events so log aggregators like loki can pick them up + serverRepo.on("doc-metrics", (event) => { + console.log(JSON.stringify(event)) + metrics.numOps.observe(event.numOps) + if (event.type === "doc-loaded") { + metrics.docLoaded.observe(event.durationMillis) + } else if (event.type === "receive-sync-message") { + metrics.receiveSyncMessage.observe(event.durationMillis) + } + }) + app.get("/", (req, res) => { res.send(`👍 @automerge/example-sync-server is running`) }) - app.get("/metrics", (req, res) => { - res.json(serverRepo.metrics()) + // In a real server this endpoint would be authenticated or not even part of the same express app + app.get("/prometheus_metrics", async (req, res) => { + res.set("Content-Type", registry.contentType) + res.end(await registry.metrics()) }) this.#server = app.listen(PORT, () => { diff --git a/examples/sync-server/package.json b/examples/sync-server/package.json index e488c3ab2..aa316e873 100644 --- a/examples/sync-server/package.json +++ b/examples/sync-server/package.json @@ -2,7 +2,7 @@ { "name": "@automerge/example-sync-server", "repository": "https://github.com/automerge/automerge-repo/tree/master/examples/sync-server", "private": true, - "version": 
"2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "main": "index.js", "license": "MIT", "type": "module", @@ -15,6 +15,7 @@ "@automerge/automerge-repo-network-websocket": "workspace:*", "@automerge/automerge-repo-storage-nodefs": "workspace:*", "express": "^4.18.1", + "prom-client": "^15.1.3", "ws": "^8.7.0" }, "devDependencies": { diff --git a/lerna.json b/lerna.json index fa38e3112..7803ac5ce 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { "$schema": "node_modules/lerna/schemas/lerna-schema.json", "npmClient": "pnpm", - "version": "2.0.0-alpha.11" + "version": "2.0.0-alpha.22" } diff --git a/package.json b/package.json index cfc646fc8..88d0d95ac 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@automerge/automerge-repo-monorepo", - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "description": "Automerge Repo monorepo", "main": "packages/automerge-repo/dist/index.js", "repository": "https://github.com/automerge/automerge-repo", @@ -26,7 +26,7 @@ "watch": "pnpm run --parallel --stream -r watch " }, "engines": { - "node": ">= 18.x" + "node": ">=18.2.0 <22.0.0" }, "packageManager": "pnpm@9.7.0", "prettier": { @@ -46,9 +46,9 @@ "@types/ws": "^8.5.10", "@typescript-eslint/eslint-plugin": "^7.13.1", "@typescript-eslint/parser": "^7.13.1", - "@vitejs/plugin-react": "^4.3.1", - "@vitest/coverage-v8": "^1.4.0", - "@vitest/ui": "^1.4.0", + "@vitejs/plugin-react": "^4.3.4", + "@vitest/coverage-v8": "^3.0.3", + "@vitest/ui": "^3.0.3", "c8": "^7.14.0", "cross-env": "^7.0.3", "dev-null-cli": "^2.0.0", @@ -68,8 +68,8 @@ "ts-node": "^10.9.2", "typedoc": "^0.25.12", "typescript": "^5.4.2", - "vite": "^5.2.0", - "vite-plugin-wasm": "^3.3.0", - "vitest": "^1.4.0" + "vite": "^6.0.11", + "vite-plugin-wasm": "^3.4.1", + "vitest": "^3.0.4" } } diff --git a/packages/automerge-repo-network-broadcastchannel/package.json b/packages/automerge-repo-network-broadcastchannel/package.json index c14d90148..204b4f8ed 100644 --- 
a/packages/automerge-repo-network-broadcastchannel/package.json +++ b/packages/automerge-repo-network-broadcastchannel/package.json @@ -1,6 +1,6 @@ { "name": "@automerge/automerge-repo-network-broadcastchannel", - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "description": "BroadcastChannel network adapter for Automerge Repo", "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo-network-broadcastchannel", "author": "Peter van Hardenberg ", diff --git a/packages/automerge-repo-network-broadcastchannel/src/index.ts b/packages/automerge-repo-network-broadcastchannel/src/index.ts index 718381e27..7cbe86e5a 100644 --- a/packages/automerge-repo-network-broadcastchannel/src/index.ts +++ b/packages/automerge-repo-network-broadcastchannel/src/index.ts @@ -107,7 +107,12 @@ export class BroadcastChannelNetworkAdapter extends NetworkAdapter { if (!("data" in message)) { this.emit("message", message) } else { - const data = message.data as ArrayBufferLike + if (!message.data) { + throw new Error( + "We got a message without data, we can't send this." 
+ ) + } + const data = message.data this.emit("message", { ...message, data: new Uint8Array(data), diff --git a/packages/automerge-repo-network-messagechannel/package.json b/packages/automerge-repo-network-messagechannel/package.json index 347c5fa27..cb45d8223 100644 --- a/packages/automerge-repo-network-messagechannel/package.json +++ b/packages/automerge-repo-network-messagechannel/package.json @@ -1,6 +1,6 @@ { "name": "@automerge/automerge-repo-network-messagechannel", - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "description": "MessageChannel network adapter for Automerge Repo", "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo-network-messagechannel", "author": "Peter van Hardenberg ", diff --git a/packages/automerge-repo-network-websocket/package.json b/packages/automerge-repo-network-websocket/package.json index 6e7e8eee5..c8a37eef2 100644 --- a/packages/automerge-repo-network-websocket/package.json +++ b/packages/automerge-repo-network-websocket/package.json @@ -1,6 +1,6 @@ { "name": "@automerge/automerge-repo-network-websocket", - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "description": "isomorphic node/browser Websocket network adapter for Automerge Repo", "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo-network-websocket", "author": "Peter van Hardenberg ", diff --git a/packages/automerge-repo-network-websocket/src/WebSocketServerAdapter.ts b/packages/automerge-repo-network-websocket/src/WebSocketServerAdapter.ts index fb15e7437..1647f9f00 100644 --- a/packages/automerge-repo-network-websocket/src/WebSocketServerAdapter.ts +++ b/packages/automerge-repo-network-websocket/src/WebSocketServerAdapter.ts @@ -123,7 +123,14 @@ export class WebSocketServerAdapter extends NetworkAdapter { } receiveMessage(messageBytes: Uint8Array, socket: WebSocket) { - const message: FromClientMessage = decode(messageBytes) + let message: 
FromClientMessage + try { + message = decode(messageBytes) + } catch (e) { + log("invalid message received, closing connection") + socket.close() + return + } const { type, senderId } = message diff --git a/packages/automerge-repo-network-websocket/test/Websocket.test.ts b/packages/automerge-repo-network-websocket/test/Websocket.test.ts index 2b11d7673..26d398da7 100644 --- a/packages/automerge-repo-network-websocket/test/Websocket.test.ts +++ b/packages/automerge-repo-network-websocket/test/Websocket.test.ts @@ -1,6 +1,7 @@ import { next as A } from "@automerge/automerge" import { AutomergeUrl, + DocHandle, DocumentId, PeerId, Repo, @@ -21,6 +22,7 @@ import { describe, it } from "vitest" import WebSocket from "ws" import { WebSocketClientAdapter } from "../src/WebSocketClientAdapter.js" import { WebSocketServerAdapter } from "../src/WebSocketServerAdapter.js" +import { encodeHeads } from "../../automerge-repo/dist/AutomergeUrl.js" describe("Websocket adapters", () => { const browserPeerId = "browser" as PeerId @@ -338,6 +340,43 @@ describe("Websocket adapters", () => { await eventPromise(serverAdapter, "peer-disconnected") }) + it("should disconnect from a client that sends an invalid CBOR message", async () => { + // Set up a server and wait for it to be ready + const port = await getPort() + const serverUrl = `ws://localhost:${port}` + const server = http.createServer() + const serverSocket = new WebSocket.Server({ server }) + await new Promise(resolve => server.listen(port, resolve)) + + // Create a repo listening on the socket + const serverAdapter = new WebSocketServerAdapter(serverSocket) + const serverRepo = new Repo({ + network: [serverAdapter], + peerId: serverPeerId, + }) + + // Create a new socket connected to the repo + const browserSocket = new WebSocket(serverUrl) + await new Promise(resolve => browserSocket.on("open", resolve)) + const disconnected = new Promise(resolve => + browserSocket.on("close", resolve) + ) + + // Send an invalid CBOR message, 
in this case we use a definite length + // array with too many elements. This test should actually work for any + // invalid message but this reproduces a specific issue we were seeing on + // the sync server + // + // 0x9b is 0b100_11011: the high 3 bits (100) are major type 4, an array + // the low 5 bits (11011 = 27) indicate the length is encoded in the next 8 bytes + // 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00 is 2**32, which is longer than allowed + const invalidLargeArray = new Uint8Array([ + 0x9b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, + ]) + browserSocket.send(invalidLargeArray) + await disconnected + }) + it("should send the negotiated protocol version in its hello message", async () => { const response = await serverResponse({ type: "join", @@ -481,8 +520,7 @@ describe("Websocket adapters", () => { }) // make a change to the handle on the sync server - const handle = repo.find<{ foo: string }>(url) - await handle.whenReady() + const handle = await repo.find<{ foo: string }>(url) handle.change(d => (d.foo = "baz")) // Okay, so now there is a document on both the client and the server @@ -582,9 +620,10 @@ describe("Websocket adapters", () => { await pause(50) } + // we encode localHeads for consistency with URL formatted heads let localHeads = A.getHeads(clientDoc) let remoteHeads = handle.heads() - if (!headsAreSame(localHeads, remoteHeads)) { + if (!headsAreSame(encodeHeads(localHeads), remoteHeads)) { throw new Error("heads not equal") } }) diff --git a/packages/automerge-repo-react-hooks/README.md b/packages/automerge-repo-react-hooks/README.md index c3688c261..72a76a3fe 100644 --- a/packages/automerge-repo-react-hooks/README.md +++ b/packages/automerge-repo-react-hooks/README.md @@ -19,6 +19,6 @@ Most hooks depend on RepoContext being available. Return a document & updater fn, by ID. -#### [useHandle](./src/useHandle.ts) +#### [useDocHandle](./src/useDocHandle.ts) Return a handle, by ID. 
diff --git a/packages/automerge-repo-react-hooks/package.json b/packages/automerge-repo-react-hooks/package.json index aa6fc960a..8a9e62118 100644 --- a/packages/automerge-repo-react-hooks/package.json +++ b/packages/automerge-repo-react-hooks/package.json @@ -1,6 +1,6 @@ { "name": "@automerge/automerge-repo-react-hooks", - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "description": "Hooks to access an Automerge Repo from your react app.", "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo-react-hooks", "author": "Peter van Hardenberg ", @@ -17,19 +17,23 @@ "@automerge/automerge": "^2.2.8", "@automerge/automerge-repo": "workspace:*", "eventemitter3": "^5.0.1", - "react": "^18.2.0", - "react-dom": "^18.2.0", + "react": "^18.3.0", + "react-dom": "^18.3.0", "react-usestateref": "^1.0.8" }, "devDependencies": { + "@testing-library/jest-dom": "^6.6.3", "@testing-library/react": "^14.0.0", + "eslint-plugin-react-hooks": "^5.1.0", "jsdom": "^22.1.0", + "react-error-boundary": "^5.0.0", "rollup-plugin-visualizer": "^5.9.3", + "vite": "^6.0.7", "vite-plugin-dts": "^3.9.1" }, "peerDependencies": { - "react": ">16.8.0", - "react-dom": ">16.8.0" + "react": ">18.3.0", + "react-dom": ">18.3.0" }, "watch": { "build": { diff --git a/packages/automerge-repo-react-hooks/src/index.ts b/packages/automerge-repo-react-hooks/src/index.ts index 0f177bbbb..21ff74a53 100644 --- a/packages/automerge-repo-react-hooks/src/index.ts +++ b/packages/automerge-repo-react-hooks/src/index.ts @@ -13,7 +13,7 @@ * #### {@link useDocument} * Return the current state of a document (or undefined) and a change function. * - * #### {@link useHandle} + * #### {@link useDocHandle} * Return a DocHandle by passing in a DocumentURL. 
* * #### {@link useLocalAwareness} & {@link useRemoteAwareness} @@ -25,7 +25,8 @@ */ export { useDocument } from "./useDocument.js" export { useDocuments } from "./useDocuments.js" -export { useHandle } from "./useHandle.js" +export { useDocHandle } from "./useDocHandle.js" +export { useDocHandles } from "./useDocHandles.js" export { RepoContext, useRepo } from "./useRepo.js" export { useLocalAwareness, diff --git a/packages/automerge-repo-react-hooks/src/useDocHandle.ts b/packages/automerge-repo-react-hooks/src/useDocHandle.ts new file mode 100644 index 000000000..6f43e160a --- /dev/null +++ b/packages/automerge-repo-react-hooks/src/useDocHandle.ts @@ -0,0 +1,76 @@ +import { AnyDocumentId, DocHandle } from "@automerge/automerge-repo/slim" +import { PromiseWrapper, wrapPromise } from "./wrapPromise.js" +import { useRepo } from "./useRepo.js" +import { useEffect, useRef, useState } from "react" + +// Shared with useDocHandles +export const wrapperCache = new Map< + AnyDocumentId, + PromiseWrapper> +>() +// NB: this is a global cache that isn't keyed on the Repo +// so if your app uses the same documents in two Repos +// this could cause problems. please let me know if you do. + +interface UseDocHandleSuspendingParams { + suspense: true +} +interface UseDocHandleSynchronousParams { + suspense: false +} + +type UseDocHandleParams = + | UseDocHandleSuspendingParams + | UseDocHandleSynchronousParams + +export function useDocHandle( + id: AnyDocumentId, + params: UseDocHandleSuspendingParams +): DocHandle +export function useDocHandle( + id: AnyDocumentId | undefined, + params?: UseDocHandleSynchronousParams +): DocHandle | undefined +export function useDocHandle( + id: AnyDocumentId | undefined, + { suspense }: UseDocHandleParams = { suspense: false } +): DocHandle | undefined { + const repo = useRepo() + const controllerRef = useRef() + const [handle, setHandle] = useState | undefined>() + + let wrapper = id ? 
wrapperCache.get(id) : undefined + if (!wrapper && id) { + controllerRef.current?.abort() + controllerRef.current = new AbortController() + + const promise = repo.find(id, { signal: controllerRef.current.signal }) + wrapper = wrapPromise(promise) + wrapperCache.set(id, wrapper) + } + + /* From here we split into two paths: suspense and not. + * In the suspense path, we return the wrapper directly. + * In the non-suspense path, we wait for the promise to resolve + * and then set the handle via setState. Suspense relies on + * re-running this function until it succeeds, whereas the synchronous + * form uses a setState to track the value. */ + useEffect(() => { + if (suspense || !wrapper) { + return + } + wrapper.promise + .then(handle => { + setHandle(handle as DocHandle) + }) + .catch(() => { + setHandle(undefined) + }) + }, [suspense, wrapper]) + + if (suspense && wrapper) { + return wrapper.read() as DocHandle + } else { + return handle + } +} diff --git a/packages/automerge-repo-react-hooks/src/useDocHandles.ts b/packages/automerge-repo-react-hooks/src/useDocHandles.ts new file mode 100644 index 000000000..264235177 --- /dev/null +++ b/packages/automerge-repo-react-hooks/src/useDocHandles.ts @@ -0,0 +1,82 @@ +import { AutomergeUrl, DocHandle } from "@automerge/automerge-repo/slim" +import { useState, useEffect } from "react" +import { useRepo } from "./useRepo.js" +import { PromiseWrapper, wrapPromise } from "./wrapPromise.js" +import { wrapperCache } from "./useDocHandle.js" + +interface UseDocHandlesParams { + suspense?: boolean +} + +type DocHandleMap = Map | undefined> + +export function useDocHandles( + ids: AutomergeUrl[], + { suspense = false }: UseDocHandlesParams = {} +): DocHandleMap { + const repo = useRepo() + const [handleMap, setHandleMap] = useState>(() => new Map()) + + const pendingPromises: PromiseWrapper>[] = [] + const nextHandleMap = new Map | undefined>() + + // Check if we need any new wrappers + for (const id of ids) { + let wrapper = 
wrapperCache.get(id) + if (!wrapper) { + try { + const promise = repo.find(id) + wrapper = wrapPromise(promise) + wrapperCache.set(id, wrapper) + } catch (e) { + continue + } + } + + // Try to read each wrapper. + // Update handleMap with any available handles, + // and collect any pending promises + try { + const handle = wrapper.read() as DocHandle + nextHandleMap.set(id, handle) + } catch (e) { + if (e instanceof Promise) { + pendingPromises.push(wrapper as PromiseWrapper>) + } else { + nextHandleMap.set(id, undefined) + } + } + } + + // Suspense is handled quasi-synchronously below by throwing if we still have + // unresolved promises. + useEffect(() => { + if (pendingPromises.length > 0) { + void Promise.allSettled(pendingPromises.map(p => p.promise)).then( + handles => { + handles.forEach(r => { + if (r.status === "fulfilled") { + const h = r.value as DocHandle + nextHandleMap.set(h.url, h) + } + }) + setHandleMap(nextHandleMap) + } + ) + } else { + setHandleMap(nextHandleMap) + } + }, [suspense, ids]) + + // If any promises are pending, suspend with Promise.all + // Note that this behaviour is different from the synchronous + // form where we get gradual load-in of child documents. + // I couldn't find an obvious way of incremental loading with + // a single hook for suspense. + // (But maybe with suspense this hook is less useful?) 
+ if (suspense && pendingPromises.length > 0) { + throw Promise.all(pendingPromises.map(p => p.promise)) + } + + return handleMap +} diff --git a/packages/automerge-repo-react-hooks/src/useDocument.ts b/packages/automerge-repo-react-hooks/src/useDocument.ts index 49e20b115..309147ee2 100644 --- a/packages/automerge-repo-react-hooks/src/useDocument.ts +++ b/packages/automerge-repo-react-hooks/src/useDocument.ts @@ -1,63 +1,100 @@ -import { AnyDocumentId, DocHandle } from "@automerge/automerge-repo/slim" -import { ChangeFn, ChangeOptions, Doc } from "@automerge/automerge/slim/next" -import { useCallback, useEffect, useRef, useState } from "react" -import { useRepo } from "./useRepo.js" +import { AnyDocumentId } from "@automerge/automerge-repo/slim" +import { ChangeFn, ChangeOptions, Doc } from "@automerge/automerge/slim" +import { useCallback, useEffect, useState } from "react" +import { useDocHandle } from "./useDocHandle.js" -/** A hook which returns a document identified by a URL and a function to change the document. +/** + * A hook which returns a document and a function to change it. + * Uses React Suspense for loading states, returning a tuple matching useState pattern. * - * @returns a tuple of the document and a function to change the document. - * The document will be `undefined` if the document is not available in storage or from any peers + * @example + * ```tsx + * function Counter() { + * const [doc, changeDoc] = useDocument<{ count: number }>(docUrl) + * return ( + * + * ) + * } * - * @remarks - * This requires a {@link RepoContext} to be provided by a parent component. 
- * */ + * // Must be wrapped in Suspense boundary + * }> + * + * + * ``` + */ + +interface UseDocumentSuspendingParams { + suspense: true +} +interface UseDocumentSynchronousParams { + suspense: false +} + +type UseDocumentParams = + | UseDocumentSuspendingParams + | UseDocumentSynchronousParams + +export type UseDocumentReturn = [ + Doc, + (changeFn: ChangeFn, options?: ChangeOptions) => void +] + export function useDocument( - id?: AnyDocumentId -): [ - Doc | undefined, - (changeFn: ChangeFn, options?: ChangeOptions | undefined) => void -] { - const repo = useRepo() - const handle = id ? repo.find(id) : null - const handleRef = useRef | null>(handle) - if (handle !== handleRef.current) { - handleRef.current = handle - } + id: AnyDocumentId, + params: UseDocumentSuspendingParams +): UseDocumentReturn +export function useDocument( + id: AnyDocumentId | undefined, + params?: UseDocumentSynchronousParams +): UseDocumentReturn | [undefined, () => void] +export function useDocument( + id: AnyDocumentId | undefined, + params: UseDocumentParams = { suspense: false } +): UseDocumentReturn | [undefined, () => void] { + // @ts-expect-error -- typescript doesn't realize we're discriminating these types the same way in both functions + const handle = useDocHandle(id, params) + // Initialize with current doc state + const [doc, setDoc] = useState | undefined>(() => handle?.doc()) + const [deleteError, setDeleteError] = useState() - // a state value we use to trigger a re-render - const [, setGeneration] = useState(0) - const rerender = () => setGeneration(v => v + 1) + // Reinitialize doc when handle changes + useEffect(() => { + setDoc(handle?.doc()) + }, [handle]) useEffect(() => { - if (!id || !handle) { + if (!handle) { return } - - handleRef.current = handle - handle - .doc() - .then(() => { - rerender() - }) - .catch(e => console.error(e)) - - handle.on("change", rerender) - handle.on("delete", rerender) - const cleanup = () => { - handle.removeListener("change", 
rerender) - handle.removeListener("delete", rerender) + const onChange = () => setDoc(handle.doc()) + const onDelete = () => { + setDeleteError(new Error(`Document ${id} was deleted`)) } - return cleanup - }, [id, handle]) + handle.on("change", onChange) + handle.on("delete", onDelete) + + return () => { + handle.removeListener("change", onChange) + handle.removeListener("delete", onDelete) + } + }, [handle, id]) const changeDoc = useCallback( - (changeFn: ChangeFn, options?: ChangeOptions | undefined) => { - if (!handle) return - handle.change(changeFn, options) + (changeFn: ChangeFn, options?: ChangeOptions) => { + handle!.change(changeFn, options) }, [handle] ) - return [handle?.docSync(), changeDoc] as const + if (deleteError) { + throw deleteError + } + + if (!doc) { + return [undefined, () => {}] + } + return [doc, changeDoc] } diff --git a/packages/automerge-repo-react-hooks/src/useDocuments.ts b/packages/automerge-repo-react-hooks/src/useDocuments.ts index 5ae1f874a..70c47492a 100644 --- a/packages/automerge-repo-react-hooks/src/useDocuments.ts +++ b/packages/automerge-repo-react-hooks/src/useDocuments.ts @@ -1,137 +1,81 @@ -import { - AutomergeUrl, - DocHandle, - DocHandleChangePayload, - DocHandleDeletePayload, - DocumentId, - isValidAutomergeUrl, - parseAutomergeUrl, -} from "@automerge/automerge-repo/slim" -import { useEffect, useMemo, useRef, useState } from "react" -import { useRepo } from "./useRepo.js" +import { AutomergeUrl } from "@automerge/automerge-repo/slim" +import { ChangeFn, ChangeOptions, Doc } from "@automerge/automerge/slim" +import { useCallback, useEffect, useState } from "react" +import { useDocHandles } from "./useDocHandles.js" -/** - * Maintains a map of document states, keyed by DocumentId. Useful for collections of related - * documents. - * Accepts either URLs or document IDs in the input array, but all get converted to IDs - * for the output map. 
- */ -export const useDocuments = (idsOrUrls?: DocId[]) => { - const repo = useRepo() - const ids = useMemo( - () => - idsOrUrls?.map(idOrUrl => { - if (isValidAutomergeUrl(idOrUrl)) { - const { documentId } = parseAutomergeUrl(idOrUrl) - return documentId - } else { - return idOrUrl as DocumentId - } - }) ?? [], - [idsOrUrls] - ) - const prevIds = useRef([]) - const [documents, setDocuments] = useState(() => { - return ids.reduce((docs, id) => { - const handle = repo.find(id) - const doc = handle.docSync() - if (doc) { - docs[id] = doc - } - return docs - }, {} as Record) - }) +type DocMap = Map> +type ChangeDocFn = ( + id: AutomergeUrl, + changeFn: ChangeFn, + options?: ChangeOptions +) => void + +interface UseDocumentsOptions { + suspense?: boolean +} + +export function useDocuments( + ids: AutomergeUrl[], + { suspense = true }: UseDocumentsOptions = {} +): [DocMap, ChangeDocFn] { + const handleMap = useDocHandles(ids, { suspense }) + const [docMap, setDocMap] = useState>(() => new Map()) useEffect(() => { - // These listeners will live for the lifetime of this useEffect - // and be torn down when the useEffect is rerun. 
- const listeners = {} as Record> - const updateDocument = (id: DocId, doc?: T) => { - if (doc) setDocuments(docs => ({ ...docs, [id]: doc })) - } - const addListener = (handle: DocHandle) => { - const id = handle.documentId + const listeners = new Map void>() - // whenever a document changes, update our map - const listenersForDoc: Listeners = { - change: ({ doc }) => updateDocument(id, doc), - delete: () => removeDocument(id), - } - handle.on("change", listenersForDoc.change) - handle.on("delete", listenersForDoc.delete) + handleMap.forEach((handle, id) => { + if (handle) { + const onChange = () => { + setDocMap(prev => { + const next = new Map(prev) + next.set(id, handle.doc()) + return next + }) + } - // store the listener so we can remove it later - listeners[id] = listenersForDoc - } + // Initial state + setDocMap(prev => { + const next = new Map(prev) + next.set(id, handle.doc()) + return next + }) - const removeDocument = (id: DocumentId) => { - // remove the document from the document map - setDocuments(docs => { - const { [id]: _removedDoc, ...remainingDocs } = docs - return remainingDocs - }) - } + handle.on("change", onChange) + listeners.set(id, onChange) + } + }) - // Add a new document to our map - const addNewDocument = (id: DocumentId) => { - const handle = repo.find(id) - if (handle.docSync()) { - updateDocument(id, handle.docSync()) - addListener(handle) - } else { - // As each document loads, update our map - handle - .doc() - .then(doc => { - updateDocument(id, doc) - addListener(handle) - }) - .catch(err => { - console.error(`Error loading document ${id} in useDocuments: `, err) - }) + // Clear docs that are no longer in handleMap + setDocMap(prev => { + const next = new Map(prev) + for (const [id] of next) { + if (!handleMap.has(id)) { + next.delete(id) + } } - } + return next + }) - const teardown = () => { - Object.entries(listeners).forEach(([id, listeners]) => { - const handle = repo.find(id as DocId) - handle.off("change", 
listeners.change) - handle.off("delete", listeners.delete) + return () => { + handleMap.forEach((handle, id) => { + const listener = listeners.get(id) + if (handle && listener) { + handle.removeListener("change", listener) + } }) } + }, [handleMap]) - if (!ids) { - return teardown - } - - for (const id of ids) { - const handle = repo.find(id) - if (prevIds.current.includes(id)) { - // the document was already in our list before. - // we only need to register new listeners. - addListener(handle) - } else { - // This is a new document that was not in our list before. - // We need to update its state in the documents array and register - // new listeners. - addNewDocument(id) + const changeDoc = useCallback( + (id: AutomergeUrl, changeFn: ChangeFn, options?: ChangeOptions) => { + const handle = handleMap.get(id) + if (handle) { + handle.change(changeFn, options) } - } - - // remove any documents that are no longer in the list - const removedIds = prevIds.current.filter(id => !ids.includes(id)) - removedIds.forEach(removeDocument) - - // Update the ref so we remember the old IDs for next time - prevIds.current = ids - - return teardown - }, [ids, repo]) + }, + [handleMap] + ) - return documents + return [docMap, changeDoc] } - -type DocId = DocumentId | AutomergeUrl -type ChangeListener = (p: DocHandleChangePayload) => void -type DeleteListener = (p: DocHandleDeletePayload) => void -type Listeners = { change: ChangeListener; delete: DeleteListener } diff --git a/packages/automerge-repo-react-hooks/src/useHandle.ts b/packages/automerge-repo-react-hooks/src/useHandle.ts deleted file mode 100644 index 27ff2663f..000000000 --- a/packages/automerge-repo-react-hooks/src/useHandle.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { AnyDocumentId, DocHandle } from "@automerge/automerge-repo/slim" -import { useRepo } from "./useRepo.js" - -/** A hook which returns a {@link DocHandle} identified by a URL. 
- * - * @remarks - * This requires a {@link RepoContext} to be provided by a parent component. - */ -export function useHandle(id?: AnyDocumentId): DocHandle | undefined { - const repo = useRepo() - return id ? repo.find(id) : undefined -} diff --git a/packages/automerge-repo-react-hooks/src/wrapPromise.ts b/packages/automerge-repo-react-hooks/src/wrapPromise.ts new file mode 100644 index 000000000..778ded54e --- /dev/null +++ b/packages/automerge-repo-react-hooks/src/wrapPromise.ts @@ -0,0 +1,37 @@ +type Status = "pending" | "success" | "error" + +export type PromiseWrapper = { + promise: Promise + read(): T +} + +export function wrapPromise(promise: Promise): PromiseWrapper { + let status: Status = "pending" + let result: T + let error: Error + + const suspender = promise.then( + data => { + status = "success" + result = data + }, + err => { + status = "error" + error = err + } + ) + + return { + promise, + read(): T { + switch (status) { + case "pending": + throw suspender + case "error": + throw error + case "success": + return result + } + }, + } +} diff --git a/packages/automerge-repo-react-hooks/test/testSetup.ts b/packages/automerge-repo-react-hooks/test/testSetup.ts new file mode 100644 index 000000000..aaaa03c47 --- /dev/null +++ b/packages/automerge-repo-react-hooks/test/testSetup.ts @@ -0,0 +1,7 @@ +import "@testing-library/jest-dom" +import { cleanup } from "@testing-library/react" +import { afterEach } from "vitest" + +afterEach(() => { + cleanup() +}) diff --git a/packages/automerge-repo-react-hooks/test/useDocHandle.test.tsx b/packages/automerge-repo-react-hooks/test/useDocHandle.test.tsx new file mode 100644 index 000000000..d78213949 --- /dev/null +++ b/packages/automerge-repo-react-hooks/test/useDocHandle.test.tsx @@ -0,0 +1,348 @@ +import React, { Suspense } from "react" +import { + AutomergeUrl, + DocHandle, + generateAutomergeUrl, + PeerId, + Repo, +} from "@automerge/automerge-repo" +import { render, screen, waitFor } from 
"@testing-library/react" +import "@testing-library/jest-dom" + +import { describe, expect, it, vi } from "vitest" +import { useDocHandle } from "../src/useDocHandle" +import { RepoContext } from "../src/useRepo" +import { ErrorBoundary } from "react-error-boundary" + +interface ExampleDoc { + foo: string +} + +function getRepoWrapper(repo: Repo) { + return ({ children }) => ( + {children} + ) +} + +describe("useDocHandle", () => { + const repo = new Repo({ + peerId: "bob" as PeerId, + }) + + function setup() { + const handleA = repo.create() + handleA.change(doc => (doc.foo = "A")) + + const handleB = repo.create() + handleB.change(doc => (doc.foo = "B")) + + return { + repo, + handleA, + handleB, + wrapper: getRepoWrapper(repo), + } + } + + const Component = ({ + url, + onHandle, + }: { + url: AutomergeUrl + onHandle: (handle: DocHandle) => void + }) => { + const handle = useDocHandle(url, { suspense: true }) + onHandle(handle) + return null + } + + it("loads a handle", async () => { + const { handleA, wrapper } = setup() + const onHandle = vi.fn() + + render( + + + , + { wrapper } + ) + await waitFor(() => expect(onHandle).toHaveBeenLastCalledWith(handleA)) + }) + + it("updates the handle when the url changes", async () => { + const { wrapper, handleA, handleB } = setup() + const onHandle = vi.fn() + + const { rerender } = render( + , + { wrapper } + ) + await waitFor(() => expect(onHandle).toHaveBeenLastCalledWith(handleA)) + + // set url to doc B + rerender() + await waitFor(() => expect(onHandle).toHaveBeenLastCalledWith(handleB)) + }) + + it("does not return undefined after the url is updated", async () => { + const { wrapper, handleA, handleB } = setup() + const onHandle = vi.fn() + + const { rerender } = render( + , + { wrapper } + ) + await waitFor(() => expect(onHandle).toHaveBeenLastCalledWith(handleA)) + + const onHandle2 = vi.fn() + + // set url to doc B + rerender() + await waitFor(() => expect(onHandle2).not.toHaveBeenCalledWith(undefined)) + }) + + 
it("handles unavailable documents correctly", async () => { + // suppress console.error from the error boundary + const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {}) + + const { repo, wrapper } = await setup() + const url = generateAutomergeUrl() + + render( + Error}> + Loading...}> + { + throw new Error("Should not reach here") + }} + /> + + , + { wrapper } + ) + + // Then wait for the error boundary to render its fallback + await waitFor(() => { + expect(screen.getByTestId("error")).toBeInTheDocument() + // Optional: verify loading is no longer shown + expect(screen.queryByTestId("loading")).not.toBeInTheDocument() + }) + + consoleSpy.mockRestore() + }) + + it("handles slow network correctly", async () => { + const { handleA, repo, wrapper } = await setup() + const onHandle = vi.fn() + + // Mock find to simulate slow network + const originalFind = repo.find.bind(repo) + repo.find = vi.fn().mockImplementation(async (...args) => { + await new Promise(resolve => setTimeout(resolve, 100)) + return originalFind(...args) + }) + + render( + Error}> + Loading...}> + + + , + { wrapper } + ) + + // Verify loading state is shown initially + expect(screen.getByTestId("loading")).toBeInTheDocument() + expect(onHandle).not.toHaveBeenCalled() + + // Wait for successful resolution + await waitFor(() => { + // Loading state should be gone + expect(screen.queryByTestId("loading")).not.toBeInTheDocument() + }) + + // Verify callback was called with correct handle + expect(onHandle).toHaveBeenCalledWith(handleA) + + // Verify error boundary never rendered + expect(screen.queryByTestId("error")).not.toBeInTheDocument() + }) + + it("suspends while loading a handle", async () => { + const { handleA, wrapper } = await setup() + const onHandle = vi.fn() + let promiseResolve: (value: DocHandle) => void + + // Mock find to return a delayed promise + const originalFind = repo.find.bind(repo) + repo.find = vi.fn().mockImplementation( + () => + new Promise(resolve => { 
+ promiseResolve = resolve + }) + ) + + render( + Loading...}> + + , + { wrapper } + ) + + // Should show loading state + expect(screen.getByTestId("loading")).toBeInTheDocument() + expect(onHandle).not.toHaveBeenCalled() + + // Resolve the find + promiseResolve!(await originalFind(handleA.url)) + + // Should show content + await waitFor(() => { + expect(onHandle).toHaveBeenCalledWith(handleA) + // return repo.find to its natural state + repo.find = originalFind + }) + }) + + it("handles rapid url changes during loading", async () => { + const { handleA, handleB, wrapper } = await setup() + const onHandle = vi.fn() + const delays: Record = { + [handleA.url]: 100, + [handleB.url]: 50, + } + + // Mock find to simulate different network delays + const originalFind = repo.find.bind(repo) + repo.find = vi.fn().mockImplementation(async (url: string) => { + await new Promise(resolve => setTimeout(resolve, delays[url])) + return originalFind(url) + }) + + const { rerender } = render( + Loading...}> + + , + { wrapper } + ) + + // Quickly switch to B before A loads + rerender( + Loading...}> + + + ) + + // Should eventually resolve with B, not A + await waitFor(() => { + expect(onHandle).toHaveBeenLastCalledWith(handleB) + expect(onHandle).not.toHaveBeenCalledWith(handleA) + }) + }) + + describe("useDocHandle with suspense: false", () => { + it("returns undefined while loading then resolves to handle", async () => { + const { handleA, repo, wrapper } = await setup() + const onHandle = vi.fn() + + // Mock find to simulate network delay + const originalFind = repo.find.bind(repo) + repo.find = vi.fn().mockImplementation(async (...args) => { + await new Promise(resolve => setTimeout(resolve, 100)) + return originalFind(...args) + }) + + const NonSuspenseComponent = ({ + url, + onHandle, + }: { + url: AutomergeUrl + onHandle: (handle: DocHandle | undefined) => void + }) => { + const handle = useDocHandle(url, { suspense: false }) + onHandle(handle) + return null + } + + render(, 
{ + wrapper, + }) + + // Initially should be called with undefined + expect(onHandle).toHaveBeenCalledWith(undefined) + + // Wait for handle to load + await waitFor(() => { + expect(onHandle).toHaveBeenLastCalledWith(handleA) + }) + + // Restore original find implementation + repo.find = originalFind + }) + + it("handles unavailable documents by returning undefined", async () => { + const { repo, wrapper } = await setup() + const url = generateAutomergeUrl() + const onHandle = vi.fn() + + const NonSuspenseComponent = ({ + url, + onHandle, + }: { + url: AutomergeUrl + onHandle: (handle: DocHandle | undefined) => void + }) => { + const handle = useDocHandle(url, { suspense: false }) + onHandle(handle) + return null + } + + render(, { + wrapper, + }) + + // Should start with undefined + expect(onHandle).toHaveBeenCalledWith(undefined) + + // Should continue to return undefined after attempted load + await waitFor(() => { + expect(onHandle).toHaveBeenLastCalledWith(undefined) + }) + }) + + it("updates the handle when url changes", async () => { + const { wrapper, handleA, handleB } = setup() + const onHandle = vi.fn() + + const NonSuspenseComponent = ({ + url, + onHandle, + }: { + url: AutomergeUrl + onHandle: (handle: DocHandle | undefined) => void + }) => { + const handle = useDocHandle(url, { suspense: false }) + onHandle(handle) + return null + } + + const { rerender } = render( + , + { wrapper } + ) + + // Wait for first handle to load + await waitFor(() => expect(onHandle).toHaveBeenLastCalledWith(handleA)) + + // Change URL + rerender() + + // Should temporarily return to undefined + expect(onHandle).toHaveBeenCalledWith(undefined) + + // Then resolve to new handle + await waitFor(() => expect(onHandle).toHaveBeenLastCalledWith(handleB)) + }) + }) +}) diff --git a/packages/automerge-repo-react-hooks/test/useDocument.test.tsx b/packages/automerge-repo-react-hooks/test/useDocument.test.tsx index 252b9bc4a..310a4209b 100644 --- 
a/packages/automerge-repo-react-hooks/test/useDocument.test.tsx +++ b/packages/automerge-repo-react-hooks/test/useDocument.test.tsx @@ -1,12 +1,22 @@ -import { AutomergeUrl, PeerId, Repo } from "@automerge/automerge-repo" -import { render, waitFor } from "@testing-library/react" -import React from "react" -import { act } from "react-dom/test-utils" +import { + AutomergeUrl, + Doc, + generateAutomergeUrl, + PeerId, + Repo, +} from "@automerge/automerge-repo" +import { render, screen, waitFor } from "@testing-library/react" +import React, { Suspense } from "react" import { describe, expect, it, vi } from "vitest" +import "@testing-library/jest-dom" + import { useDocument } from "../src/useDocument" import { RepoContext } from "../src/useRepo" +import { ErrorBoundary } from "react-error-boundary" -const SLOW_DOC_LOAD_TIME_MS = 10 +interface ExampleDoc { + foo: string +} describe("useDocument", () => { function setup() { @@ -20,30 +30,8 @@ describe("useDocument", () => { const handleB = repo.create() handleB.change(doc => (doc.foo = "B")) - // A doc that takes 10ms to load, to simulate a slow load. - // The time value isn't totally arbitrary; 1ms can cause flaky tests - // presumably because of interations with React's scheduler / batched - // renders, but 10ms seems safe empirically. - const handleSlow = repo.create() - handleSlow.change(doc => (doc.foo = "slow")) - const oldDoc = handleSlow.doc.bind(handleSlow) - let loaded = false - const delay = new Promise(resolve => - setTimeout(() => { - loaded = true - resolve(true) - }, SLOW_DOC_LOAD_TIME_MS) - ) - handleSlow.doc = async () => { - await delay - const result = await oldDoc() - return result - } - - const oldDocSync = handleSlow.docSync.bind(handleSlow) - handleSlow.docSync = () => { - return loaded ? 
oldDocSync() : undefined - } + const handleC = repo.create() + handleC.change(doc => (doc.foo = "C")) const wrapper = ({ children }) => { return ( @@ -55,7 +43,7 @@ describe("useDocument", () => { repo, handleA, handleB, - handleSlow, + handleC, wrapper, } } @@ -64,150 +52,278 @@ describe("useDocument", () => { url, onDoc, }: { - url: AutomergeUrl | undefined - onDoc: (doc: ExampleDoc) => void + url: AutomergeUrl + onDoc: (doc: Doc) => void }) => { - const [doc] = useDocument(url) + const [doc] = useDocument(url, { suspense: true }) onDoc(doc) - return null + return
{doc.foo}
} it("should load a document", async () => { const { handleA, wrapper } = setup() const onDoc = vi.fn() - render(, { wrapper }) - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith({ foo: "A" })) - }) + render( + Loading...}> + + , + { wrapper } + ) - it("should immediately return a document if it has already been loaded", async () => { - const { handleA, wrapper } = setup() - const onDoc = vi.fn() + // First we should see the loading state + expect(screen.getByTestId("loading")).toBeInTheDocument() + + // Wait for content to appear and check it's correct + await waitFor(() => { + expect(screen.getByTestId("content")).toHaveTextContent("A") + }) - render(, { wrapper }) - await waitFor(() => expect(onDoc).not.toHaveBeenCalledWith(undefined)) + // Now check our spy got called with the document + expect(onDoc).toHaveBeenCalledWith({ foo: "A" }) }) it("should update if the doc changes", async () => { const { wrapper, handleA } = setup() const onDoc = vi.fn() - render(, { wrapper }) - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith({ foo: "A" })) - - act(() => handleA.change(doc => (doc.foo = "new value"))) - await waitFor(() => - expect(onDoc).toHaveBeenLastCalledWith({ foo: "new value" }) + render( + Loading...}> + + , + { wrapper } ) + + // Wait for initial render + await waitFor(() => { + expect(screen.getByTestId("content")).toHaveTextContent("A") + }) + expect(onDoc).toHaveBeenCalledWith({ foo: "A" }) + + // Change the document + React.act(() => handleA.change(doc => (doc.foo = "new value"))) + + // Check the update + await waitFor(() => { + expect(screen.getByTestId("content")).toHaveTextContent("new value") + }) + expect(onDoc).toHaveBeenCalledWith({ foo: "new value" }) }) - it("should update if the doc is deleted", async () => { + it("should throw error if the doc is deleted", async () => { + // suppress console.error from the error boundary + const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {}) + const { wrapper, 
handleA } = setup() const onDoc = vi.fn() + const onError = vi.fn() + + render( + Error} + onError={onError} + > + Loading...}> + + + , + { wrapper } + ) + + // Wait for initial render + await waitFor(() => { + expect(screen.getByTestId("content")).toHaveTextContent("A") + }) + + // Delete the document + React.act(() => handleA.delete()) - render(, { wrapper }) - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith({ foo: "A" })) + // Should trigger error boundary + expect(screen.getByTestId("error")).toHaveTextContent("Error") - act(() => handleA.delete()) - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith(undefined)) + consoleSpy.mockRestore() }) - it("should update if the url changes", async () => { + it("should switch documents when url changes", async () => { const { handleA, handleB, wrapper } = setup() const onDoc = vi.fn() - const { rerender } = render(, { - wrapper, + const { rerender } = render( + Loading...}> + + , + { wrapper } + ) + + // Wait for first document + await waitFor(() => { + expect(screen.getByTestId("content")).toHaveTextContent("A") }) - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith(undefined)) + expect(onDoc).toHaveBeenCalledWith({ foo: "A" }) - // set url to doc A - rerender() - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith({ foo: "A" })) + // Switch to second document + rerender( + Loading...}> + + + ) + + // Should show loading then new content + await waitFor(() => { + expect(screen.getByTestId("content")).toHaveTextContent("B") + }) + expect(onDoc).toHaveBeenCalledWith({ foo: "B" }) + }) - // set url to doc B - rerender() - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith({ foo: "B" })) + it("should handle unavailable documents", async () => { + // suppress console.error from the error boundary + const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {}) - // set url to undefined - rerender() - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith(undefined)) + 
const { wrapper, repo } = setup() + + // Create handle for nonexistent document + const url = generateAutomergeUrl() + + render( + Error}> + Loading...}> + + + , + { wrapper } + ) + + waitFor(() => { + expect(screen.getByTestId("error")).toHaveTextContent("Error") + }) + + consoleSpy.mockRestore() }) - it("returns new doc on first render after url changes", async () => { - const { handleA, handleB, wrapper } = setup() + // Test slow-loading document + it("should handle slow-loading documents", async () => { + const { wrapper, repo } = setup() const onDoc = vi.fn() - const { rerender } = render(, { - wrapper, + // Create handle but delay its availability + const slowHandle = repo.create({ foo: "slow" }) + const originalFind = repo.find.bind(repo) + repo.find = vi.fn().mockImplementation(async (...args) => { + await new Promise(resolve => setTimeout(resolve, 100)) + return originalFind(...args) }) - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith(undefined)) - // set url to doc A - rerender() - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith({ foo: "A" })) + render( + Loading...}> + + , + { wrapper } + ) - const onDoc2 = vi.fn() + // Should show loading state + expect(screen.getByTestId("loading")).toBeInTheDocument() - // set url to doc B - rerender() + // Eventually shows content await waitFor(() => { - // no stale data - expect(onDoc2).not.toHaveBeenCalledWith({ foo: "A" }) - // no render with undefined data - expect(onDoc2).not.toHaveBeenCalledWith(undefined) - - // render with new data - expect(onDoc2).toHaveBeenCalledWith({ foo: "B" }) + expect(screen.getByTestId("content")).toHaveTextContent("slow") }) + expect(onDoc).toHaveBeenCalledWith({ foo: "slow" }) }) - it("sets the doc to undefined while the initial load is happening", async () => { - const { handleA, handleSlow, wrapper } = setup() + // Test concurrent document switches + it("should handle rapid document switches correctly", async () => { + const { wrapper, handleA, handleB, 
handleC } = setup() const onDoc = vi.fn() - const { rerender } = render(, { - wrapper, - }) - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith(undefined)) + const { rerender } = render( + Loading...}> + + , + { wrapper } + ) - // start by setting url to doc A - rerender() - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith({ foo: "A" })) + // Quick switches between documents + rerender( + Loading...}> + + + ) + rerender( + Loading...}> + + + ) - // Now we set the URL to a handle that's slow to load. - // The doc should be undefined while the load is happening. - rerender() - await waitFor(() => expect(onDoc).toHaveBeenCalledWith(undefined)) - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith({ foo: "slow" })) + // Should eventually settle on final document + await waitFor(() => { + expect(screen.getByTestId("content")).toHaveTextContent("C") + }) + expect(onDoc).toHaveBeenCalledWith({ foo: "C" }) }) - it("avoids showing stale data", async () => { - const { handleA, handleSlow, wrapper } = setup() + // Test document changes during loading + it("should handle document changes while loading", async () => { + const { wrapper, repo } = setup() const onDoc = vi.fn() - const { rerender } = render(, { - wrapper, + const handle = repo.create({ foo: "initial" }) + let resolveFind: (value: any) => void + const originalFind = repo.find.bind(repo) + repo.find = vi.fn().mockImplementation(async (...args) => { + return new Promise(resolve => { + resolveFind = resolve + }) }) - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith(undefined)) - // Set the URL to a slow doc and then a fast doc. - // We should see the fast doc forever, even after - // the slow doc has had time to finish loading. - rerender() - rerender() - await waitFor(() => expect(onDoc).toHaveBeenLastCalledWith({ foo: "A" })) + render( + Loading...}> + + , + { wrapper } + ) - // wait for the slow doc to finish loading... 
- await pause(SLOW_DOC_LOAD_TIME_MS * 2) + // Modify document while it's still loading + handle.change(doc => (doc.foo = "changed")) - // we didn't update the doc to the slow doc, so it should still be A - expect(onDoc).not.toHaveBeenCalledWith({ foo: "slow" }) + // Resolve the find + resolveFind!(await originalFind(handle.url)) + + // Should show final state + await waitFor(() => { + expect(screen.getByTestId("content")).toHaveTextContent("changed") + }) + expect(onDoc).toHaveBeenCalledWith({ foo: "changed" }) }) -}) -const pause = (ms: number) => new Promise(resolve => setTimeout(resolve, ms)) + // Test cleanup on unmount + it("should cleanup subscriptions on unmount", async () => { + const { wrapper, handleA } = setup() + const { unmount } = render( + Loading...}> + + , + { wrapper } + ) -interface ExampleDoc { - foo: string -} + // Wait for initial render + await waitFor(() => { + expect(screen.getByTestId("content")).toBeInTheDocument() + }) + + // Spy on removeListener + const removeListenerSpy = vi.spyOn(handleA, "removeListener") + + // Unmount component + unmount() + + // Should have cleaned up listeners + expect(removeListenerSpy).toHaveBeenCalledWith( + "change", + expect.any(Function) + ) + expect(removeListenerSpy).toHaveBeenCalledWith( + "delete", + expect.any(Function) + ) + }) +}) diff --git a/packages/automerge-repo-react-hooks/test/useDocuments.test.tsx b/packages/automerge-repo-react-hooks/test/useDocuments.test.tsx index 51b1ef16c..333f6bb82 100644 --- a/packages/automerge-repo-react-hooks/test/useDocuments.test.tsx +++ b/packages/automerge-repo-react-hooks/test/useDocuments.test.tsx @@ -1,250 +1,490 @@ -import { - AutomergeUrl, - DocumentId, - PeerId, - Repo, - stringifyAutomergeUrl, -} from "@automerge/automerge-repo" -import { DummyStorageAdapter } from "@automerge/automerge-repo/helpers/DummyStorageAdapter.js" -import { act, render, waitFor } from "@testing-library/react" -import React from "react" +import React, { Suspense } from "react" 
+import { AutomergeUrl, Repo, PeerId } from "@automerge/automerge-repo" +import { render, act, waitFor } from "@testing-library/react" import { describe, expect, it, vi } from "vitest" import { useDocuments } from "../src/useDocuments" import { RepoContext } from "../src/useRepo" +import { ErrorBoundary } from "react-error-boundary" + +interface ExampleDoc { + foo: string + counter?: number + nested?: { + value: string + } +} + +function getRepoWrapper(repo: Repo) { + return ({ children }) => ( + {children} + ) +} describe("useDocuments", () => { - const setup = () => { - const repo = new Repo({ - peerId: "alice" as PeerId, - network: [], - storage: new DummyStorageAdapter(), - }) + const repo = new Repo({ + peerId: "bob" as PeerId, + }) - const wrapper = ({ children }) => { - return ( - {children} - ) - } + function setup() { + const handleA = repo.create() + handleA.change(doc => (doc.foo = "A")) - let documentValues: Record = {} + const handleB = repo.create() + handleB.change(doc => (doc.foo = "B")) - const documentIds = range(10).map(i => { - const value = { foo: i } - const handle = repo.create(value) - documentValues[handle.documentId] = value - return handle.documentId - }) + const handleC = repo.create() + handleC.change(doc => (doc.foo = "C")) - return { repo, wrapper, documentIds, documentValues } + return { + repo, + handleA, + handleB, + handleC, + handles: [handleA, handleB, handleC], + urls: [handleA.url, handleB.url, handleC.url], + wrapper: getRepoWrapper(repo), + } } - const Component = ({ - idsOrUrls, - onDocs, + const DocumentsComponent = ({ + urls, + onState, }: { - idsOrUrls: (DocumentId | AutomergeUrl)[] - onDocs: (documents: Record) => void + urls: AutomergeUrl[] + onState: (docs: Map, change: any) => void }) => { - const documents = useDocuments(idsOrUrls) - onDocs(documents) + const [docs, change] = useDocuments(urls) + onState(docs, change) return null } - it("returns a collection of documents, given a list of ids", async () => { - const 
{ documentIds, wrapper } = setup() - const onDocs = vi.fn() + it("should sync documents and handle changes", async () => { + const { handleA, wrapper } = setup() + const onState = vi.fn() - render(, { wrapper }) - await waitFor(() => - expect(onDocs).toHaveBeenCalledWith( - Object.fromEntries(documentIds.map((id, i) => [id, { foo: i }])) - ) + const Wrapped = () => ( + Error!}> + Loading...}> + + + ) - }) - it("returns a collection of loaded documents immediately, given a list of ids", async () => { - const { documentIds, wrapper } = setup() - const onDocs = vi.fn() + await act(async () => { + render(, { wrapper }) + }) + + await act(async () => { + await Promise.resolve() + }) - render(, { wrapper }) + // Initial state + expect(onState).toHaveBeenCalled() + const [docs] = onState.mock.lastCall || [] + expect(docs.get(handleA.url)?.foo).toBe("A") - expect(onDocs).not.toHaveBeenCalledWith({}) - expect(onDocs).toHaveBeenCalledWith( - Object.fromEntries(documentIds.map((id, i) => [id, { foo: i }])) - ) + // Make a change + const [, change] = onState.mock.lastCall || [] + await act(async () => { + change(handleA.url, doc => (doc.foo = "Changed")) + await Promise.resolve() + }) + + // Verify change was synced + const [finalDocs] = onState.mock.lastCall || [] + expect(finalDocs.get(handleA.url)?.foo).toBe("Changed") }) - it("cleans up listeners properly", async () => { - const { documentIds, wrapper, repo } = setup() - const onDocs = vi.fn() - - // The goal here is to check that we're not leaking listeners. - // We do this by mounting the component a set number of times and then - // checking the number of listeners on the handle at the end. 
- const numMounts = 5 // arbitrary number here - for (let i = 0; i < numMounts; i++) { - const { unmount } = render( - , - { wrapper } - ) - await waitFor(() => unmount()) - } + it("should handle multiple documents and parallel changes", async () => { + const { handleA, handleB, wrapper } = setup() + const onState = vi.fn() + + const Wrapped = () => ( + Error!}> + Loading...}> + + + + ) - for (const id of documentIds) { - const handle = repo.find(id) + await act(async () => { + render(, { wrapper }) + }) - // You might expect we'd check that it's equal to 0 here. - // but it turns out that automerge-repo registers an internal - // change handler which remain on the doc even after unmount, - // so we can't do that. - // By comparing to numMounts, we ensure that if mount+unmount - // does leak a listener, it'll fail this test. - expect(handle.listenerCount("change")).toBeLessThan(numMounts) - } + await act(async () => { + await Promise.resolve() + }) + + // Check initial state + const [docs, change] = onState.mock.lastCall || [] + expect(docs.get(handleA.url)?.foo).toBe("A") + expect(docs.get(handleB.url)?.foo).toBe("B") + + // Make parallel changes + await act(async () => { + change(handleA.url, doc => { + doc.counter = 1 + doc.nested = { value: "A1" } + }) + change(handleB.url, doc => { + doc.counter = 2 + doc.nested = { value: "B1" } + }) + await Promise.resolve() + }) + + // Verify both changes were synced + const [finalDocs] = onState.mock.lastCall || [] + expect(finalDocs.get(handleA.url)).toEqual({ + foo: "A", + counter: 1, + nested: { value: "A1" }, + }) + expect(finalDocs.get(handleB.url)).toEqual({ + foo: "B", + counter: 2, + nested: { value: "B1" }, + }) }) - it("updates documents when they change", async () => { - const { repo, documentIds, wrapper } = setup() - const onDocs = vi.fn() + it("should handle document removal and cleanup listeners", async () => { + const { handleA, handleB, wrapper } = setup() + const onState = vi.fn() - render(, { wrapper }) 
- await waitFor(() => - expect(onDocs).toHaveBeenCalledWith( - Object.fromEntries(documentIds.map((id, i) => [id, { foo: i }])) - ) + const Wrapped = ({ urls }: { urls: AutomergeUrl[] }) => ( + Error!}> + Loading...}> + + + ) - act(() => { - // multiply the value of foo in each document by 10 - documentIds.forEach(id => { - const handle = repo.find(id) - handle.change(s => (s.foo *= 10)) - }) + const { rerender, unmount } = render( + , + { wrapper } + ) + + await act(async () => { + await Promise.resolve() }) - await waitFor(() => - expect(onDocs).toHaveBeenCalledWith( - Object.fromEntries(documentIds.map((id, i) => [id, { foo: i * 10 }])) - ) + + // Initial state + let [docs] = onState.mock.lastCall || [] + expect(docs.size).toBe(2) + + // Remove one document + rerender() + + await act(async () => { + await Promise.resolve() + }) + + // Check document was removed + docs = onState.mock.lastCall?.[0] + expect(docs.size).toBe(1) + expect(docs.has(handleA.url)).toBe(true) + expect(docs.has(handleB.url)).toBe(false) + + // Test cleanup + unmount() + + // Make a change - should not trigger update + const callCount = onState.mock.calls.length + handleA.change(doc => (doc.foo = "Changed after unmount")) + expect(onState.mock.calls.length).toBe(callCount) + }) + + it("should handle rapid successive changes", async () => { + const { handleA, wrapper } = setup() + const onState = vi.fn() + + const Wrapped = () => ( + Error!}> + Loading...}> + + + ) + + await act(async () => { + render(, { wrapper }) + }) + + await act(async () => { + await Promise.resolve() + }) + + const [, change] = onState.mock.lastCall || [] + + // Make rapid changes + await act(async () => { + for (let i = 0; i < 5; i++) { + change(handleA.url, doc => { + doc.counter = i + }) + } + await Promise.resolve() + }) + + // Should have final value + const [finalDocs] = onState.mock.lastCall || [] + expect(finalDocs.get(handleA.url)?.counter).toBe(4) }) - it("updates documents when they change, if URLs are 
passed in", async () => { - const { repo, documentIds, wrapper } = setup() - const onDocs = vi.fn() - const documentUrls = documentIds.map(id => stringifyAutomergeUrl(id)) + describe("useDocuments with suspense: false", () => { + const repo = new Repo({ + peerId: "bob" as PeerId, + }) + + function setup() { + const handleA = repo.create() + handleA.change(doc => (doc.foo = "A")) + + const handleB = repo.create() + handleB.change(doc => (doc.foo = "B")) + + const handleC = repo.create() + handleC.change(doc => (doc.foo = "C")) + + return { + repo, + handleA, + handleB, + handleC, + handles: [handleA, handleB, handleC], + urls: [handleA.url, handleB.url, handleC.url], + wrapper: getRepoWrapper(repo), + } + } + + const NonSuspendingDocumentsComponent = ({ + urls, + onState, + }: { + urls: AutomergeUrl[] + onState: (docs: Map, change: any) => void + }) => { + const [docs, change] = useDocuments(urls, { suspense: false }) + onState(docs, change) + return null + } - render(, { wrapper }) - await waitFor(() => - expect(onDocs).toHaveBeenCalledWith( - Object.fromEntries(documentIds.map((id, i) => [id, { foo: i }])) + it("should start with empty map and load documents asynchronously", async () => { + const { handleA, wrapper } = setup() + const onState = vi.fn() + + const Wrapped = () => ( + Error!}> + + ) - ) - act(() => { - // multiply the value of foo in each document by 10 - documentIds.forEach(id => { - const handle = repo.find(id) - handle.change(s => (s.foo *= 10)) + render(, { wrapper }) + + // Initial state should be empty map + expect(onState).toHaveBeenCalled() + let [docs] = onState.mock.lastCall || [] + expect(docs.size).toBe(0) + + // Wait for document to load + await act(async () => { + await Promise.resolve() }) + + // Document should now be loaded + docs = onState.mock.lastCall?.[0] + expect(docs.get(handleA.url)?.foo).toBe("A") }) - await waitFor(() => - expect(onDocs).toHaveBeenCalledWith( - Object.fromEntries(documentIds.map((id, i) => [id, { foo: i * 10 
}])) + + it("should handle loading multiple documents asynchronously", async () => { + const { handleA, handleB, wrapper } = setup() + const onState = vi.fn() + + const Wrapped = () => ( + Error!}> + + ) - ) - }) - it(`removes documents when they're removed from the list of ids`, async () => { - const { documentIds, wrapper } = setup() - const onDocs = vi.fn() + render(, { wrapper }) - const { rerender } = render( - , - { wrapper } - ) - await waitFor(() => - expect(onDocs).toHaveBeenCalledWith( - Object.fromEntries(documentIds.map((id, i) => [id, { foo: i }])) + // Initial state should be empty + let [docs] = onState.mock.lastCall || [] + expect(docs.size).toBe(0) + + // Wait for documents to load + await act(async () => { + await Promise.resolve() + }) + + // Check loaded state + docs = onState.mock.lastCall?.[0] + expect(docs.size).toBe(2) + expect(docs.get(handleA.url)?.foo).toBe("A") + expect(docs.get(handleB.url)?.foo).toBe("B") + + // Make changes after loading + const [, change] = onState.mock.lastCall || [] + await act(async () => { + change(handleA.url, doc => { + doc.counter = 1 + doc.nested = { value: "A1" } + }) + change(handleB.url, doc => { + doc.counter = 2 + doc.nested = { value: "B1" } + }) + }) + + // Verify changes + const [finalDocs] = onState.mock.lastCall || [] + expect(finalDocs.get(handleA.url)).toEqual({ + foo: "A", + counter: 1, + nested: { value: "A1" }, + }) + expect(finalDocs.get(handleB.url)).toEqual({ + foo: "B", + counter: 2, + nested: { value: "B1" }, + }) + }) + + it("should handle document removal with pending loads", async () => { + const { handleA, handleB, wrapper } = setup() + const onState = vi.fn() + + const Wrapped = ({ urls }: { urls: AutomergeUrl[] }) => ( + Error!}> + + ) - ) - // remove the first document - rerender() - // 👆 Note that this only works because documentIds.slice(1) is a different - // object from documentIds. If we modified documentIds directly, the hook - // wouldn't re-run. 
- await waitFor(() => - expect(onDocs).toHaveBeenCalledWith( - Object.fromEntries( - documentIds.map((id, i) => [id, { foo: i }]).slice(1) - ) + const { rerender } = render( + , + { wrapper } ) - ) - }) - it(`keeps updating documents after the list has changed`, async () => { - const { documentIds, wrapper, repo } = setup() - const onDocs = vi.fn() + // Initial state should be empty + let [docs] = onState.mock.lastCall || [] + expect(docs.size).toBe(0) - const { rerender } = render( - , - { wrapper } - ) - await waitFor(() => - expect(onDocs).toHaveBeenCalledWith( - Object.fromEntries(documentIds.map((id, i) => [id, { foo: i }])) - ) - ) + // Remove one document before load completes + rerender() - // remove the first document - act(() => { - rerender() - }) - // 👆 Note that this only works because documentIds.slice(1) is a different - // object from documentIds. If we modified documentIds directly, the hook - // wouldn't re-run. - await waitFor(() => - expect(onDocs).toHaveBeenCalledWith( - Object.fromEntries( - documentIds.map((id, i) => [id, { foo: i }]).slice(1) - ) + // Wait for remaining document to load + await act(async () => { + await Promise.resolve() + }) + + // Should only have loaded the remaining document + waitFor(() => { + docs = onState.mock.lastCall?.[0] + expect(docs.size).toBe(1) + expect(docs.has(handleA.url)).toBe(true) + expect(docs.has(handleB.url)).toBe(false) + }) + }) + + it("should cleanup listeners when unmounting with pending loads", async () => { + const { handleA, wrapper } = setup() + const onState = vi.fn() + + const Wrapped = () => ( + Error!}> + + ) - ) - // update all the docs that are still in the list + const { unmount } = render(, { wrapper }) + + // Initial state empty + expect(onState.mock.lastCall?.[0].size).toBe(0) - act(() => { - // multiply the value of foo in each document by 10 - documentIds.slice(1).forEach(id => { - const handle = repo.find(id) - handle.change(s => (s.foo *= 10)) + // Unmount before load completes + 
unmount() + + // Wait for what would have been load completion + await act(async () => { + await Promise.resolve() }) + + // Should not have received any updates after unmount + const callCount = onState.mock.calls.length + handleA.change(doc => (doc.foo = "Changed after unmount")) + expect(onState.mock.calls.length).toBe(callCount) }) - await waitFor(() => - expect(onDocs).toHaveBeenCalledWith( - Object.fromEntries( - documentIds.map((id, i) => [id, { foo: i * 10 }]).slice(1) - ) + it("should handle document changes during loading", async () => { + const { handleA, wrapper } = setup() + const onState = vi.fn() + + const Wrapped = () => ( + Error!}> + + ) - ) - act(() => { - // multiply the value of foo in each document by 10 - documentIds.slice(1).forEach(id => { - const handle = repo.find(id) - handle.change(s => (s.foo *= 10)) + render(, { wrapper }) + + // Make a change while document is loading + handleA.change(doc => (doc.counter = 1)) + + // Wait for load + await act(async () => { + await Promise.resolve() + }) + + // Should have latest state + const [docs] = onState.mock.lastCall || [] + expect(docs.get(handleA.url)).toEqual({ + foo: "A", + counter: 1, }) }) - await waitFor(() => { - expect(onDocs).toHaveBeenCalledWith( - Object.fromEntries( - documentIds.map((id, i) => [id, { foo: i * 100 }]).slice(1) - ) + it("should handle invalid urls with empty map", async () => { + const { wrapper } = setup() + const onState = vi.fn() + const invalidUrl = "invalid-url" as AutomergeUrl + + const Wrapped = () => ( + Error!}> + + ) + + render(, { wrapper }) + + // Initial state empty + let [docs] = onState.mock.lastCall || [] + expect(docs.size).toBe(0) + + // Should remain empty after attempted load + await act(async () => { + await Promise.resolve() + }) + + docs = onState.mock.lastCall?.[0] + expect(docs.size).toBe(0) }) }) }) - -const range = (n: number) => [...Array(n).keys()] diff --git a/packages/automerge-repo-react-hooks/test/useHandle.test.tsx 
b/packages/automerge-repo-react-hooks/test/useHandle.test.tsx deleted file mode 100644 index b49cea146..000000000 --- a/packages/automerge-repo-react-hooks/test/useHandle.test.tsx +++ /dev/null @@ -1,128 +0,0 @@ -import React from "react" -import { - AutomergeUrl, - DocHandle, - PeerId, - Repo, -} from "@automerge/automerge-repo" -import { DummyStorageAdapter } from "@automerge/automerge-repo/test/helpers/DummyStorageAdapter" -import { render, waitFor } from "@testing-library/react" -import { describe, expect, it, vi } from "vitest" -import { useHandle } from "../src/useHandle" -import { RepoContext } from "../src/useRepo" - -interface ExampleDoc { - foo: string -} - -function getRepoWrapper(repo: Repo) { - return ({ children }) => ( - {children} - ) -} - -describe("useHandle", () => { - const repo = new Repo({ - peerId: "bob" as PeerId, - }) - - function setup() { - const handleA = repo.create() - handleA.change(doc => (doc.foo = "A")) - - const handleB = repo.create() - handleB.change(doc => (doc.foo = "B")) - - return { - repo, - handleA, - handleB, - wrapper: getRepoWrapper(repo), - } - } - - const Component = ({ - url, - onHandle, - }: { - url: AutomergeUrl | undefined - onHandle: (handle: DocHandle | undefined) => void - }) => { - const handle = useHandle(url) - onHandle(handle) - return null - } - - it("loads a handle", async () => { - const { handleA, wrapper } = setup() - const onHandle = vi.fn() - - render(, { wrapper }) - await waitFor(() => expect(onHandle).toHaveBeenLastCalledWith(handleA)) - }) - - it("returns undefined when no url given", async () => { - const { wrapper } = setup() - const onHandle = vi.fn() - - render(, { wrapper }) - await waitFor(() => expect(onHandle).toHaveBeenLastCalledWith(undefined)) - }) - - it("updates the handle when the url changes", async () => { - const { wrapper, handleA, handleB } = setup() - const onHandle = vi.fn() - - const { rerender } = render( - , - { wrapper } - ) - await waitFor(() => 
expect(onHandle).toHaveBeenLastCalledWith(undefined)) - - // set url to doc A - rerender() - await waitFor(() => expect(onHandle).toHaveBeenLastCalledWith(handleA)) - - // set url to doc B - rerender() - await waitFor(() => expect(onHandle).toHaveBeenLastCalledWith(handleB)) - - // set url to undefined - rerender() - await waitFor(() => expect(onHandle).toHaveBeenLastCalledWith(undefined)) - }) - - it("does not return undefined after the url is updated", async () => { - const { wrapper, handleA, handleB } = setup() - const onHandle = vi.fn() - - const { rerender } = render( - , - { wrapper } - ) - await waitFor(() => expect(onHandle).toHaveBeenLastCalledWith(handleA)) - - const onHandle2 = vi.fn() - - // set url to doc B - rerender() - await waitFor(() => expect(onHandle2).not.toHaveBeenCalledWith(undefined)) - }) - - it("does not return a handle for a different url after the url is updated", async () => { - const { wrapper, handleA, handleB } = setup() - const onHandle = vi.fn() - - const { rerender } = render( - , - { wrapper } - ) - await waitFor(() => expect(onHandle).toHaveBeenLastCalledWith(handleA)) - - const onHandle2 = vi.fn() - - // set url to doc B - rerender() - await waitFor(() => expect(onHandle2).not.toHaveBeenCalledWith(handleA)) - }) -}) diff --git a/packages/automerge-repo-react-hooks/vitest.config.ts b/packages/automerge-repo-react-hooks/vitest.config.ts new file mode 100644 index 000000000..b40538709 --- /dev/null +++ b/packages/automerge-repo-react-hooks/vitest.config.ts @@ -0,0 +1,23 @@ +import { defineConfig } from "vitest/config" +import path from "path" + +export default defineConfig({ + test: { + globals: true, + setupFiles: [path.join(__dirname, "./test/testSetup.ts")], + environment: "jsdom", + coverage: { + provider: "v8", + reporter: ["lcov", "text", "html"], + skipFull: true, + exclude: [ + "**/fuzz", + "**/helpers", + "**/coverage", + "examples/**/*", + "docs/**/*", + "**/test/**/*", + ], + }, + }, +}) diff --git 
a/packages/automerge-repo-storage-indexeddb/package.json b/packages/automerge-repo-storage-indexeddb/package.json index b37e1d921..a5e8b778a 100644 --- a/packages/automerge-repo-storage-indexeddb/package.json +++ b/packages/automerge-repo-storage-indexeddb/package.json @@ -1,6 +1,6 @@ { "name": "@automerge/automerge-repo-storage-indexeddb", - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "description": "IndexedDB storage adapter for Automerge Repo", "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo-storage-indexeddb", "author": "Peter van Hardenberg ", diff --git a/packages/automerge-repo-storage-nodefs/package.json b/packages/automerge-repo-storage-nodefs/package.json index 41920faaa..7977cf670 100644 --- a/packages/automerge-repo-storage-nodefs/package.json +++ b/packages/automerge-repo-storage-nodefs/package.json @@ -1,6 +1,6 @@ { "name": "@automerge/automerge-repo-storage-nodefs", - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "description": "Simple Node filesystem storage adapter for Automerge Repo", "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo-storage-nodefs", "author": "Peter van Hardenberg ", diff --git a/packages/automerge-repo-svelte-store/package.json b/packages/automerge-repo-svelte-store/package.json index 9b04b3129..5a422e06d 100644 --- a/packages/automerge-repo-svelte-store/package.json +++ b/packages/automerge-repo-svelte-store/package.json @@ -1,13 +1,13 @@ { "name": "@automerge/automerge-repo-svelte-store", - "version": "2.0.0-alpha.11", - "description": "A Svelte store containing your automerge documentsj", + "version": "2.0.0-alpha.22", + "description": "A Svelte store containing your automerge documents", "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo-svelte-store", "license": "MIT", "type": "module", "main": "dist/index.js", "scripts": { - "build": "tsc", + "build": "", 
"watch": "npm-watch build" }, "peerDependencies": { diff --git a/packages/automerge-repo-svelte-store/src/index.ts b/packages/automerge-repo-svelte-store/src/index.ts index 18dec430d..79b1a9c0e 100644 --- a/packages/automerge-repo-svelte-store/src/index.ts +++ b/packages/automerge-repo-svelte-store/src/index.ts @@ -87,7 +87,7 @@ export function setContextRepo(repo: Repo) { export function document(documentId: AutomergeUrl, repo?: Repo) { repo = repo ?? getContextRepo() const handle = repo.find(documentId) - const { set, subscribe } = writable>(handle.docSync(), () => { + const { set, subscribe } = writable>(handle.doc(), () => { const onChange = (h: DocHandleChangePayload) => set(h.doc) handle.addListener("change", onChange) return () => handle.removeListener("change", onChange) diff --git a/packages/automerge-repo/README.md b/packages/automerge-repo/README.md index c6f202855..e412ed5e6 100644 --- a/packages/automerge-repo/README.md +++ b/packages/automerge-repo/README.md @@ -40,9 +40,9 @@ A `Repo` exposes these methods: - `create(initialValue: T?)` Creates a new `Automerge.Doc` and returns a `DocHandle` for it. Accepts an optional initial value for the document. Produces an empty document (potentially violating the type!) otherwise. -- `find(docId: DocumentId)` +- `find(docId: DocumentId): Promise>` Looks up a given document either on the local machine or (if necessary) over any configured - networks. + networks. Returns a promise that resolves when the document is loaded or throws if load fails. - `delete(docId: DocumentId)` Deletes the local copy of a document from the local cache and local storage. _This does not currently delete the document from any other peers_. - `import(binary: Uint8Array)` @@ -57,10 +57,9 @@ A `Repo` exposes these methods: A `DocHandle` is a wrapper around an `Automerge.Doc`. Its primary function is to dispatch changes to the document. 
-- `handle.doc()` or `handle.docSync()` - Returns a `Promise>` that will contain the current value of the document. - it waits until the document has finished loading and/or synchronizing over the network before - returning a value. +- `handle.doc()` + Returns a `Doc` that will contain the current value of the document. + Throws an error if the document is deleted. - `handle.change((doc: T) => void)` Calls the provided callback with an instrumented mutable object representing the document. Any changes made to the document will be recorded and distributed to diff --git a/packages/automerge-repo/fuzz/fuzz.ts b/packages/automerge-repo/fuzz/fuzz.ts index 37d732846..55e79b4eb 100644 --- a/packages/automerge-repo/fuzz/fuzz.ts +++ b/packages/automerge-repo/fuzz/fuzz.ts @@ -107,9 +107,9 @@ for (let i = 0; i < 100000; i++) { }) await pause(0) - const a = await aliceRepo.find(doc.url).doc() - const b = await bobRepo.find(doc.url).doc() - const c = await charlieRepo.find(doc.url).doc() + const a = (await aliceRepo.find(doc.url)).doc() + const b = (await bobRepo.find(doc.url)).doc() + const c = (await charlieRepo.find(doc.url)).doc() assert.deepStrictEqual(a, b, "A and B should be equal") assert.deepStrictEqual(b, c, "B and C should be equal") diff --git a/packages/automerge-repo/package.json b/packages/automerge-repo/package.json index 60b27cea0..599f5a480 100644 --- a/packages/automerge-repo/package.json +++ b/packages/automerge-repo/package.json @@ -1,6 +1,6 @@ { "name": "@automerge/automerge-repo", - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "description": "A repository object to manage a collection of automerge documents", "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo", "author": "Peter van Hardenberg ", diff --git a/packages/automerge-repo/src/AutomergeUrl.ts b/packages/automerge-repo/src/AutomergeUrl.ts index 1bc6641a1..32d317ed0 100644 --- a/packages/automerge-repo/src/AutomergeUrl.ts +++ 
b/packages/automerge-repo/src/AutomergeUrl.ts @@ -4,26 +4,54 @@ import type { BinaryDocumentId, DocumentId, AnyDocumentId, + UrlHeads, } from "./types.js" + import * as Uuid from "uuid" import bs58check from "bs58check" +import { + uint8ArrayFromHexString, + uint8ArrayToHexString, +} from "./helpers/bufferFromHex.js" + +import type { Heads as AutomergeHeads } from "@automerge/automerge/slim" export const urlPrefix = "automerge:" +interface ParsedAutomergeUrl { + /** unencoded DocumentId */ + binaryDocumentId: BinaryDocumentId + /** bs58 encoded DocumentId */ + documentId: DocumentId + /** Optional array of heads, if specified in URL */ + heads?: UrlHeads + /** Optional hex array of heads, in Automerge core format */ + hexHeads?: string[] // AKA: heads +} + /** Given an Automerge URL, returns the DocumentId in both base58check-encoded form and binary form */ -export const parseAutomergeUrl = (url: AutomergeUrl) => { +export const parseAutomergeUrl = (url: AutomergeUrl): ParsedAutomergeUrl => { + const [baseUrl, headsSection, ...rest] = url.split("#") + if (rest.length > 0) { + throw new Error("Invalid URL: contains multiple heads sections") + } const regex = new RegExp(`^${urlPrefix}(\\w+)$`) - const [, docMatch] = url.match(regex) || [] + const [, docMatch] = baseUrl.match(regex) || [] const documentId = docMatch as DocumentId const binaryDocumentId = documentIdToBinary(documentId) if (!binaryDocumentId) throw new Error("Invalid document URL: " + url) - return { - /** unencoded DocumentId */ - binaryDocumentId, - /** encoded DocumentId */ - documentId, - } + if (headsSection === undefined) return { binaryDocumentId, documentId } + + const heads = (headsSection === "" ? 
[] : headsSection.split("|")) as UrlHeads + const hexHeads = heads.map(head => { + try { + return uint8ArrayToHexString(bs58check.decode(head)) + } catch (e) { + throw new Error(`Invalid head in URL: ${head}`) + } + }) + return { binaryDocumentId, hexHeads, documentId, heads } } /** @@ -32,38 +60,78 @@ export const parseAutomergeUrl = (url: AutomergeUrl) => { */ export const stringifyAutomergeUrl = ( arg: UrlOptions | DocumentId | BinaryDocumentId -) => { - const documentId = - arg instanceof Uint8Array || typeof arg === "string" - ? arg - : "documentId" in arg - ? arg.documentId - : undefined +): AutomergeUrl => { + if (arg instanceof Uint8Array || typeof arg === "string") { + return (urlPrefix + + (arg instanceof Uint8Array + ? binaryToDocumentId(arg) + : arg)) as AutomergeUrl + } + + const { documentId, heads = undefined } = arg + + if (documentId === undefined) + throw new Error("Invalid documentId: " + documentId) const encodedDocumentId = documentId instanceof Uint8Array ? binaryToDocumentId(documentId) - : typeof documentId === "string" - ? documentId - : undefined + : documentId + + let url = `${urlPrefix}${encodedDocumentId}` + + if (heads !== undefined) { + heads.forEach(head => { + try { + bs58check.decode(head) + } catch (e) { + throw new Error(`Invalid head: ${head}`) + } + }) + url += "#" + heads.join("|") + } - if (encodedDocumentId === undefined) - throw new Error("Invalid documentId: " + documentId) + return url as AutomergeUrl +} - return (urlPrefix + encodedDocumentId) as AutomergeUrl +/** Helper to extract just the heads from a URL if they exist */ +export const getHeadsFromUrl = (url: AutomergeUrl): string[] | undefined => { + const { heads } = parseAutomergeUrl(url) + return heads } +export const anyDocumentIdToAutomergeUrl = (id: AnyDocumentId) => + isValidAutomergeUrl(id) + ? id + : isValidDocumentId(id) + ? stringifyAutomergeUrl({ documentId: id }) + : isValidUuid(id) + ? 
parseLegacyUUID(id) + : undefined + /** * Given a string, returns true if it is a valid Automerge URL. This function also acts as a type * discriminator in Typescript. */ export const isValidAutomergeUrl = (str: unknown): str is AutomergeUrl => { - if (typeof str !== "string") return false - if (!str || !str.startsWith(urlPrefix)) return false - const automergeUrl = str as AutomergeUrl + if (typeof str !== "string" || !str || !str.startsWith(urlPrefix)) + return false try { - const { documentId } = parseAutomergeUrl(automergeUrl) - return isValidDocumentId(documentId) + const { documentId, heads } = parseAutomergeUrl(str as AutomergeUrl) + if (!isValidDocumentId(documentId)) return false + if ( + heads && + !heads.every(head => { + try { + bs58check.decode(head) + return true + } catch { + return false + } + }) + ) + return false + return true } catch { return false } @@ -97,6 +165,12 @@ export const documentIdToBinary = (docId: DocumentId) => export const binaryToDocumentId = (docId: BinaryDocumentId) => bs58check.encode(docId) as DocumentId +export const encodeHeads = (heads: AutomergeHeads): UrlHeads => + heads.map(h => bs58check.encode(uint8ArrayFromHexString(h))) as UrlHeads + +export const decodeHeads = (heads: UrlHeads): AutomergeHeads => + heads.map(h => uint8ArrayToHexString(bs58check.decode(h))) as AutomergeHeads + export const parseLegacyUUID = (str: string) => { if (!Uuid.validate(str)) return undefined const documentId = Uuid.parse(str) as BinaryDocumentId @@ -141,4 +215,5 @@ export const interpretAsDocumentId = (id: AnyDocumentId) => { type UrlOptions = { documentId: DocumentId | BinaryDocumentId + heads?: UrlHeads } diff --git a/packages/automerge-repo/src/DocHandle.ts b/packages/automerge-repo/src/DocHandle.ts index 4e6266da6..2b86c2caa 100644 --- a/packages/automerge-repo/src/DocHandle.ts +++ b/packages/automerge-repo/src/DocHandle.ts @@ -2,11 +2,15 @@ import * as A from "@automerge/automerge/slim/next" import debug from "debug" import { 
EventEmitter } from "eventemitter3" import { assertEvent, assign, createActor, setup, waitFor } from "xstate" -import { stringifyAutomergeUrl } from "./AutomergeUrl.js" +import { + decodeHeads, + encodeHeads, + stringifyAutomergeUrl, +} from "./AutomergeUrl.js" import { encode } from "./helpers/cbor.js" import { headsAreSame } from "./helpers/headsAreSame.js" import { withTimeout } from "./helpers/withTimeout.js" -import type { AutomergeUrl, DocumentId, PeerId } from "./types.js" +import type { AutomergeUrl, DocumentId, PeerId, UrlHeads } from "./types.js" import { StorageId } from "./storage/types.js" /** @@ -28,6 +32,9 @@ export class DocHandle extends EventEmitter> { /** The XState actor running our state machine. */ #machine + /** If set, this handle will only show the document at these heads */ + #fixedHeads?: UrlHeads + /** The last known state of our document. */ #prevDocState: T = A.init() @@ -36,7 +43,7 @@ export class DocHandle extends EventEmitter> { #timeoutDelay = 60_000 /** A dictionary mapping each peer to the last heads we know they have. 
*/ - #remoteHeads: Record = {} + #remoteHeads: Record = {} /** @hidden */ constructor( @@ -49,6 +56,10 @@ export class DocHandle extends EventEmitter> { this.#timeoutDelay = options.timeoutDelay } + if ("heads" in options) { + this.#fixedHeads = options.heads + } + const doc = A.init() this.#log = debug(`automerge-repo:dochandle:${this.documentId.slice(0, 5)}`) @@ -72,12 +83,12 @@ export class DocHandle extends EventEmitter> { this.emit("delete", { handle: this }) return { doc: A.init() } }), + onUnavailable: assign(() => { + return { doc: A.init() } + }), onUnload: assign(() => { return { doc: A.init() } }), - onUnavailable: () => { - this.emit("unavailable", { handle: this }) - }, }, }).createMachine({ /** @xstate-layout N4IgpgJg5mDOIC5QAoC2BDAxgCwJYDswBKAYgFUAFAEQEEAVAUQG0AGAXUVAAcB7WXAC64e+TiAAeiAOwAOAKwA6ACxSAzKqks1ATjlTdAGhABPRAFolAJksKN2y1KtKAbFLla5AX09G0WPISkVAwAMgyMrBxIILz8QiJikggAjCzOijKqLEqqybJyLizaRqYIFpbJtro5Uo7J2o5S3r4YOATECrgQADZgJADCAEoM9MzsYrGCwqLRSeoyCtra8pa5adquySXmDjY5ac7JljLJeepKzSB+bYGdPX0AYgCSAHJUkRN8UwmziM7HCgqyVcUnqcmScmcMm2ZV2yiyzkOx1OalUFx8V1aAQ63R46AgBCgJGGAEUyAwAMp0D7RSbxGagJKHFgKOSWJTJGRSCosCpKaEmRCqbQKU5yXINeTaer6LwY67YogKXH4wkkKgAeX6AH1hjQqABNGncL70xKIJQ5RY5BHOJag6wwpRyEWImQVeT1aWrVSXBXtJUqgn4Ik0ADqNCedG1L3CYY1gwA0saYqbpuaEG4pKLksKpFDgcsCjDhTnxTKpTLdH6sQGFOgAO7oKYhl5gAQNngAJwA1iRY3R40ndSNDSm6enfpm5BkWAVkvy7bpuTCKq7ndZnfVeSwuTX-HWu2AAI4AVzgQhD6q12rILxoADVIyEaAAhMLjtM-RmIE4LVSQi4nLLDIGzOCWwLKA0cgyLBoFWNy+43B0R5nheaqajqepjuMtJfgyEh-FoixqMCoKqOyhzgYKCDOq6UIeuCSxHOoSGKgop74OgABuzbdOgABGvTXlho5GrhJpxJOP4pLulT6KoMhpJY2hzsWNF0QobqMV6LG+pc+A8BAcBiP6gSfFJ36EQgKksksKxrHamwwmY7gLKB85QjBzoAWxdZdL0FnfARST8ooLC7qoTnWBU4pyC5ViVMKBQaHUDQuM4fm3EGhJBWaU7-CysEAUp3LpEpWw0WYRw2LmqzgqciIsCxWUdI2zaXlAbYdt2PZ5dJ1n5jY2iJY1ikOIcMJHCyUWHC62hRZkUVNPKta3Kh56wJ1-VWUyzhFc64JWJCtQNBBzhQW4cHwbsrVKpxPF8YJgV4ZZIWIKkiKiiNSkqZYWjzCWaQ5hFh0AcCuR3QoR74qUknBRmzholpv3OkpRQNNRpTzaKTWKbIWR5FDxm9AIkA7e9skUYCWayLILBZGoLkUSKbIyIdpxHPoyTeN4QA */ @@ -176,7 +187,10 @@ export class 
DocHandle extends EventEmitter> { #checkForChanges(before: A.Doc, after: A.Doc) { const beforeHeads = A.getHeads(before) const afterHeads = A.getHeads(after) - const docChanged = !headsAreSame(afterHeads, beforeHeads) + const docChanged = !headsAreSame( + encodeHeads(afterHeads), + encodeHeads(beforeHeads) + ) if (docChanged) { this.emit("heads-changed", { handle: this, doc: after }) @@ -202,7 +216,10 @@ export class DocHandle extends EventEmitter> { /** Our documentId in Automerge URL form. */ get url(): AutomergeUrl { - return stringifyAutomergeUrl({ documentId: this.documentId }) + return stringifyAutomergeUrl({ + documentId: this.documentId, + heads: this.#fixedHeads, + }) } /** @@ -264,7 +281,7 @@ export class DocHandle extends EventEmitter> { * This is the recommended way to access a handle's document. Note that this waits for the handle * to be ready if necessary. If loading (or synchronization) fails, this will never resolve. */ - async doc( + async legacyAsyncDoc( /** states to wait for, such as "LOADING". mostly for internal use. */ awaitStates: HandleState[] = ["ready", "unavailable"] ) { @@ -280,21 +297,28 @@ export class DocHandle extends EventEmitter> { } /** - * Synchronously returns the current state of the Automerge document this handle manages, or - * undefined. Consider using `await handle.doc()` instead. Check `isReady()`, or use `whenReady()` - * if you want to make sure loading is complete first. - * - * Not to be confused with the SyncState of the document, which describes the state of the - * synchronization process. + * Returns the current state of the Automerge document this handle manages. * - * Note that `undefined` is not a valid Automerge document, so the return from this function is - * unambigous. + * @returns the current document + * @throws on deleted and unavailable documents * - * @returns the current document, or undefined if the document is not ready. 
*/ + doc() { + if (!this.isReady()) throw new Error("DocHandle is not ready") + if (this.#fixedHeads) { + return A.view(this.#doc, decodeHeads(this.#fixedHeads)) + } + return this.#doc + } + + /** + * + * @deprecated */ docSync() { - if (!this.isReady()) return undefined - else return this.#doc + console.warn( + "docSync is deprecated. Use doc() instead. This function will be removed as part of the 2.0 release." + ) + return this.doc() } /** @@ -302,11 +326,12 @@ export class DocHandle extends EventEmitter> { * This precisely defines the state of a document. * @returns the current document's heads, or undefined if the document is not ready */ - heads(): A.Heads | undefined { - if (!this.isReady()) { - return undefined + heads(): UrlHeads { + if (!this.isReady()) throw new Error("DocHandle is not ready") + if (this.#fixedHeads) { + return this.#fixedHeads } - return A.getHeads(this.#doc) + return encodeHeads(A.getHeads(this.#doc)) } begin() { @@ -314,9 +339,7 @@ export class DocHandle extends EventEmitter> { } /** - * Creates a fixed "view" of an automerge document at the given point in time represented - * by the `heads` passed in. The return value is the same type as docSync() and will return - * undefined if the object hasn't finished loading. + * Returns an array of all past "heads" for the document in topological order. * * @remarks * A point-in-time in an automerge document is an *array* of heads since there may be @@ -325,20 +348,22 @@ export class DocHandle extends EventEmitter> { * history views would be quite large under concurrency (every thing in each branch against each other). * There might be a clever way to think about this, but we haven't found it yet, so for now at least * we present a single traversable view which excludes concurrency. - * @returns The individual heads for every change in the document. + * @returns UrlHeads[] - The individual heads for every change in the document. Each item is a tagged string[1]. 
*/ - history(): A.Heads[] | undefined { + history(): UrlHeads[] | undefined { if (!this.isReady()) { return undefined } // This just returns all the heads as individual strings. - return A.topoHistoryTraversal(this.#doc).map(h => [h]) as A.Heads[] + return A.topoHistoryTraversal(this.#doc).map(h => + encodeHeads([h]) + ) as UrlHeads[] } /** * Creates a fixed "view" of an automerge document at the given point in time represented - * by the `heads` passed in. The return value is the same type as docSync() and will return + * by the `heads` passed in. The return value is the same type as doc() and will return * undefined if the object hasn't finished loading. * * @remarks @@ -346,13 +371,24 @@ export class DocHandle extends EventEmitter> { * of Automerge doesn't check types at runtime, so if you go back to an old set of heads * that doesn't match the heads here, Typescript will not save you. * - * @returns An Automerge.Doc at the point in time. + * @argument heads - The heads to view the document at. See history(). + * @returns DocHandle at the time of `heads` */ - view(heads: A.Heads): A.Doc | undefined { + view(heads: UrlHeads): DocHandle { if (!this.isReady()) { - return undefined + throw new Error( + `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before calling view().` + ) } - return A.view(this.#doc, heads) + // Create a new handle with the same documentId but fixed heads + const handle = new DocHandle(this.documentId, { + heads, + timeoutDelay: this.#timeoutDelay, + }) + handle.update(() => A.clone(this.#doc)) + handle.doneLoading() + + return handle } /** @@ -360,19 +396,46 @@ export class DocHandle extends EventEmitter> { * if applied. * * @remarks - * We allow specifying both a from/to heads or just a single comparison point, in which case - * the base will be the current document heads. 
+ * We allow specifying either: + * - Two sets of heads to compare directly + * - A single set of heads to compare against our current heads + * - Another DocHandle to compare against (which must share history with this document) * - * @returns Automerge patches that go from one document state to the other. Use view() to get the full state. + * @throws Error if the documents don't share history or if either document is not ready + * @returns Automerge patches that go from one document state to the other */ - diff(first: A.Heads, second?: A.Heads): A.Patch[] | undefined { + diff(first: UrlHeads | DocHandle, second?: UrlHeads): A.Patch[] { if (!this.isReady()) { - return undefined + throw new Error( + `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before calling diff().` + ) + } + + const doc = this.#doc + if (!doc) throw new Error("Document not available") + + // If first argument is a DocHandle + if (first instanceof DocHandle) { + if (!first.isReady()) { + throw new Error("Cannot diff against a handle that isn't ready") + } + const otherHeads = first.heads() + if (!otherHeads) throw new Error("Other document's heads not available") + + // Create a temporary merged doc to verify shared history and compute diff + const mergedDoc = A.merge(A.clone(doc), first.doc()!) + // Use the merged doc to compute the diff + return A.diff( + mergedDoc, + decodeHeads(this.heads()!), + decodeHeads(otherHeads) + ) } - // We allow only one set of heads to be specified, in which case we use the doc's heads - const from = second ? first : this.heads() || [] // because we guard above this should always have useful data + + // Otherwise treat as heads + const from = second ? first : ((this.heads() || []) as UrlHeads) const to = second ? 
second : first - return A.diff(this.#doc, from, to) + return A.diff(doc, decodeHeads(from), decodeHeads(to)) } /** @@ -390,11 +453,15 @@ export class DocHandle extends EventEmitter> { if (!this.isReady()) { return undefined } + if (!change) { change = this.heads()![0] } // we return undefined instead of null by convention in this API - return A.inspectChange(this.#doc, change) || undefined + return ( + A.inspectChange(this.#doc, decodeHeads([change] as UrlHeads)[0]) || + undefined + ) } /** @@ -420,13 +487,13 @@ export class DocHandle extends EventEmitter> { * Called by the repo either when a doc handle changes or we receive new remote heads. * @hidden */ - setRemoteHeads(storageId: StorageId, heads: A.Heads) { + setRemoteHeads(storageId: StorageId, heads: UrlHeads) { this.#remoteHeads[storageId] = heads this.emit("remote-heads", { storageId, heads }) } /** Returns the heads of the storageId. */ - getRemoteHeads(storageId: StorageId): A.Heads | undefined { + getRemoteHeads(storageId: StorageId): UrlHeads | undefined { return this.#remoteHeads[storageId] } @@ -451,6 +518,13 @@ export class DocHandle extends EventEmitter> { `DocHandle#${this.documentId} is in ${this.state} and not ready. Check \`handle.isReady()\` before accessing the document.` ) } + + if (this.#fixedHeads) { + throw new Error( + `DocHandle#${this.documentId} is in view-only mode at specific heads. Use clone() to create a new document from this state.` + ) + } + this.#machine.send({ type: UPDATE, payload: { callback: doc => A.change(doc, options, callback) }, @@ -462,22 +536,29 @@ export class DocHandle extends EventEmitter> { * @returns A set of heads representing the concurrent change that was made. */ changeAt( - heads: A.Heads, + heads: UrlHeads, callback: A.ChangeFn, options: A.ChangeOptions = {} - ): string[] | undefined { + ): UrlHeads[] | undefined { if (!this.isReady()) { throw new Error( `DocHandle#${this.documentId} is not ready. 
Check \`handle.isReady()\` before accessing the document.` ) } - let resultHeads: string[] | undefined = undefined + if (this.#fixedHeads) { + throw new Error( + `DocHandle#${this.documentId} is in view-only mode at specific heads. Use clone() to create a new document from this state.` + ) + } + let resultHeads: UrlHeads | undefined = undefined this.#machine.send({ type: UPDATE, payload: { callback: doc => { - const result = A.changeAt(doc, heads, options, callback) - resultHeads = result.newHeads || undefined + const result = A.changeAt(doc, decodeHeads(heads), options, callback) + resultHeads = result.newHeads + ? encodeHeads(result.newHeads) + : undefined return result.newDoc }, }, @@ -502,10 +583,12 @@ export class DocHandle extends EventEmitter> { if (!this.isReady() || !otherHandle.isReady()) { throw new Error("Both handles must be ready to merge") } - const mergingDoc = otherHandle.docSync() - if (!mergingDoc) { - throw new Error("The document to be merged in is falsy, aborting.") + if (this.#fixedHeads) { + throw new Error( + `DocHandle#${this.documentId} is in view-only mode at specific heads. Use clone() to create a new document from this state.` + ) } + const mergingDoc = otherHandle.doc() this.update(doc => { return A.merge(doc, mergingDoc) @@ -577,6 +660,9 @@ export type DocHandleOptions = | { isNew?: false + // An optional point in time to lock the document to. + heads?: UrlHeads + /** The number of milliseconds before we mark this document as unavailable if we don't have it and nobody shares it with us. 
*/ timeoutDelay?: number } @@ -588,7 +674,6 @@ export interface DocHandleEvents { "heads-changed": (payload: DocHandleEncodedChangePayload) => void change: (payload: DocHandleChangePayload) => void delete: (payload: DocHandleDeletePayload) => void - unavailable: (payload: DocHandleUnavailablePayload) => void "ephemeral-message": (payload: DocHandleEphemeralMessagePayload) => void "ephemeral-message-outbound": ( payload: DocHandleOutboundEphemeralMessagePayload @@ -640,7 +725,7 @@ export interface DocHandleOutboundEphemeralMessagePayload { /** Emitted when we have new remote heads for this document */ export interface DocHandleRemoteHeadsPayload { storageId: StorageId - heads: A.Heads + heads: UrlHeads } // STATE MACHINE TYPES & CONSTANTS diff --git a/packages/automerge-repo/src/FindProgress.ts b/packages/automerge-repo/src/FindProgress.ts new file mode 100644 index 000000000..c1842e54d --- /dev/null +++ b/packages/automerge-repo/src/FindProgress.ts @@ -0,0 +1,48 @@ +import { DocHandle } from "./DocHandle.js" + +export type FindProgressState = + | "loading" + | "ready" + | "failed" + | "aborted" + | "unavailable" + +interface FindProgressBase { + state: FindProgressState + handle: DocHandle +} + +interface FindProgressLoading extends FindProgressBase { + state: "loading" + progress: number +} + +interface FindProgressReady extends FindProgressBase { + state: "ready" +} + +interface FindProgressFailed extends FindProgressBase { + state: "failed" + error: Error +} + +interface FindProgressUnavailable extends FindProgressBase { + state: "unavailable" +} + +interface FindProgressAborted extends FindProgressBase { + state: "aborted" +} + +export type FindProgress = + | FindProgressLoading + | FindProgressReady + | FindProgressFailed + | FindProgressUnavailable + | FindProgressAborted + +export type FindProgressWithMethods = FindProgress & { + next: () => Promise> + // TODO: i don't like this allowableStates + untilReady: (allowableStates: string[]) => Promise> +} diff 
--git a/packages/automerge-repo/src/RemoteHeadsSubscriptions.ts b/packages/automerge-repo/src/RemoteHeadsSubscriptions.ts index 6862c7d5a..170ecace4 100644 --- a/packages/automerge-repo/src/RemoteHeadsSubscriptions.ts +++ b/packages/automerge-repo/src/RemoteHeadsSubscriptions.ts @@ -1,6 +1,5 @@ -import { next as A } from "@automerge/automerge/slim" import { EventEmitter } from "eventemitter3" -import { DocumentId, PeerId } from "./types.js" +import { DocumentId, PeerId, UrlHeads } from "./types.js" import { RemoteHeadsChanged, RemoteSubscriptionControlMessage, @@ -12,7 +11,7 @@ import debug from "debug" export type RemoteHeadsSubscriptionEventPayload = { documentId: DocumentId storageId: StorageId - remoteHeads: A.Heads + remoteHeads: UrlHeads timestamp: number } @@ -21,7 +20,7 @@ export type NotifyRemoteHeadsPayload = { targetId: PeerId documentId: DocumentId storageId: StorageId - heads: A.Heads + heads: UrlHeads timestamp: number } @@ -216,7 +215,7 @@ export class RemoteHeadsSubscriptions extends EventEmitter= timestamp) { continue } else { - remote.set(storageId as StorageId, { timestamp, heads }) + remote.set(storageId as StorageId, { + timestamp, + heads: heads as UrlHeads, + }) changedHeads.push({ documentId, storageId: storageId as StorageId, - remoteHeads: heads, + remoteHeads: heads as UrlHeads, timestamp, }) } @@ -371,5 +373,5 @@ export class RemoteHeadsSubscriptions extends EventEmitter { sharePolicy, isEphemeral = storage === undefined, enableRemoteHeadsGossiping = false, + denylist = [], }: RepoConfig = {}) { super() this.#remoteHeadsGossipingEnabled = enableRemoteHeadsGossiping @@ -89,7 +109,7 @@ export class Repo extends EventEmitter { // SYNCHRONIZER // The synchronizer uses the network subsystem to keep documents in sync with peers. 
- this.synchronizer = new CollectionSynchronizer(this) + this.synchronizer = new CollectionSynchronizer(this, denylist) // When the synchronizer emits messages, send them to peers this.synchronizer.on("message", message => { @@ -97,6 +117,9 @@ export class Repo extends EventEmitter { networkSubsystem.send(message) }) + // Forward metrics from doc synchronizers + this.synchronizer.on("metrics", event => this.emit("doc-metrics", event)) + if (this.#remoteHeadsGossipingEnabled) { this.synchronizer.on("open-doc", ({ peerId, documentId }) => { this.#remoteHeadsSubscriptions.subscribePeerToDoc(peerId, documentId) @@ -106,6 +129,12 @@ export class Repo extends EventEmitter { // STORAGE // The storage subsystem has access to some form of persistence, and deals with save and loading documents. const storageSubsystem = storage ? new StorageSubsystem(storage) : undefined + if (storageSubsystem) { + storageSubsystem.on("document-loaded", event => + this.emit("doc-metrics", { type: "doc-loaded", ...event }) + ) + } + this.storageSubsystem = storageSubsystem // NETWORK @@ -168,16 +197,20 @@ export class Repo extends EventEmitter { const heads = handle.getRemoteHeads(storageId) const haveHeadsChanged = message.syncState.theirHeads && - (!heads || !headsAreSame(heads, message.syncState.theirHeads)) + (!heads || + !headsAreSame(heads, encodeHeads(message.syncState.theirHeads))) if (haveHeadsChanged && message.syncState.theirHeads) { - handle.setRemoteHeads(storageId, message.syncState.theirHeads) + handle.setRemoteHeads( + storageId, + encodeHeads(message.syncState.theirHeads) + ) if (storageId && this.#remoteHeadsGossipingEnabled) { this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged( message.documentId, storageId, - message.syncState.theirHeads + encodeHeads(message.syncState.theirHeads) ) } } @@ -229,18 +262,8 @@ export class Repo extends EventEmitter { handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate)) } - handle.on("unavailable", () => { - 
this.#log("document unavailable", { documentId: handle.documentId }) - this.emit("unavailable-document", { - documentId: handle.documentId, - }) - }) - // Register the document with the synchronizer. This advertises our interest in the document. - this.synchronizer.addDocument(handle.documentId) - - // Preserve the old event in case anyone was using it. - this.emit("document", { handle }) + this.synchronizer.addDocument(handle) } #receiveMessage(message: RepoMessage) { @@ -371,8 +394,6 @@ export class Repo extends EventEmitter { * Any peers this `Repo` is connected to for whom `sharePolicy` returns `true` will * be notified of the newly created DocHandle. * - * @throws if the cloned handle is not yet ready or if - * `clonedHandle.docSync()` returns `undefined` (i.e. the handle is unavailable). */ clone(clonedHandle: DocHandle) { if (!clonedHandle.isReady()) { @@ -382,11 +403,7 @@ export class Repo extends EventEmitter { ) } - const sourceDoc = clonedHandle.docSync() - if (!sourceDoc) { - throw new Error("Cloned handle doesn't have a document.") - } - + const sourceDoc = clonedHandle.doc() const handle = this.create() handle.update(() => { @@ -397,60 +414,198 @@ export class Repo extends EventEmitter { return handle } - /** - * Retrieves a document by id. It gets data from the local system, but also emits a `document` - * event to advertise interest in the document. - */ - find( - /** The url or documentId of the handle to retrieve */ - id: AnyDocumentId - ): DocHandle { - const documentId = interpretAsDocumentId(id) + findWithProgress( + id: AnyDocumentId, + options: AbortOptions = {} + ): FindProgressWithMethods | FindProgress { + const { signal } = options + const abortPromise = abortable(signal) - // If we have the handle cached, return it + const { documentId, heads } = isValidAutomergeUrl(id) + ? 
parseAutomergeUrl(id) + : { documentId: interpretAsDocumentId(id), heads: undefined } + + // Check cache first - return plain FindStep for terminal states if (this.#handleCache[documentId]) { - if (this.#handleCache[documentId].isUnavailable()) { - // this ensures that the event fires after the handle has been returned - setTimeout(() => { - this.#handleCache[documentId].emit("unavailable", { - handle: this.#handleCache[documentId], - }) - }) + const handle = this.#handleCache[documentId] + if (handle.state === UNAVAILABLE) { + const result = { + state: "unavailable" as const, + error: new Error(`Document ${id} is unavailable`), + handle, + } + return result + } + if (handle.state === DELETED) { + return { + state: "failed", + error: new Error(`Document ${id} was deleted`), + handle, + } + } + if (handle.state === READY) { + // If we already have the handle, return it immediately (or a view of the handle if heads are specified) + return { + state: "ready", + // TODO: this handle needs to be cached (or at least avoid running clone) + handle: heads ? handle.view(heads) : handle, + } } - return this.#handleCache[documentId] } - // If we don't already have the handle, make an empty one and try loading it - const handle = this.#getHandle({ - documentId, - }) as DocHandle + // the generator takes over `this`, so we need an alias to the repo this + // eslint-disable-next-line @typescript-eslint/no-this-alias + const that = this + async function* progressGenerator(): AsyncGenerator> { + try { + const handle = that.#getHandle({ documentId }) + yield { state: "loading", progress: 25, handle } - // Loading & network is going to be asynchronous no matter what, - // but we want to return the handle immediately. - const attemptLoad = this.storageSubsystem - ? this.storageSubsystem.loadDoc(handle.documentId) - : Promise.resolve(null) + const loadingPromise = await (that.storageSubsystem + ? 
that.storageSubsystem.loadDoc(handle.documentId) + : Promise.resolve(null)) + + const loadedDoc = await Promise.race([loadingPromise, abortPromise]) - attemptLoad - .then(async loadedDoc => { if (loadedDoc) { - // uhhhh, sorry if you're reading this because we were lying to the type system handle.update(() => loadedDoc as Automerge.Doc) handle.doneLoading() + yield { state: "loading", progress: 50, handle } } else { - // we want to wait for the network subsystem to be ready before - // we request the document. this prevents entering unavailable during initialization. - await this.networkSubsystem.whenReady() + await Promise.race([that.networkSubsystem.whenReady(), abortPromise]) handle.request() + yield { state: "loading", progress: 75, handle } } - this.#registerHandleWithSubsystems(handle) - }) - .catch(err => { - this.#log("error waiting for network", { err }) - }) + + that.#registerHandleWithSubsystems(handle) + + await Promise.race([ + handle.whenReady([READY, UNAVAILABLE]), + abortPromise, + ]) + + if (handle.state === UNAVAILABLE) { + yield { state: "unavailable", handle } + } + if (handle.state === DELETED) { + throw new Error(`Document ${id} was deleted`) + } + + yield { state: "ready", handle } + } catch (error) { + yield { + state: "failed", + error: error instanceof Error ? 
error : new Error(String(error)), + handle, + } + } + } + + const iterator = progressGenerator() + + const next = async () => { + const result = await iterator.next() + return { ...result.value, next } + } + + const untilReady = async (allowableStates: string[]) => { + for await (const state of iterator) { + if (allowableStates.includes(state.handle.state)) { + return state.handle + } + if (state.state === "unavailable") { + throw new Error(`Document ${id} is unavailable`) + } + if (state.state === "ready") return state.handle + if (state.state === "failed") throw state.error + } + throw new Error("Iterator completed without reaching ready state") + } + + const handle = this.#getHandle({ documentId }) + const initial = { state: "loading" as const, progress: 0, handle } + return { ...initial, next, untilReady } + } + + async find( + id: AnyDocumentId, + options: RepoFindOptions & AbortOptions = {} + ): Promise> { + const { allowableStates = ["ready"], signal } = options + const progress = this.findWithProgress(id, { signal }) + + /*if (allowableStates.includes(progress.state)) { + console.log("returning early") + return progress.handle + }*/ + + if ("untilReady" in progress) { + this.#registerHandleWithSubsystems(progress.handle) + return progress.untilReady(allowableStates) + } else { + return progress.handle + } + } + + /** + * Loads a document without waiting for ready state + */ + async #loadDocument(documentId: DocumentId): Promise> { + // If we have the handle cached, return it + if (this.#handleCache[documentId]) { + return this.#handleCache[documentId] + } + + // If we don't already have the handle, make an empty one and try loading it + const handle = this.#getHandle({ documentId }) + const loadedDoc = await (this.storageSubsystem + ? this.storageSubsystem.loadDoc(handle.documentId) + : Promise.resolve(null)) + + if (loadedDoc) { + // We need to cast this to because loadDoc operates in . 
+ // This is really where we ought to be validating the input matches . + handle.update(() => loadedDoc as Automerge.Doc) + handle.doneLoading() + } else { + // Because the network subsystem might still be booting up, we wait + // here so that we don't immediately give up loading because we're still + // making our initial connection to a sync server. + await this.networkSubsystem.whenReady() + handle.request() + } + + this.#registerHandleWithSubsystems(handle) return handle } + /** + * Retrieves a document by id. It gets data from the local system, but also emits a `document` + * event to advertise interest in the document. + */ + async findClassic( + /** The url or documentId of the handle to retrieve */ + id: AnyDocumentId, + options: RepoFindOptions & AbortOptions = {} + ): Promise> { + const documentId = interpretAsDocumentId(id) + const { allowableStates, signal } = options + + return Promise.race([ + (async () => { + const handle = await this.#loadDocument(documentId) + if (!allowableStates) { + await handle.whenReady([READY, UNAVAILABLE]) + if (handle.state === UNAVAILABLE && !signal?.aborted) { + throw new Error(`Document ${id} is unavailable`) + } + } + return handle + })(), + abortable(signal), + ]) + } + delete( /** The url or documentId of the handle to delete */ id: AnyDocumentId @@ -475,8 +630,7 @@ export class Repo extends EventEmitter { const documentId = interpretAsDocumentId(id) const handle = this.#getHandle({ documentId }) - const doc = await handle.doc() - if (!doc) return undefined + const doc = handle.doc() return Automerge.save(doc) } @@ -530,15 +684,46 @@ export class Repo extends EventEmitter { : Object.values(this.#handleCache) await Promise.all( handles.map(async handle => { - const doc = handle.docSync() - if (!doc) { - return - } - return this.storageSubsystem!.saveDoc(handle.documentId, doc) + return this.storageSubsystem!.saveDoc(handle.documentId, handle.doc()) }) ) } + /** + * Removes a DocHandle from the handleCache. 
+ * @hidden this API is experimental and may change. + * @param documentId - documentId of the DocHandle to remove from handleCache, if present in cache. + * @returns Promise + */ + async removeFromCache(documentId: DocumentId) { + if (!this.#handleCache[documentId]) { + this.#log( + `WARN: removeFromCache called but handle not found in handleCache for documentId: ${documentId}` + ) + return + } + const handle = this.#getHandle({ documentId }) + await handle.whenReady([READY, UNLOADED, DELETED, UNAVAILABLE]) + const doc = handle.doc() + // because this is an internal-ish function, we'll be extra careful about undefined docs here + if (doc) { + if (handle.isReady()) { + handle.unload() + } else { + this.#log( + `WARN: removeFromCache called but handle for documentId: ${documentId} in unexpected state: ${handle.state}` + ) + } + delete this.#handleCache[documentId] + // TODO: remove document from synchronizer when removeDocument is implemented + // this.synchronizer.removeDocument(documentId) + } else { + this.#log( + `WARN: removeFromCache called but doc undefined for documentId: ${documentId}` + ) + } + } + shutdown(): Promise { this.networkSubsystem.adapters.forEach(adapter => { adapter.disconnect() @@ -575,6 +760,13 @@ export interface RepoConfig { * Whether to enable the experimental remote heads gossiping feature */ enableRemoteHeadsGossiping?: boolean + + /** + * A list of automerge URLs which should never be loaded regardless of what + * messages are received or what the share policy is. This is useful to avoid + * loading documents that are known to be too resource intensive. 
+ */ + denylist?: AutomergeUrl[] } /** A function that determines whether we should share a document with a peer @@ -598,6 +790,11 @@ export interface RepoEvents { "delete-document": (arg: DeleteDocumentPayload) => void /** A document was marked as unavailable (we don't have it and none of our peers have it) */ "unavailable-document": (arg: DeleteDocumentPayload) => void + "doc-metrics": (arg: DocMetrics) => void +} + +export interface RepoFindOptions { + allowableStates?: string[] } export interface DocumentPayload { @@ -607,3 +804,17 @@ export interface DocumentPayload { export interface DeleteDocumentPayload { documentId: DocumentId } + +export type DocMetrics = + | DocSyncMetrics + | { + type: "doc-loaded" + documentId: DocumentId + durationMillis: number + numOps: number + numChanges: number + } + | { + type: "doc-denied" + documentId: DocumentId + } diff --git a/packages/automerge-repo/src/helpers/abortable.ts b/packages/automerge-repo/src/helpers/abortable.ts new file mode 100644 index 000000000..adf2092ad --- /dev/null +++ b/packages/automerge-repo/src/helpers/abortable.ts @@ -0,0 +1,61 @@ +/** + * Creates a promise that rejects when the signal is aborted. + * + * @remarks + * This utility creates a promise that rejects when the provided AbortSignal is aborted. + * It's designed to be used with Promise.race() to make operations abortable. 
+ * + * @example + * ```typescript + * const controller = new AbortController(); + * + * try { + * const result = await Promise.race([ + * fetch('https://api.example.com/data'), + * abortable(controller.signal) + * ]); + * } catch (err) { + * if (err.name === 'AbortError') { + * console.log('The operation was aborted'); + * } + * } + * + * // Later, to abort: + * controller.abort(); + * ``` + * + * @param signal - An AbortSignal that can be used to abort the operation + * @param cleanup - Optional cleanup function that will be called if aborted + * @returns A promise that rejects with AbortError when the signal is aborted + * @throws {DOMException} With name "AbortError" when aborted + */ +export function abortable( + signal?: AbortSignal, + cleanup?: () => void +): Promise { + if (signal?.aborted) { + throw new DOMException("Operation aborted", "AbortError") + } + + if (!signal) { + return new Promise(() => {}) // Never resolves + } + + return new Promise((_, reject) => { + signal.addEventListener( + "abort", + () => { + cleanup?.() + reject(new DOMException("Operation aborted", "AbortError")) + }, + { once: true } + ) + }) +} + +/** + * Include this type in an options object to pass an AbortSignal to a function. 
+ */ +export interface AbortOptions { + signal?: AbortSignal +} diff --git a/packages/automerge-repo/src/helpers/bufferFromHex.ts b/packages/automerge-repo/src/helpers/bufferFromHex.ts new file mode 100644 index 000000000..293223ce0 --- /dev/null +++ b/packages/automerge-repo/src/helpers/bufferFromHex.ts @@ -0,0 +1,14 @@ +export const uint8ArrayFromHexString = (hexString: string): Uint8Array => { + if (hexString.length % 2 !== 0) { + throw new Error("Hex string must have an even length") + } + const bytes = new Uint8Array(hexString.length / 2) + for (let i = 0; i < hexString.length; i += 2) { + bytes[i >> 1] = parseInt(hexString.slice(i, i + 2), 16) + } + return bytes +} + +export const uint8ArrayToHexString = (data: Uint8Array): string => { + return Array.from(data, byte => byte.toString(16).padStart(2, "0")).join("") +} diff --git a/packages/automerge-repo/src/helpers/headsAreSame.ts b/packages/automerge-repo/src/helpers/headsAreSame.ts index 0924cf3ac..3002baaad 100644 --- a/packages/automerge-repo/src/helpers/headsAreSame.ts +++ b/packages/automerge-repo/src/helpers/headsAreSame.ts @@ -1,6 +1,6 @@ -import { Heads } from "@automerge/automerge/slim/next" import { arraysAreEqual } from "./arraysAreEqual.js" +import type { UrlHeads } from "../types.js" -export const headsAreSame = (a: Heads, b: Heads) => { +export const headsAreSame = (a: UrlHeads, b: UrlHeads) => { return arraysAreEqual(a, b) } diff --git a/packages/automerge-repo/src/helpers/tests/network-adapter-tests.ts b/packages/automerge-repo/src/helpers/tests/network-adapter-tests.ts index 447827845..636dffbb1 100644 --- a/packages/automerge-repo/src/helpers/tests/network-adapter-tests.ts +++ b/packages/automerge-repo/src/helpers/tests/network-adapter-tests.ts @@ -49,9 +49,10 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void { // Alice creates a document const aliceHandle = aliceRepo.create() - // Bob receives the document - await eventPromise(bobRepo, "document") - const 
bobHandle = bobRepo.find(aliceHandle.url) + // TODO: ... let connections complete. this shouldn't be necessary. + await pause(50) + + const bobHandle = await bobRepo.find(aliceHandle.url) // Alice changes the document aliceHandle.change(d => { @@ -60,7 +61,7 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void { // Bob receives the change await eventPromise(bobHandle, "change") - assert.equal((await bobHandle.doc())?.foo, "bar") + assert.equal((await bobHandle).doc()?.foo, "bar") // Bob changes the document bobHandle.change(d => { @@ -69,7 +70,7 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void { // Alice receives the change await eventPromise(aliceHandle, "change") - assert.equal((await aliceHandle.doc())?.foo, "baz") + assert.equal(aliceHandle.doc().foo, "baz") } // Run the test in both directions, in case they're different types of adapters @@ -100,9 +101,9 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void { const docUrl = aliceHandle.url // Bob and Charlie receive the document - await eventPromises([bobRepo, charlieRepo], "document") - const bobHandle = bobRepo.find(docUrl) - const charlieHandle = charlieRepo.find(docUrl) + await pause(50) + const bobHandle = await bobRepo.find(docUrl) + const charlieHandle = await charlieRepo.find(docUrl) // Alice changes the document aliceHandle.change(d => { @@ -111,8 +112,8 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void { // Bob and Charlie receive the change await eventPromises([bobHandle, charlieHandle], "change") - assert.equal((await bobHandle.doc())?.foo, "bar") - assert.equal((await charlieHandle.doc())?.foo, "bar") + assert.equal(bobHandle.doc().foo, "bar") + assert.equal(charlieHandle.doc().foo, "bar") // Charlie changes the document charlieHandle.change(d => { @@ -121,8 +122,8 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void { // Alice and Bob receive the change 
await eventPromises([aliceHandle, bobHandle], "change") - assert.equal((await bobHandle.doc())?.foo, "baz") - assert.equal((await charlieHandle.doc())?.foo, "baz") + assert.equal(bobHandle.doc().foo, "baz") + assert.equal(charlieHandle.doc().foo, "baz") teardown() }) @@ -141,7 +142,7 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void { ) const aliceHandle = aliceRepo.create() - const charlieHandle = charlieRepo.find(aliceHandle.url) + const charlieHandle = await charlieRepo.find(aliceHandle.url) // pause to give charlie a chance to let alice know it wants the doc await pause(100) diff --git a/packages/automerge-repo/src/helpers/tests/storage-adapter-tests.ts b/packages/automerge-repo/src/helpers/tests/storage-adapter-tests.ts index 0291f1788..7b009e35a 100644 --- a/packages/automerge-repo/src/helpers/tests/storage-adapter-tests.ts +++ b/packages/automerge-repo/src/helpers/tests/storage-adapter-tests.ts @@ -1,4 +1,4 @@ -import { describe, expect, it } from "vitest" +import { describe, expect, beforeEach, it as _it } from "vitest" import type { StorageAdapterInterface } from "../../storage/StorageAdapterInterface.js" @@ -8,120 +8,90 @@ const PAYLOAD_C = () => new Uint8Array([2, 111, 74, 131, 236, 96, 142, 193]) const LARGE_PAYLOAD = new Uint8Array(100000).map(() => Math.random() * 256) -export function runStorageAdapterTests(_setup: SetupFn, title?: string): void { - const setup = async () => { - const { adapter, teardown = NO_OP } = await _setup() - return { adapter, teardown } - } +type AdapterTestContext = { + adapter: StorageAdapterInterface +} + +const it = _it + +export function runStorageAdapterTests(setup: SetupFn, title?: string): void { + beforeEach(async ctx => { + const { adapter, teardown = NO_OP } = await setup() + ctx.adapter = adapter + return teardown + }) describe(`Storage adapter acceptance tests ${ title ? 
`(${title})` : "" }`, () => { describe("load", () => { - it("should return undefined if there is no data", async () => { - const { adapter, teardown } = await setup() - + it("should return undefined if there is no data", async ({ adapter }) => { const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"]) expect(actual).toBeUndefined() - - teardown() }) }) describe("save and load", () => { - it("should return data that was saved", async () => { - const { adapter, teardown } = await setup() - + it("should return data that was saved", async ({ adapter }) => { await adapter.save(["storage-adapter-id"], PAYLOAD_A()) const actual = await adapter.load(["storage-adapter-id"]) expect(actual).toStrictEqual(PAYLOAD_A()) - - teardown() }) - it("should work with composite keys", async () => { - const { adapter, teardown } = await setup() - + it("should work with composite keys", async ({ adapter }) => { await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A()) const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"]) expect(actual).toStrictEqual(PAYLOAD_A()) - - teardown() }) - it("should work with a large payload", async () => { - const { adapter, teardown } = await setup() - + it("should work with a large payload", async ({ adapter }) => { await adapter.save(["AAAAA", "sync-state", "xxxxx"], LARGE_PAYLOAD) const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"]) expect(actual).toStrictEqual(LARGE_PAYLOAD) - - teardown() }) }) describe("loadRange", () => { - it("should return an empty array if there is no data", async () => { - const { adapter, teardown } = await setup() - + it("should return an empty array if there is no data", async ({ + adapter, + }) => { expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([]) - - teardown() }) }) describe("save and loadRange", () => { - it("should return all the data that matches the key", async () => { - const { adapter, teardown } = await setup() - + it("should return all the data that matches 
the key", async ({ + adapter, + }) => { await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A()) await adapter.save(["AAAAA", "snapshot", "yyyyy"], PAYLOAD_B()) await adapter.save(["AAAAA", "sync-state", "zzzzz"], PAYLOAD_C()) - expect(await adapter.loadRange(["AAAAA"])).toStrictEqual( - expect.arrayContaining([ - { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() }, - { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() }, - { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() }, - ]) - ) - - expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual( - expect.arrayContaining([ - { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() }, - { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() }, - ]) - ) - - teardown() - }) + expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([ + { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() }, + { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() }, + { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() }, + ]) - it("should only load values that match they key", async () => { - const { adapter, teardown } = await setup() + expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual([ + { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() }, + { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() }, + ]) + }) + it("should only load values that match they key", async ({ adapter }) => { await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A()) await adapter.save(["BBBBB", "sync-state", "zzzzz"], PAYLOAD_C()) const actual = await adapter.loadRange(["AAAAA"]) - expect(actual).toStrictEqual( - expect.arrayContaining([ - { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() }, - ]) - ) - expect(actual).toStrictEqual( - expect.not.arrayContaining([ - { key: ["BBBBB", "sync-state", "zzzzz"], data: PAYLOAD_C() }, - ]) - ) - - teardown() + expect(actual).toStrictEqual([ + { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() 
}, + ]) }) }) describe("save and remove", () => { - it("after removing, should be empty", async () => { - const { adapter, teardown } = await setup() - + it("after removing, should be empty", async ({ adapter }) => { await adapter.save(["AAAAA", "snapshot", "xxxxx"], PAYLOAD_A()) await adapter.remove(["AAAAA", "snapshot", "xxxxx"]) @@ -129,30 +99,24 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void { expect( await adapter.load(["AAAAA", "snapshot", "xxxxx"]) ).toBeUndefined() - - teardown() }) }) describe("save and save", () => { - it("should overwrite data saved with the same key", async () => { - const { adapter, teardown } = await setup() - + it("should overwrite data saved with the same key", async ({ + adapter, + }) => { await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A()) await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_B()) expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual([ { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_B() }, ]) - - teardown() }) }) describe("removeRange", () => { - it("should remove a range of records", async () => { - const { adapter, teardown } = await setup() - + it("should remove a range of records", async ({ adapter }) => { await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A()) await adapter.save(["AAAAA", "snapshot", "yyyyy"], PAYLOAD_B()) await adapter.save(["AAAAA", "sync-state", "zzzzz"], PAYLOAD_C()) @@ -162,13 +126,9 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void { expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([ { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() }, ]) - - teardown() }) - it("should not remove records that don't match", async () => { - const { adapter, teardown } = await setup() - + it("should not remove records that don't match", async ({ adapter }) => { await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A()) await adapter.save(["BBBBB", "sync-state", 
"zzzzz"], PAYLOAD_B()) @@ -178,8 +138,6 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void { expect(actual).toStrictEqual([ { key: ["BBBBB", "sync-state", "zzzzz"], data: PAYLOAD_B() }, ]) - - teardown() }) }) }) @@ -189,5 +147,5 @@ const NO_OP = () => {} export type SetupFn = () => Promise<{ adapter: StorageAdapterInterface - teardown?: () => void + teardown?: () => void | Promise }> diff --git a/packages/automerge-repo/src/index.ts b/packages/automerge-repo/src/index.ts index 175247af3..1da91befa 100644 --- a/packages/automerge-repo/src/index.ts +++ b/packages/automerge-repo/src/index.ts @@ -34,6 +34,8 @@ export { stringifyAutomergeUrl, interpretAsDocumentId, generateAutomergeUrl, + encodeHeads, + decodeHeads, } from "./AutomergeUrl.js" export { Repo } from "./Repo.js" export { NetworkAdapter } from "./network/NetworkAdapter.js" diff --git a/packages/automerge-repo/src/storage/StorageSubsystem.ts b/packages/automerge-repo/src/storage/StorageSubsystem.ts index 4cb36080d..02b7edd64 100644 --- a/packages/automerge-repo/src/storage/StorageSubsystem.ts +++ b/packages/automerge-repo/src/storage/StorageSubsystem.ts @@ -8,12 +8,23 @@ import { ChunkInfo, StorageKey, StorageId } from "./types.js" import { keyHash, headsHash } from "./keyHash.js" import { chunkTypeFromKey } from "./chunkTypeFromKey.js" import * as Uuid from "uuid" +import { EventEmitter } from "eventemitter3" +import { encodeHeads } from "../AutomergeUrl.js" + +type StorageSubsystemEvents = { + "document-loaded": (arg: { + documentId: DocumentId + durationMillis: number + numOps: number + numChanges: number + }) => void +} /** * The storage subsystem is responsible for saving and loading Automerge documents to and from * storage adapter. It also provides a generic key/value storage interface for other uses. 
*/ -export class StorageSubsystem { +export class StorageSubsystem extends EventEmitter { /** The storage adapter to use for saving and loading documents */ #storageAdapter: StorageAdapterInterface @@ -29,6 +40,7 @@ export class StorageSubsystem { #log = debug(`automerge-repo:storage-subsystem`) constructor(storageAdapter: StorageAdapterInterface) { + super() this.#storageAdapter = storageAdapter } @@ -130,7 +142,14 @@ export class StorageSubsystem { if (binary.length === 0) return null // Load into an Automerge document + const start = performance.now() const newDoc = A.loadIncremental(A.init(), binary) as A.Doc + const end = performance.now() + this.emit("document-loaded", { + documentId, + durationMillis: end - start, + ...A.stats(newDoc), + }) // Record the latest heads for the document this.#storedHeads.set(documentId, A.getHeads(newDoc)) @@ -155,6 +174,7 @@ export class StorageSubsystem { } else { await this.#saveIncremental(documentId, doc) } + this.#storedHeads.set(documentId, A.getHeads(doc)) } @@ -261,7 +281,7 @@ export class StorageSubsystem { } const newHeads = A.getHeads(doc) - if (headsAreSame(newHeads, oldHeads)) { + if (headsAreSame(encodeHeads(newHeads), encodeHeads(oldHeads))) { // the document hasn't changed return false } diff --git a/packages/automerge-repo/src/synchronizer/CollectionSynchronizer.ts b/packages/automerge-repo/src/synchronizer/CollectionSynchronizer.ts index b9c531f48..f989d3333 100644 --- a/packages/automerge-repo/src/synchronizer/CollectionSynchronizer.ts +++ b/packages/automerge-repo/src/synchronizer/CollectionSynchronizer.ts @@ -1,9 +1,9 @@ import debug from "debug" import { DocHandle } from "../DocHandle.js" -import { stringifyAutomergeUrl } from "../AutomergeUrl.js" +import { parseAutomergeUrl } from "../AutomergeUrl.js" import { Repo } from "../Repo.js" import { DocMessage } from "../network/messages.js" -import { DocumentId, PeerId } from "../types.js" +import { AutomergeUrl, DocumentId, PeerId } from "../types.js" import 
{ DocSynchronizer } from "./DocSynchronizer.js" import { Synchronizer } from "./Synchronizer.js" @@ -21,23 +21,27 @@ export class CollectionSynchronizer extends Synchronizer { /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */ #docSetUp: Record = {} - constructor(private repo: Repo) { + #denylist: DocumentId[] + + constructor(private repo: Repo, denylist: AutomergeUrl[] = []) { super() + this.#denylist = denylist.map(url => parseAutomergeUrl(url).documentId) } /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */ - #fetchDocSynchronizer(documentId: DocumentId) { - if (!this.docSynchronizers[documentId]) { - const handle = this.repo.find(stringifyAutomergeUrl({ documentId })) - this.docSynchronizers[documentId] = this.#initDocSynchronizer(handle) + #fetchDocSynchronizer(handle: DocHandle) { + if (!this.docSynchronizers[handle.documentId]) { + this.docSynchronizers[handle.documentId] = + this.#initDocSynchronizer(handle) } - return this.docSynchronizers[documentId] + return this.docSynchronizers[handle.documentId] } /** Creates a new docSynchronizer and sets it up to propagate messages */ #initDocSynchronizer(handle: DocHandle): DocSynchronizer { const docSynchronizer = new DocSynchronizer({ handle, + peerId: this.repo.networkSubsystem.peerId, onLoadSyncState: async peerId => { if (!this.repo.storageSubsystem) { return @@ -58,6 +62,7 @@ export class CollectionSynchronizer extends Synchronizer { docSynchronizer.on("message", event => this.emit("message", event)) docSynchronizer.on("open-doc", event => this.emit("open-doc", event)) docSynchronizer.on("sync-state", event => this.emit("sync-state", event)) + docSynchronizer.on("metrics", event => this.emit("metrics", event)) return docSynchronizer } @@ -90,15 +95,31 @@ export class CollectionSynchronizer extends Synchronizer { throw new Error("received a message with an invalid documentId") } + if 
(this.#denylist.includes(documentId)) { + this.emit("metrics", { + type: "doc-denied", + documentId, + }) + this.emit("message", { + type: "doc-unavailable", + documentId, + targetId: message.senderId, + }) + return + } + this.#docSetUp[documentId] = true - const docSynchronizer = this.#fetchDocSynchronizer(documentId) + const handle = await this.repo.find(documentId, { + allowableStates: ["ready", "unavailable", "requesting"], + }) + const docSynchronizer = this.#fetchDocSynchronizer(handle) docSynchronizer.receiveMessage(message) // Initiate sync with any new peers const peers = await this.#documentGenerousPeers(documentId) - docSynchronizer.beginSync( + void docSynchronizer.beginSync( peers.filter(peerId => !docSynchronizer.hasPeer(peerId)) ) } @@ -106,14 +127,14 @@ export class CollectionSynchronizer extends Synchronizer { /** * Starts synchronizing the given document with all peers that we share it generously with. */ - addDocument(documentId: DocumentId) { + addDocument(handle: DocHandle) { // HACK: this is a hack to prevent us from adding the same document twice - if (this.#docSetUp[documentId]) { + if (this.#docSetUp[handle.documentId]) { return } - const docSynchronizer = this.#fetchDocSynchronizer(documentId) - void this.#documentGenerousPeers(documentId).then(peers => { - docSynchronizer.beginSync(peers) + const docSynchronizer = this.#fetchDocSynchronizer(handle) + void this.#documentGenerousPeers(handle.documentId).then(peers => { + void docSynchronizer.beginSync(peers) }) } @@ -135,7 +156,7 @@ export class CollectionSynchronizer extends Synchronizer { for (const docSynchronizer of Object.values(this.docSynchronizers)) { const { documentId } = docSynchronizer void this.repo.sharePolicy(peerId, documentId).then(okToShare => { - if (okToShare) docSynchronizer.beginSync([peerId]) + if (okToShare) void docSynchronizer.beginSync([peerId]) }) } } diff --git a/packages/automerge-repo/src/synchronizer/DocSynchronizer.ts 
b/packages/automerge-repo/src/synchronizer/DocSynchronizer.ts index 01dea6e58..ce47e99f9 100644 --- a/packages/automerge-repo/src/synchronizer/DocSynchronizer.ts +++ b/packages/automerge-repo/src/synchronizer/DocSynchronizer.ts @@ -30,6 +30,7 @@ type PendingMessage = { interface DocSynchronizerConfig { handle: DocHandle + peerId: PeerId onLoadSyncState?: (peerId: PeerId) => Promise } @@ -56,13 +57,17 @@ export class DocSynchronizer extends Synchronizer { #pendingSyncMessages: Array = [] + // We keep this around at least in part for debugging. + // eslint-disable-next-line no-unused-private-class-members + #peerId: PeerId #syncStarted = false #handle: DocHandle #onLoadSyncState: (peerId: PeerId) => Promise - constructor({ handle, onLoadSyncState }: DocSynchronizerConfig) { + constructor({ handle, peerId, onLoadSyncState }: DocSynchronizerConfig) { super() + this.#peerId = peerId this.#handle = handle this.#onLoadSyncState = onLoadSyncState ?? (() => Promise.resolve(undefined)) @@ -81,7 +86,7 @@ export class DocSynchronizer extends Synchronizer { // Process pending sync messages immediately after the handle becomes ready. 
void (async () => { - await handle.doc([READY, REQUESTING]) + await handle.whenReady([READY, REQUESTING]) this.#processAllPendingSyncMessages() })() } @@ -97,8 +102,7 @@ export class DocSynchronizer extends Synchronizer { /// PRIVATE async #syncWithPeers() { - this.#log(`syncWithPeers`) - const doc = await this.#handle.doc() + const doc = await this.#handle.legacyAsyncDoc() // XXX THIS ONE IS WEIRD if (doc === undefined) return this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc)) } @@ -226,16 +230,15 @@ export class DocSynchronizer extends Synchronizer { return this.#peers.includes(peerId) } - beginSync(peerIds: PeerId[]) { + async beginSync(peerIds: PeerId[]) { const noPeersWithDocument = peerIds.every( peerId => this.#peerDocumentStatuses[peerId] in ["unavailable", "wants"] ) // At this point if we don't have anything in our storage, we need to use an empty doc to sync // with; but we don't want to surface that state to the front end - - const docPromise = this.#handle - .doc([READY, REQUESTING, UNAVAILABLE]) + const docPromise = this.#handle // TODO THIS IS ALSO WEIRD + .legacyAsyncDoc([READY, REQUESTING, UNAVAILABLE]) .then(doc => { // we register out peers first, then say that sync has started this.#syncStarted = true @@ -251,7 +254,13 @@ export class DocSynchronizer extends Synchronizer { return doc ?? 
A.init() }) - this.#log(`beginSync: ${peerIds.join(", ")}`) + const peersWithDocument = this.#peers.some(peerId => { + return this.#peerDocumentStatuses[peerId] == "has" + }) + + if (peersWithDocument) { + await this.#handle.whenReady() + } peerIds.forEach(peerId => { this.#withSyncState(peerId, syncState => { @@ -351,11 +360,20 @@ export class DocSynchronizer extends Synchronizer { this.#withSyncState(message.senderId, syncState => { this.#handle.update(doc => { + const start = performance.now() + const [newDoc, newSyncState] = A.receiveSyncMessage( doc, syncState, message.data ) + const end = performance.now() + this.emit("metrics", { + type: "receive-sync-message", + documentId: this.#handle.documentId, + durationMillis: end - start, + ...A.stats(doc), + }) this.#setSyncState(message.senderId, newSyncState) diff --git a/packages/automerge-repo/src/synchronizer/Synchronizer.ts b/packages/automerge-repo/src/synchronizer/Synchronizer.ts index eb6963fc7..838cf797f 100644 --- a/packages/automerge-repo/src/synchronizer/Synchronizer.ts +++ b/packages/automerge-repo/src/synchronizer/Synchronizer.ts @@ -15,6 +15,7 @@ export interface SynchronizerEvents { message: (payload: MessageContents) => void "sync-state": (payload: SyncStatePayload) => void "open-doc": (arg: OpenDocMessage) => void + metrics: (arg: DocSyncMetrics) => void } /** Notify the repo that the sync state has changed */ @@ -23,3 +24,16 @@ export interface SyncStatePayload { documentId: DocumentId syncState: SyncState } + +export type DocSyncMetrics = + | { + type: "receive-sync-message" + documentId: DocumentId + durationMillis: number + numOps: number + numChanges: number + } + | { + type: "doc-denied" + documentId: DocumentId + } diff --git a/packages/automerge-repo/src/types.ts b/packages/automerge-repo/src/types.ts index 12b84ca9d..aa6052040 100644 --- a/packages/automerge-repo/src/types.ts +++ b/packages/automerge-repo/src/types.ts @@ -27,8 +27,11 @@ export type AnyDocumentId = | BinaryDocumentId | 
LegacyDocumentId +// We need to define our own version of heads because the AutomergeHeads type is not bs58check encoded +export type UrlHeads = string[] & { __automergeUrlHeads: unknown } + /** A branded type for peer IDs */ export type PeerId = string & { __peerId: true } /** A randomly generated string created when the {@link Repo} starts up */ -export type SessionId = string & { __SessionId: true } +export type SessionId = string & { __sessionId: true } diff --git a/packages/automerge-repo/test/AutomergeUrl.test.ts b/packages/automerge-repo/test/AutomergeUrl.test.ts index f2f18d7a4..b076e0cf3 100644 --- a/packages/automerge-repo/test/AutomergeUrl.test.ts +++ b/packages/automerge-repo/test/AutomergeUrl.test.ts @@ -3,9 +3,11 @@ import bs58check from "bs58check" import { describe, it } from "vitest" import { generateAutomergeUrl, + getHeadsFromUrl, isValidAutomergeUrl, parseAutomergeUrl, stringifyAutomergeUrl, + UrlHeads, } from "../src/AutomergeUrl.js" import type { AutomergeUrl, @@ -102,3 +104,131 @@ describe("AutomergeUrl", () => { }) }) }) + +describe("AutomergeUrl with heads", () => { + // Create some sample encoded heads for testing + const head1 = bs58check.encode(new Uint8Array([1, 2, 3, 4])) as string + const head2 = bs58check.encode(new Uint8Array([5, 6, 7, 8])) as string + const goodHeads = [head1, head2] as UrlHeads + const urlWithHeads = `${goodUrl}#${head1}|${head2}` as AutomergeUrl + const invalidHead = "not-base58-encoded" + const invalidHeads = [invalidHead] as UrlHeads + + describe("stringifyAutomergeUrl", () => { + it("should stringify a url with heads", () => { + const url = stringifyAutomergeUrl({ + documentId: goodDocumentId, + heads: goodHeads, + }) + assert.strictEqual(url, urlWithHeads) + }) + + it("should throw if heads are not valid base58check", () => { + assert.throws(() => + stringifyAutomergeUrl({ + documentId: goodDocumentId, + heads: invalidHeads, + }) + ) + }) + }) + + describe("parseAutomergeUrl", () => { + it("should parse a url 
with heads", () => { + const { documentId, heads } = parseAutomergeUrl(urlWithHeads) + assert.equal(documentId, goodDocumentId) + assert.deepEqual(heads, [head1, head2]) + }) + + it("should parse a url without heads", () => { + const { documentId, heads } = parseAutomergeUrl(goodUrl) + assert.equal(documentId, goodDocumentId) + assert.equal(heads, undefined) + }) + + it("should throw on url with invalid heads encoding", () => { + const badUrl = `${goodUrl}#${invalidHead}` as AutomergeUrl + assert.throws(() => parseAutomergeUrl(badUrl)) + }) + }) + + describe("isValidAutomergeUrl", () => { + it("should return true for a valid url with heads", () => { + assert(isValidAutomergeUrl(urlWithHeads) === true) + }) + + it("should return false for a url with invalid heads", () => { + const badUrl = `${goodUrl}#${invalidHead}` as AutomergeUrl + assert(isValidAutomergeUrl(badUrl) === false) + }) + }) + + describe("getHeadsFromUrl", () => { + it("should return heads from a valid url", () => { + const heads = getHeadsFromUrl(urlWithHeads) + assert.deepEqual(heads, [head1, head2]) + }) + + it("should return undefined for url without heads", () => { + const heads = getHeadsFromUrl(goodUrl) + assert.equal(heads, undefined) + }) + }) + it("should handle a single head correctly", () => { + const urlWithOneHead = `${goodUrl}#${head1}` as AutomergeUrl + const { heads } = parseAutomergeUrl(urlWithOneHead) + assert.deepEqual(heads, [head1]) + }) + + it("should round-trip urls with heads", () => { + const originalUrl = urlWithHeads + const parsed = parseAutomergeUrl(originalUrl) + const roundTripped = stringifyAutomergeUrl({ + documentId: parsed.documentId, + heads: parsed.heads, + }) + assert.equal(roundTripped, originalUrl) + }) + + describe("should reject malformed urls", () => { + it("should reject urls with trailing delimiter", () => { + assert(!isValidAutomergeUrl(`${goodUrl}#${head1}:` as AutomergeUrl)) + }) + + it("should reject urls with empty head", () => { + 
assert(!isValidAutomergeUrl(`${goodUrl}#|${head1}` as AutomergeUrl)) + }) + + it("should reject urls with multiple hash characters", () => { + assert( + !isValidAutomergeUrl(`${goodUrl}#${head1}#${head2}` as AutomergeUrl) + ) + }) + }) +}) + +describe("empty heads section", () => { + it("should treat bare # as empty heads array", () => { + const urlWithEmptyHeads = `${goodUrl}#` as AutomergeUrl + const { heads } = parseAutomergeUrl(urlWithEmptyHeads) + assert.deepEqual(heads, []) + }) + + it("should round-trip empty heads array", () => { + const original = `${goodUrl}#` as AutomergeUrl + const parsed = parseAutomergeUrl(original) + const roundTripped = stringifyAutomergeUrl({ + documentId: parsed.documentId, + heads: parsed.heads, + }) + assert.equal(roundTripped, original) + }) + + it("should distinguish between no heads and empty heads", () => { + const noHeads = parseAutomergeUrl(goodUrl) + const emptyHeads = parseAutomergeUrl(`${goodUrl}#` as AutomergeUrl) + + assert.equal(noHeads.heads, undefined) + assert.deepEqual(emptyHeads.heads, []) + }) +}) diff --git a/packages/automerge-repo/test/CollectionSynchronizer.test.ts b/packages/automerge-repo/test/CollectionSynchronizer.test.ts index 7bbf44a24..a62cb26f1 100644 --- a/packages/automerge-repo/test/CollectionSynchronizer.test.ts +++ b/packages/automerge-repo/test/CollectionSynchronizer.test.ts @@ -28,13 +28,13 @@ describe("CollectionSynchronizer", () => { done() }) - synchronizer.addDocument(handle.documentId) + synchronizer.addDocument(handle) })) it("starts synchronizing existing documents when a peer is added", () => new Promise(done => { const handle = repo.create() - synchronizer.addDocument(handle.documentId) + synchronizer.addDocument(handle) synchronizer.once("message", event => { const { targetId, documentId } = event as SyncMessage assert(targetId === "peer1") @@ -50,7 +50,7 @@ describe("CollectionSynchronizer", () => { repo.sharePolicy = async (peerId: PeerId) => peerId !== "peer1" - 
synchronizer.addDocument(handle.documentId) + synchronizer.addDocument(handle) synchronizer.once("message", () => { reject(new Error("Should not have sent a message")) }) @@ -71,7 +71,7 @@ describe("CollectionSynchronizer", () => { reject(new Error("Should not have sent a message")) }) - synchronizer.addDocument(handle.documentId) + synchronizer.addDocument(handle) setTimeout(done) })) diff --git a/packages/automerge-repo/test/DocHandle.test.ts b/packages/automerge-repo/test/DocHandle.test.ts index 389f38816..da0de4824 100644 --- a/packages/automerge-repo/test/DocHandle.test.ts +++ b/packages/automerge-repo/test/DocHandle.test.ts @@ -1,13 +1,16 @@ import * as A from "@automerge/automerge/next" import assert from "assert" import { decode } from "cbor-x" -import { describe, it, vi } from "vitest" -import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js" +import { describe, expect, it, vi } from "vitest" +import { + encodeHeads, + generateAutomergeUrl, + parseAutomergeUrl, +} from "../src/AutomergeUrl.js" import { eventPromise } from "../src/helpers/eventPromise.js" import { pause } from "../src/helpers/pause.js" import { DocHandle, DocHandleChangePayload } from "../src/index.js" import { TestDoc } from "./types.js" -import { UNLOADED } from "../src/DocHandle.js" describe("DocHandle", () => { const TEST_ID = parseAutomergeUrl(generateAutomergeUrl()).documentId @@ -35,7 +38,7 @@ describe("DocHandle", () => { handle.update(doc => docFromMockStorage(doc)) assert.equal(handle.isReady(), true) - const doc = await handle.doc() + const doc = handle.doc() assert.equal(doc?.foo, "bar") }) @@ -47,13 +50,13 @@ describe("DocHandle", () => { handle.update(doc => docFromMockStorage(doc)) assert.equal(handle.isReady(), true) - const doc = await handle.doc() - assert.deepEqual(doc, handle.docSync()) + const doc = handle.doc() + assert.deepEqual(doc, handle.doc()) }) - it("should return undefined if we access the doc before ready", async () => { + it("should 
throw an exception if we access the doc before ready", async () => { const handle = new DocHandle(TEST_ID) - assert.equal(handle.docSync(), undefined) + assert.throws(() => handle.doc()) }) it("should not return a doc until ready", async () => { @@ -63,7 +66,7 @@ describe("DocHandle", () => { // simulate loading from storage handle.update(doc => docFromMockStorage(doc)) - const doc = await handle.doc() + const doc = handle.doc() assert.equal(handle.isReady(), true) assert.equal(doc?.foo, "bar") @@ -83,15 +86,15 @@ describe("DocHandle", () => { handle.change(d => (d.foo = "bar")) assert.equal(handle.isReady(), true) - const heads = A.getHeads(handle.docSync()) + const heads = encodeHeads(A.getHeads(handle.doc())) assert.notDeepEqual(handle.heads(), []) assert.deepEqual(heads, handle.heads()) }) - it("should return undefined if the heads aren't loaded", async () => { + it("should throw an if the heads aren't loaded", async () => { const handle = new DocHandle(TEST_ID) assert.equal(handle.isReady(), false) - assert.deepEqual(handle.heads(), undefined) + expect(() => handle.heads()).toThrow("DocHandle is not ready") }) it("should return the history when requested", async () => { @@ -113,8 +116,45 @@ describe("DocHandle", () => { assert.equal(handle.isReady(), true) const history = handle.history() - const view = handle.view(history[1]) - assert.deepEqual(view, { foo: "one" }) + const viewHandle = handle.view(history[1]) + assert.deepEqual(await viewHandle.doc(), { foo: "one" }) + }) + + it("should support fixed heads from construction", async () => { + const handle = setup() + handle.change(d => (d.foo = "zero")) + handle.change(d => (d.foo = "one")) + + const history = handle.history() + const viewHandle = new DocHandle(TEST_ID, { heads: history[0] }) + viewHandle.update(() => A.clone(handle.doc()!)) + viewHandle.doneLoading() + + assert.deepEqual(await viewHandle.doc(), { foo: "zero" }) + }) + + it("should prevent changes on fixed-heads handles", async () => { + 
const handle = setup() + handle.change(d => (d.foo = "zero")) + const viewHandle = handle.view(handle.heads()!) + + assert.throws(() => viewHandle.change(d => (d.foo = "one"))) + assert.throws(() => + viewHandle.changeAt(handle.heads()!, d => (d.foo = "one")) + ) + assert.throws(() => viewHandle.merge(handle)) + }) + + it("should return fixed heads from heads()", async () => { + const handle = setup() + handle.change(d => (d.foo = "zero")) + const originalHeads = handle.heads()! + + handle.change(d => (d.foo = "one")) + const viewHandle = handle.view(originalHeads) + + assert.deepEqual(viewHandle.heads(), originalHeads) + assert.notDeepEqual(viewHandle.heads(), handle.heads()) }) it("should return diffs", async () => { @@ -154,6 +194,31 @@ describe("DocHandle", () => { ]) }) + it("should support diffing against another handle", async () => { + const handle = setup() + handle.change(d => (d.foo = "zero")) + const viewHandle = handle.view(handle.heads()!) + + handle.change(d => (d.foo = "one")) + + const patches = viewHandle.diff(handle) + assert.deepEqual(patches, [ + { action: "put", path: ["foo"], value: "" }, + { action: "splice", path: ["foo", 0], value: "one" }, + ]) + }) + + // TODO: alexg -- should i remove this test? should this fail or no? 
+ it.skip("should fail diffing against unrelated handles", async () => { + const handle1 = setup() + const handle2 = setup() + + handle1.change(d => (d.foo = "zero")) + handle2.change(d => (d.foo = "one")) + + assert.throws(() => handle1.diff(handle2)) + }) + it("should allow direct access to decoded changes", async () => { const handle = setup() const time = Date.now() @@ -194,7 +259,7 @@ describe("DocHandle", () => { const handle = new DocHandle(TEST_ID) assert.equal(handle.isReady(), false) - handle.doc() + handle.legacyAsyncDoc() assert(vi.getTimerCount() > timerCount) @@ -220,7 +285,7 @@ describe("DocHandle", () => { assert.equal(handle.isReady(), true) handle.change(d => (d.foo = "pizza")) - const doc = await handle.doc() + const doc = handle.doc() assert.equal(doc?.foo, "pizza") }) @@ -230,7 +295,9 @@ describe("DocHandle", () => { // we don't have it in storage, so we request it from the network handle.request() - assert.equal(handle.docSync(), undefined) + await expect(() => { + handle.doc() + }).toThrowError("DocHandle is not ready") assert.equal(handle.isReady(), false) assert.throws(() => handle.change(_ => {})) }) @@ -246,7 +313,7 @@ describe("DocHandle", () => { return A.change(doc, d => (d.foo = "bar")) }) - const doc = await handle.doc() + const doc = handle.doc() assert.equal(handle.isReady(), true) assert.equal(doc?.foo, "bar") }) @@ -262,7 +329,7 @@ describe("DocHandle", () => { doc.foo = "bar" }) - const doc = await handle.doc() + const doc = handle.doc() assert.equal(doc?.foo, "bar") const changePayload = await p @@ -287,7 +354,7 @@ describe("DocHandle", () => { const p = new Promise(resolve => handle.once("change", ({ handle, doc }) => { - assert.equal(handle.docSync()?.foo, doc.foo) + assert.equal(handle.doc()?.foo, doc.foo) resolve() }) @@ -324,7 +391,7 @@ describe("DocHandle", () => { doc.foo = "baz" }) - const doc = await handle.doc() + const doc = handle.doc() assert.equal(doc?.foo, "baz") return p @@ -339,7 +406,7 @@ describe("DocHandle", 
() => { }) await p - const doc = await handle.doc() + const doc = handle.doc() assert.equal(doc?.foo, "bar") }) @@ -359,11 +426,7 @@ describe("DocHandle", () => { // set docHandle time out after 5 ms const handle = new DocHandle(TEST_ID, { timeoutDelay: 5 }) - const doc = await handle.doc() - - assert.equal(doc, undefined) - - assert.equal(handle.state, "unavailable") + expect(() => handle.doc()).toThrowError("DocHandle is not ready") }) it("should not time out if the document is loaded in time", async () => { @@ -374,11 +437,11 @@ describe("DocHandle", () => { handle.update(doc => docFromMockStorage(doc)) // now it should not time out - const doc = await handle.doc() + const doc = handle.doc() assert.equal(doc?.foo, "bar") }) - it("should be undefined if loading from the network times out", async () => { + it("should throw an exception if loading from the network times out", async () => { // set docHandle time out after 5 ms const handle = new DocHandle(TEST_ID, { timeoutDelay: 5 }) @@ -388,8 +451,7 @@ describe("DocHandle", () => { // there's no update await pause(10) - const doc = await handle.doc() - assert.equal(doc, undefined) + expect(() => handle.doc()).toThrowError("DocHandle is not ready") }) it("should not time out if the document is updated in time", async () => { @@ -407,7 +469,7 @@ describe("DocHandle", () => { // now it should not time out await pause(5) - const doc = await handle.doc() + const doc = handle.doc() assert.equal(doc?.foo, "bar") }) @@ -423,49 +485,6 @@ describe("DocHandle", () => { assert.equal(handle.isDeleted(), true) }) - it("should clear document reference when unloaded", async () => { - const handle = setup() - - handle.change(doc => { - doc.foo = "bar" - }) - const doc = await handle.doc() - assert.equal(doc?.foo, "bar") - - handle.unload() - assert.equal(handle.isUnloaded(), true) - - const clearedDoc = await handle.doc([UNLOADED]) - assert.notEqual(clearedDoc?.foo, "bar") - }) - - it("should allow reloading after unloading", 
async () => { - const handle = setup() - - handle.change(doc => { - doc.foo = "bar" - }) - const doc = await handle.doc() - assert.equal(doc?.foo, "bar") - - handle.unload() - - // reload to transition from unloaded to loading - handle.reload() - - // simulate requesting from the network - handle.request() - - // simulate updating from the network - handle.update(doc => { - return A.change(doc, d => (d.foo = "bar")) - }) - - const reloadedDoc = await handle.doc() - assert.equal(handle.isReady(), true) - assert.equal(reloadedDoc?.foo, "bar") - }) - it("should allow changing at old heads", async () => { const handle = setup() diff --git a/packages/automerge-repo/test/DocSynchronizer.test.ts b/packages/automerge-repo/test/DocSynchronizer.test.ts index 81f4ae2e4..037488015 100644 --- a/packages/automerge-repo/test/DocSynchronizer.test.ts +++ b/packages/automerge-repo/test/DocSynchronizer.test.ts @@ -1,7 +1,11 @@ import assert from "assert" import { describe, it } from "vitest" import { next as Automerge } from "@automerge/automerge" -import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js" +import { + encodeHeads, + generateAutomergeUrl, + parseAutomergeUrl, +} from "../src/AutomergeUrl.js" import { DocHandle } from "../src/DocHandle.js" import { eventPromise } from "../src/helpers/eventPromise.js" import { @@ -67,11 +71,14 @@ describe("DocSynchronizer", () => { assert.equal(message1.peerId, "alice") assert.equal(message1.documentId, handle.documentId) - assert.deepEqual(message1.syncState.lastSentHeads, []) + assert.deepStrictEqual(message1.syncState.lastSentHeads, []) assert.equal(message2.peerId, "alice") assert.equal(message2.documentId, handle.documentId) - assert.deepEqual(message2.syncState.lastSentHeads, handle.heads()) + assert.deepStrictEqual( + encodeHeads(message2.syncState.lastSentHeads), + handle.heads() + ) }) it("still syncs with a peer after it disconnects and reconnects", async () => { diff --git 
a/packages/automerge-repo/test/Repo.test.ts b/packages/automerge-repo/test/Repo.test.ts index d24593145..a43a6c873 100644 --- a/packages/automerge-repo/test/Repo.test.ts +++ b/packages/automerge-repo/test/Repo.test.ts @@ -3,8 +3,11 @@ import { MessageChannelNetworkAdapter } from "../../automerge-repo-network-messa import assert from "assert" import * as Uuid from "uuid" import { describe, expect, it } from "vitest" -import { parseAutomergeUrl } from "../src/AutomergeUrl.js" import { + encodeHeads, + getHeadsFromUrl, + isValidAutomergeUrl, + parseAutomergeUrl, generateAutomergeUrl, stringifyAutomergeUrl, } from "../src/AutomergeUrl.js" @@ -13,6 +16,7 @@ import { eventPromise } from "../src/helpers/eventPromise.js" import { pause } from "../src/helpers/pause.js" import { AnyDocumentId, + UrlHeads, AutomergeUrl, DocHandle, DocumentId, @@ -72,35 +76,34 @@ describe("Repo", () => { it("can create a document with an initial value", async () => { const { repo } = setup() const handle = repo.create({ foo: "bar" }) - await handle.doc() - assert.equal(handle.docSync().foo, "bar") + assert.equal(handle.doc().foo, "bar") }) - it("can find a document by url", () => { + it("can find a document by url", async () => { const { repo } = setup() const handle = repo.create() handle.change((d: TestDoc) => { d.foo = "bar" }) - const handle2 = repo.find(handle.url) + const handle2 = await repo.find(handle.url) assert.equal(handle, handle2) - assert.deepEqual(handle2.docSync(), { foo: "bar" }) + assert.deepEqual(handle2.doc(), { foo: "bar" }) }) - it("can find a document by its unprefixed document ID", () => { + it("can find a document by its unprefixed document ID", async () => { const { repo } = setup() const handle = repo.create() handle.change((d: TestDoc) => { d.foo = "bar" }) - const handle2 = repo.find(handle.documentId) + const handle2 = await repo.find(handle.documentId) assert.equal(handle, handle2) - assert.deepEqual(handle2.docSync(), { foo: "bar" }) + 
assert.deepEqual(handle2.doc(), { foo: "bar" }) }) - it("can find a document by legacy UUID (for now)", () => { + it("can find a document by legacy UUID (for now)", async () => { disableConsoleWarn() const { repo } = setup() @@ -113,9 +116,9 @@ describe("Repo", () => { const { binaryDocumentId } = parseAutomergeUrl(url) const legacyDocId = Uuid.stringify(binaryDocumentId) as LegacyDocumentId - const handle2 = repo.find(legacyDocId) + const handle2 = await repo.find(legacyDocId) assert.equal(handle, handle2) - assert.deepEqual(handle2.docSync(), { foo: "bar" }) + assert.deepEqual(handle2.doc(), { foo: "bar" }) reenableConsoleWarn() }) @@ -126,7 +129,7 @@ describe("Repo", () => { handle.change(d => { d.foo = "bar" }) - const v = await handle.doc() + const v = handle.doc() assert.equal(handle.isReady(), true) assert.equal(v.foo, "bar") }) @@ -140,8 +143,8 @@ describe("Repo", () => { const handle2 = repo.clone(handle) assert.equal(handle2.isReady(), true) assert.notEqual(handle.documentId, handle2.documentId) - assert.deepStrictEqual(handle.docSync(), handle2.docSync()) - assert.deepStrictEqual(handle2.docSync(), { foo: "bar" }) + assert.deepStrictEqual(handle.doc(), handle2.doc()) + assert.deepStrictEqual(handle2.doc(), { foo: "bar" }) }) it("the cloned documents are distinct", () => { @@ -159,9 +162,9 @@ describe("Repo", () => { d.baz = "baz" }) - assert.notDeepStrictEqual(handle.docSync(), handle2.docSync()) - assert.deepStrictEqual(handle.docSync(), { foo: "bar", bar: "bif" }) - assert.deepStrictEqual(handle2.docSync(), { foo: "bar", baz: "baz" }) + assert.notDeepStrictEqual(handle.doc(), handle2.doc()) + assert.deepStrictEqual(handle.doc(), { foo: "bar", bar: "bif" }) + assert.deepStrictEqual(handle2.doc(), { foo: "bar", baz: "baz" }) }) it("the cloned documents can merge", () => { @@ -181,59 +184,47 @@ describe("Repo", () => { handle.merge(handle2) - assert.deepStrictEqual(handle.docSync(), { + assert.deepStrictEqual(handle.doc(), { foo: "bar", bar: "bif", baz: 
"baz", }) // only the one handle should be changed - assert.deepStrictEqual(handle2.docSync(), { foo: "bar", baz: "baz" }) + assert.deepStrictEqual(handle2.doc(), { foo: "bar", baz: "baz" }) }) it("throws an error if we try to find a handle with an invalid AutomergeUrl", async () => { const { repo } = setup() - try { - repo.find("invalid-url" as unknown as AutomergeUrl) - } catch (e: any) { - assert.equal(e.message, "Invalid AutomergeUrl: 'invalid-url'") - } + await expect(async () => { + await repo.find("invalid-url" as unknown as AutomergeUrl) + }).rejects.toThrow("Invalid AutomergeUrl: 'invalid-url'") }) it("doesn't find a document that doesn't exist", async () => { const { repo } = setup() - const handle = repo.find(generateAutomergeUrl()) - - await handle.whenReady(["ready", "unavailable"]) - - assert.equal(handle.isReady(), false) - assert.equal(handle.state, "unavailable") - const doc = await handle.doc() - assert.equal(doc, undefined) - }) - - it("emits an unavailable event when you don't have the document locally and are not connected to anyone", async () => { - const { repo } = setup() - const url = generateAutomergeUrl() - const handle = repo.find(url) - assert.equal(handle.isReady(), false) - await eventPromise(handle, "unavailable") + await expect(async () => { + await repo.find(generateAutomergeUrl()) + }).rejects.toThrow(/Document (.*) is unavailable/) }) it("doesn't mark a document as unavailable until network adapters are ready", async () => { const { repo, networkAdapter } = setup({ startReady: false }) const url = generateAutomergeUrl() - const handle = repo.find(url) - let wasUnavailable = false - handle.on("unavailable", () => { - wasUnavailable = true - }) + const attemptedFind = repo.find(url) - await pause(50) - assert.equal(wasUnavailable, false) + // First verify it stays pending for 50ms + await expect( + Promise.race([attemptedFind, pause(50)]) + ).resolves.toBeUndefined() + // Trigger the rejection networkAdapter.forceReady() - await 
eventPromise(handle, "unavailable") + + // Now verify it rejects + await expect(attemptedFind).rejects.toThrow( + /Document (.*) is unavailable/ + ) }) it("can find a created document", async () => { @@ -244,18 +235,18 @@ describe("Repo", () => { }) assert.equal(handle.isReady(), true) - const bobHandle = repo.find(handle.url) + const bobHandle = await repo.find(handle.url) assert.equal(handle, bobHandle) assert.equal(handle.isReady(), true) - const v = await bobHandle.doc() + const v = bobHandle.doc() assert.equal(v?.foo, "bar") }) it("saves the document when creating it", async () => { const { repo, storageAdapter } = setup() - const handle = repo.create() + const handle = repo.create({ foo: "saved" }) const repo2 = new Repo({ storage: storageAdapter, @@ -263,9 +254,9 @@ describe("Repo", () => { await repo.flush() - const bobHandle = repo2.find(handle.url) + const bobHandle = await repo2.find(handle.url) await bobHandle.whenReady() - assert.equal(bobHandle.isReady(), true) + assert.deepEqual(bobHandle.doc(), { foo: "saved" }) }) it("saves the document when changed and can find it again", async () => { @@ -284,9 +275,9 @@ describe("Repo", () => { storage: storageAdapter, }) - const bobHandle = repo2.find(handle.url) + const bobHandle = await repo2.find(handle.url) - const v = await bobHandle.doc() + const v = bobHandle.doc() assert.equal(v?.foo, "bar") }) @@ -298,7 +289,7 @@ describe("Repo", () => { }) // we now have a snapshot and an incremental change in storage assert.equal(handle.isReady(), true) - const foo = await handle.doc() + const foo = handle.doc() assert.equal(foo?.foo, "bar") await pause() @@ -315,7 +306,7 @@ describe("Repo", () => { d.foo = "bar" }) assert.equal(handle.isReady(), true) - await handle.doc() + await handle.whenReady() await pause() repo.delete(handle.url) @@ -352,7 +343,7 @@ describe("Repo", () => { const exported = await repo.export(handle.documentId) const loaded = A.load(exported) - const doc = await handle.doc() + const doc = 
handle.doc() assert.deepEqual(doc, loaded) }) @@ -386,9 +377,7 @@ describe("Repo", () => { const repo2 = new Repo({ storage, }) - const handle2 = repo2.find(handle.url) - await handle2.doc() - + const handle2 = await repo2.find(handle.url) assert.deepEqual(storage.keys(), initialKeys) }) @@ -414,9 +403,7 @@ describe("Repo", () => { const repo2 = new Repo({ storage, }) - const handle2 = repo2.find(handle.url) - await handle2.doc() - + const handle2 = await repo2.find(handle.url) assert(storage.keys().length !== 0) } }) @@ -456,7 +443,7 @@ describe("Repo", () => { const handle = repo.import(saved) assert.equal(handle.isReady(), true) - const v = await handle.doc() + const v = handle.doc() assert.equal(v?.foo, "bar") expect(A.getHistory(v)).toEqual(A.getHistory(updatedDoc)) @@ -475,7 +462,7 @@ describe("Repo", () => { const { repo } = setup() // @ts-ignore - passing something other than UInt8Array const handle = repo.import(A.from({ foo: 123 })) - const doc = await handle.doc() + const doc = handle.doc() expect(doc).toEqual({}) }) @@ -483,9 +470,39 @@ describe("Repo", () => { const { repo } = setup() // @ts-ignore - passing something other than UInt8Array const handle = repo.import({ foo: 123 }) - const doc = await handle.doc() + const doc = handle.doc() expect(doc).toEqual({}) }) + + describe("handle cache", () => { + it("contains doc handle", async () => { + const { repo } = setup() + const handle = repo.create({ foo: "bar" }) + assert(repo.handles[handle.documentId]) + }) + + it("delete removes doc handle", async () => { + const { repo } = setup() + const handle = repo.create({ foo: "bar" }) + await repo.delete(handle.documentId) + assert(repo.handles[handle.documentId] === undefined) + }) + + it("removeFromCache removes doc handle", async () => { + const { repo } = setup() + const handle = repo.create({ foo: "bar" }) + await repo.removeFromCache(handle.documentId) + assert(repo.handles[handle.documentId] === undefined) + }) + + it("removeFromCache for documentId 
not found", async () => { + const { repo } = setup() + const badDocumentId = "badbadbad" as DocumentId + const handleCacheSize = Object.keys(repo.handles).length + await repo.removeFromCache(badDocumentId) + assert(Object.keys(repo.handles).length === handleCacheSize) + }) + }) }) describe("flush behaviour", () => { @@ -532,8 +549,8 @@ describe("Repo", () => { it("should not be in a new repo yet because the storage is slow", async () => { const { pausedStorage, repo, handle, handle2 } = setup() - expect((await handle.doc()).foo).toEqual("first") - expect((await handle2.doc()).foo).toEqual("second") + expect((await handle).doc().foo).toEqual("first") + expect((await handle2).doc().foo).toEqual("second") // Reload repo const repo2 = new Repo({ @@ -541,9 +558,10 @@ describe("Repo", () => { }) // Could not find the document that is not yet saved because of slow storage. - const reloadedHandle = repo2.find<{ foo: string }>(handle.url) + await expect(async () => { + const reloadedHandle = await repo2.find<{ foo: string }>(handle.url) + }).rejects.toThrow(/Document (.*) is unavailable/) expect(pausedStorage.keys()).to.deep.equal([]) - expect(await reloadedHandle.doc()).toEqual(undefined) }) it("should be visible to a new repo after flush()", async () => { @@ -563,10 +581,10 @@ describe("Repo", () => { }) expect( - (await repo.find<{ foo: string }>(handle.documentId).doc()).foo + (await repo.find<{ foo: string }>(handle.documentId)).doc().foo ).toEqual("first") expect( - (await repo.find<{ foo: string }>(handle2.documentId).doc()).foo + (await repo.find<{ foo: string }>(handle2.documentId)).doc().foo ).toEqual("second") } }) @@ -588,13 +606,13 @@ describe("Repo", () => { }) expect( - (await repo.find<{ foo: string }>(handle.documentId).doc()).foo + (await repo.find<{ foo: string }>(handle.documentId)).doc().foo ).toEqual("first") // Really, it's okay if the second one is also flushed but I'm forcing the issue // in the test storage engine above to make sure the behaviour 
is as documented - expect( - await repo.find<{ foo: string }>(handle2.documentId).doc() - ).toEqual(undefined) + await expect(async () => { + ;(await repo.find<{ foo: string }>(handle2.documentId)).doc() + }).rejects.toThrow(/Document (.*) is unavailable/) } }) @@ -642,7 +660,7 @@ describe("Repo", () => { if (idx < numberOfPeers - 1) { network.push(pair[0]) - pair[0].whenReady() + networkReady.push(pair[0].whenReady()) } const repo = new Repo({ @@ -673,7 +691,6 @@ describe("Repo", () => { } await connectedPromise - return { repos } } @@ -685,10 +702,14 @@ describe("Repo", () => { d.foo = "bar" }) - const handleN = repos[numberOfPeers - 1].find(handle0.url) + const handleN = await repos[numberOfPeers - 1].find(handle0.url) + assert.deepStrictEqual(handleN.doc(), { foo: "bar" }) - await handleN.whenReady() - assert.deepStrictEqual(handleN.docSync(), { foo: "bar" }) + const handleNBack = repos[numberOfPeers - 1].create({ + foo: "reverse-trip", + }) + const handle0Back = await repos[0].find(handleNBack.url) + assert.deepStrictEqual(handle0Back.doc(), { foo: "reverse-trip" }) }) const setup = async ({ @@ -815,9 +836,8 @@ describe("Repo", () => { it("changes are replicated from aliceRepo to bobRepo", async () => { const { bobRepo, aliceHandle, teardown } = await setup() - const bobHandle = bobRepo.find(aliceHandle.url) - await eventPromise(bobHandle, "change") - const bobDoc = await bobHandle.doc() + const bobHandle = await bobRepo.find(aliceHandle.url) + const bobDoc = bobHandle.doc() assert.deepStrictEqual(bobDoc, { foo: "bar" }) teardown() }) @@ -825,9 +845,8 @@ describe("Repo", () => { it("can load a document from aliceRepo on charlieRepo", async () => { const { charlieRepo, aliceHandle, teardown } = await setup() - const handle3 = charlieRepo.find(aliceHandle.url) - await eventPromise(handle3, "change") - const doc3 = await handle3.doc() + const handle3 = await charlieRepo.find(aliceHandle.url) + const doc3 = handle3.doc() assert.deepStrictEqual(doc3, { foo: "bar" 
}) teardown() }) @@ -846,12 +865,11 @@ describe("Repo", () => { await bobRepo2.flush() // Now, let's load it on the original bob repo (which shares a "disk") - const bobFoundIt = bobRepo.find(inStorageHandle.url) - await bobFoundIt.whenReady() + const bobFoundIt = await bobRepo.find(inStorageHandle.url) // Before checking if it syncs, make sure we have it! // (This behaviour is mostly test-validation, we are already testing load/save elsewhere.) - assert.deepStrictEqual(await bobFoundIt.doc(), { foo: "foundOnFakeDisk" }) + assert.deepStrictEqual(bobFoundIt.doc(), { foo: "foundOnFakeDisk" }) await pause(10) @@ -891,11 +909,8 @@ describe("Repo", () => { it("charlieRepo can request a document not initially shared with it", async () => { const { charlieRepo, notForCharlie, teardown } = await setup() - const handle = charlieRepo.find(notForCharlie) - - await pause(50) - - const doc = await handle.doc() + const handle = await charlieRepo.find(notForCharlie) + const doc = handle.doc() assert.deepStrictEqual(doc, { foo: "baz" }) @@ -905,11 +920,11 @@ describe("Repo", () => { it("charlieRepo can request a document across a network of multiple peers", async () => { const { charlieRepo, notForBob, teardown } = await setup() - const handle = charlieRepo.find(notForBob) + const handle = await charlieRepo.find(notForBob) await pause(50) - const doc = await handle.doc() + const doc = handle.doc() assert.deepStrictEqual(doc, { foo: "bap" }) teardown() @@ -918,42 +933,10 @@ describe("Repo", () => { it("doesn't find a document which doesn't exist anywhere on the network", async () => { const { charlieRepo, teardown } = await setup() const url = generateAutomergeUrl() - const handle = charlieRepo.find(url) - assert.equal(handle.isReady(), false) - - const doc = await handle.doc() - assert.equal(doc, undefined) - - teardown() - }) - - it("emits an unavailable event when it's not found on the network", async () => { - const { aliceRepo, teardown } = await setup() - const url = 
generateAutomergeUrl() - const handle = aliceRepo.find(url) - assert.equal(handle.isReady(), false) - await eventPromise(handle, "unavailable") - teardown() - }) - - it("emits an unavailable event every time an unavailable doc is requested", async () => { - const { charlieRepo, teardown } = await setup() - const url = generateAutomergeUrl() - const handle = charlieRepo.find(url) - assert.equal(handle.isReady(), false) - - await Promise.all([ - eventPromise(handle, "unavailable"), - eventPromise(charlieRepo, "unavailable-document"), - ]) - // make sure it emits a second time if the doc is still unavailable - const handle2 = charlieRepo.find(url) - assert.equal(handle2.isReady(), false) - await Promise.all([ - eventPromise(handle, "unavailable"), - eventPromise(charlieRepo, "unavailable-document"), - ]) + await expect(charlieRepo.find(url)).rejects.toThrow( + /Document (.*) is unavailable/ + ) teardown() }) @@ -968,21 +951,23 @@ describe("Repo", () => { } = await setup({ connectAlice: false }) const url = stringifyAutomergeUrl({ documentId: notForCharlie }) - const handle = charlieRepo.find(url) - assert.equal(handle.isReady(), false) - - await eventPromise(handle, "unavailable") + await expect(charlieRepo.find(url)).rejects.toThrow( + /Document (.*) is unavailable/ + ) connectAliceToBob() await eventPromise(aliceRepo.networkSubsystem, "peer") - const doc = await handle.doc(["ready"]) + // Not sure why we need this pause here, but... we do. 
+ await pause(150) + const handle = await charlieRepo.find(url) + const doc = handle.doc() assert.deepStrictEqual(doc, { foo: "baz" }) // an additional find should also return the correct resolved document - const handle2 = charlieRepo.find(url) - const doc2 = await handle2.doc() + const handle2 = await charlieRepo.find(url) + const doc2 = handle2.doc() assert.deepStrictEqual(doc2, { foo: "baz" }) teardown() @@ -1018,11 +1003,9 @@ describe("Repo", () => { sharePolicy: async () => true, }) - const handle = a.find(url) - - // We expect this to be unavailable as there is no connected peer and - // the repo has no storage. - await eventPromise(handle, "unavailable") + await expect(a.find(url)).rejects.toThrow( + /Document (.*) is unavailable/ + ) // Now create a repo pointing at the storage containing the document and // connect it to the other end of the MessageChannel @@ -1032,9 +1015,14 @@ describe("Repo", () => { network: [new MessageChannelNetworkAdapter(ba)], }) + // We need a proper peer status API so we can tell when the + // peer is connected. For now we just wait a bit. 
+ await pause(50) + // The empty repo should be notified of the new peer, send it a request // and eventually resolve the handle to "READY" - await handle.whenReady() + const handle = await a.find(url) + expect(handle.state).toBe("ready") }) it("a deleted document from charlieRepo can be refetched", async () => { @@ -1050,9 +1038,8 @@ describe("Repo", () => { }) await changePromise - const handle3 = charlieRepo.find(aliceHandle.url) - await eventPromise(handle3, "change") - const doc3 = await handle3.doc() + const handle3 = await charlieRepo.find(aliceHandle.url) + const doc3 = handle3.doc() assert.deepStrictEqual(doc3, { foo: "baz" }) @@ -1078,7 +1065,7 @@ describe("Repo", () => { // make sure the doc is ready if (!doc.isReady()) { - await doc.doc() + await doc.whenReady() } // make a random change to it @@ -1096,10 +1083,10 @@ describe("Repo", () => { const data = { presence: "alice" } - const aliceHandle = aliceRepo.find( + const aliceHandle = await aliceRepo.find( stringifyAutomergeUrl({ documentId: notForCharlie }) ) - const bobHandle = bobRepo.find( + const bobHandle = await bobRepo.find( stringifyAutomergeUrl({ documentId: notForCharlie }) ) @@ -1142,7 +1129,10 @@ describe("Repo", () => { bobHandle.documentId, await charlieRepo!.storageSubsystem.id() ) - assert.deepStrictEqual(storedSyncState.sharedHeads, bobHandle.heads()) + assert.deepStrictEqual( + encodeHeads(storedSyncState.sharedHeads), + bobHandle.heads() + ) teardown() }) @@ -1242,14 +1232,14 @@ describe("Repo", () => { const nextRemoteHeadsPromise = new Promise<{ storageId: StorageId - heads: A.Heads + heads: UrlHeads }>(resolve => { handle.on("remote-heads", ({ storageId, heads }) => { resolve({ storageId, heads }) }) }) - const charlieHandle = charlieRepo.find(handle.url) + const charlieHandle = await charlieRepo.find(handle.url) await charlieHandle.whenReady() // make a change on charlie @@ -1287,34 +1277,6 @@ describe("Repo", () => { }) }) - it("peer receives a document when connection is 
recovered", async () => { - const alice = "alice" as PeerId - const bob = "bob" as PeerId - const [aliceAdapter, bobAdapter] = DummyNetworkAdapter.createConnectedPair() - const aliceRepo = new Repo({ - network: [aliceAdapter], - peerId: alice, - }) - const bobRepo = new Repo({ - network: [bobAdapter], - peerId: bob, - }) - const aliceDoc = aliceRepo.create() - aliceDoc.change((doc: any) => (doc.text = "Hello world")) - - const bobDoc = bobRepo.find(aliceDoc.url) - await eventPromise(bobDoc, "unavailable") - - aliceAdapter.peerCandidate(bob) - // Bob isn't yet connected to Alice and can't respond to her sync message - await pause(100) - bobAdapter.peerCandidate(alice) - - await bobDoc.whenReady() - - assert.equal(bobDoc.isReady(), true) - }) - describe("with peers (mesh network)", () => { const setup = async () => { // Set up three repos; connect Alice to Bob, Bob to Charlie, and Alice to Charlie @@ -1376,8 +1338,8 @@ describe("Repo", () => { const aliceHandle = aliceRepo.create() - const bobHandle = bobRepo.find(aliceHandle.url) - const charlieHandle = charlieRepo.find(aliceHandle.url) + const bobHandle = await bobRepo.find(aliceHandle.url) + const charlieHandle = await charlieRepo.find(aliceHandle.url) // Alice should not receive her own ephemeral message aliceHandle.on("ephemeral-message", () => { @@ -1415,9 +1377,8 @@ describe("Repo", () => { // pause to let the sync happen await pause(50) - const charlieHandle = charlieRepo.find(handle2.url) - await charlieHandle.doc() - assert.deepStrictEqual(charlieHandle.docSync(), { foo: "bar" }) + const charlieHandle = await charlieRepo.find(handle2.url) + assert.deepStrictEqual(charlieHandle.doc(), { foo: "bar" }) teardown() }) @@ -1434,9 +1395,8 @@ describe("Repo", () => { // pause to let the sync happen await pause(50) - const charlieHandle = charlieRepo.find(handle2.url) - await charlieHandle.doc() - assert.deepStrictEqual(charlieHandle.docSync(), { foo: "bar" }) + const charlieHandle = await 
charlieRepo.find(handle2.url) + assert.deepStrictEqual(charlieHandle.doc(), { foo: "bar" }) // now make a change to doc2 on bobs side and merge it into doc1 handle2.change(d => { @@ -1447,12 +1407,198 @@ describe("Repo", () => { // wait for the network to do it's thang await pause(350) - await charlieHandle.doc() - assert.deepStrictEqual(charlieHandle.docSync(), { foo: "baz" }) + assert.deepStrictEqual(charlieHandle.doc(), { foo: "baz" }) teardown() }) }) + + describe("the denylist", () => { + it("should immediately return an unavailable message in response to a request for a denylisted document", async () => { + const storage = new DummyStorageAdapter() + + // first create the document in storage + const dummyRepo = new Repo({ network: [], storage }) + const doc = dummyRepo.create({ foo: "bar" }) + await dummyRepo.flush() + + // Check that the document actually is in storage + let docId = doc.documentId + assert(storage.keys().some((k: string) => k.includes(docId))) + + const channel = new MessageChannel() + const { port1: clientToServer, port2: serverToClient } = channel + const server = new Repo({ + network: [new MessageChannelNetworkAdapter(serverToClient)], + storage, + denylist: [doc.url], + }) + const client = new Repo({ + network: [new MessageChannelNetworkAdapter(clientToServer)], + }) + + await Promise.all([ + eventPromise(server.networkSubsystem, "peer"), + eventPromise(client.networkSubsystem, "peer"), + ]) + + await expect(async () => { + const clientDoc = await client.find(doc.url) + }).rejects.toThrow(/Document (.*) is unavailable/) + + const openDocs = Object.keys(server.metrics().documents).length + assert.deepEqual(openDocs, 0) + }) + }) +}) + +describe("Repo heads-in-URLs functionality", () => { + const setup = () => { + const repo = new Repo({}) + const handle = repo.create() + handle.change((doc: any) => (doc.title = "Hello World")) + return { repo, handle } + } + + it("finds a document view by URL with heads", async () => { + const { repo, 
handle } = setup() + const heads = handle.heads()! + const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads }) + const view = await repo.find(url) + expect(view.doc()).toEqual({ title: "Hello World" }) + }) + + it("returns a view, not the actual handle, when finding by URL with heads", async () => { + const { repo, handle } = setup() + const heads = handle.heads()! + await handle.change((doc: any) => (doc.title = "Changed")) + const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads }) + const view = await repo.find(url) + expect(view.doc()).toEqual({ title: "Hello World" }) + expect(handle.doc()).toEqual({ title: "Changed" }) + }) + + it("changes to a document view do not affect the original", async () => { + const { repo, handle } = setup() + const heads = handle.heads()! + const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads }) + const view = await repo.find(url) + expect(() => + view.change((doc: any) => (doc.title = "Changed in View")) + ).toThrow() + expect(handle.doc()).toEqual({ title: "Hello World" }) + }) + + it("document views are read-only", async () => { + const { repo, handle } = setup() + const heads = handle.heads()! + const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads }) + const view = await repo.find(url) + expect(() => view.change((doc: any) => (doc.title = "Changed"))).toThrow() + }) + + it("finds the latest document when given a URL without heads", async () => { + const { repo, handle } = setup() + await handle.change((doc: any) => (doc.title = "Changed")) + const found = await repo.find(handle.url) + expect(found.doc()).toEqual({ title: "Changed" }) + }) + + it("getHeadsFromUrl returns heads array if present or undefined", () => { + const { repo, handle } = setup() + const heads = handle.heads()! 
+ const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads }) + expect(getHeadsFromUrl(url)).toEqual(heads) + + const urlWithoutHeads = generateAutomergeUrl() + expect(getHeadsFromUrl(urlWithoutHeads)).toBeUndefined() + }) + + it("isValidAutomergeUrl returns true for valid URLs", () => { + const { repo, handle } = setup() + const url = generateAutomergeUrl() + expect(isValidAutomergeUrl(url)).toBe(true) + + const urlWithHeads = stringifyAutomergeUrl({ + documentId: handle.documentId, + heads: handle.heads()!, + }) + expect(isValidAutomergeUrl(urlWithHeads)).toBe(true) + }) + + it("isValidAutomergeUrl returns false for invalid URLs", () => { + const { repo, handle } = setup() + expect(isValidAutomergeUrl("not a url")).toBe(false) + expect(isValidAutomergeUrl("automerge:invalidid")).toBe(false) + expect(isValidAutomergeUrl("automerge:validid#invalidhead")).toBe(false) + }) + + it("parseAutomergeUrl extracts documentId and heads", () => { + const { repo, handle } = setup() + const url = stringifyAutomergeUrl({ + documentId: handle.documentId, + heads: handle.heads()!, + }) + const parsed = parseAutomergeUrl(url) + expect(parsed.documentId).toBe(handle.documentId) + expect(parsed.heads).toEqual(handle.heads()) + }) + + it("stringifyAutomergeUrl creates valid URL", () => { + const { repo, handle } = setup() + const url = stringifyAutomergeUrl({ + documentId: handle.documentId, + heads: handle.heads()!, + }) + expect(isValidAutomergeUrl(url)).toBe(true) + const parsed = parseAutomergeUrl(url) + expect(parsed.documentId).toBe(handle.documentId) + expect(parsed.heads).toEqual(handle.heads()) + }) +}) + +describe("Repo.find() abort behavior", () => { + it("aborts immediately if signal is already aborted", async () => { + const repo = new Repo() + const controller = new AbortController() + controller.abort() + + await expect( + repo.find(generateAutomergeUrl(), { signal: controller.signal }) + ).rejects.toThrow("Operation aborted") + }) + + it("can abort 
while waiting for ready state", async () => { + // Create a repo with no network adapters so document can't become ready + const repo = new Repo() + const url = generateAutomergeUrl() + + const controller = new AbortController() + + // Start find and abort after a moment + const findPromise = repo.find(url, { signal: controller.signal }) + controller.abort() + + await expect(findPromise).rejects.toThrow("Operation aborted") + await expect(findPromise).rejects.not.toThrow("unavailable") + }) + + it("returns handle immediately when allow unavailable is true, even with abort signal", async () => { + const repo = new Repo() + const controller = new AbortController() + const url = generateAutomergeUrl() + + const handle = await repo.find(url, { + allowableStates: ["unavailable"], + signal: controller.signal, + }) + + expect(handle).toBeDefined() + + // Abort shouldn't affect the result since we skipped ready + controller.abort() + expect(handle.url).toBe(url) + }) }) const warn = console.warn diff --git a/packages/automerge-repo/test/remoteHeads.test.ts b/packages/automerge-repo/test/remoteHeads.test.ts index 23f626118..7678553a8 100644 --- a/packages/automerge-repo/test/remoteHeads.test.ts +++ b/packages/automerge-repo/test/remoteHeads.test.ts @@ -13,6 +13,7 @@ import { import { DummyStorageAdapter } from "../src/helpers/DummyStorageAdapter.js" import { collectMessages } from "./helpers/collectMessages.js" import { TestDoc } from "./types.js" +import { pause } from "../src/helpers/pause.js" describe("DocHandle.remoteHeads", () => { const TEST_ID = parseAutomergeUrl(generateAutomergeUrl()).documentId @@ -128,13 +129,15 @@ describe("DocHandle.remoteHeads", () => { const aliceDoc = alice.create() aliceDoc.change(d => (d.foo = "bar")) + await pause(50) + // bob waits for the document to arrive - const bobDoc = bob.find(aliceDoc.url) - await bobDoc.whenReady() + const bobDoc = await bob.find(aliceDoc.url) // alice's service worker waits for the document to arrive - const 
aliceServiceWorkerDoc = aliceServiceWorker.find(aliceDoc.documentId) - await aliceServiceWorkerDoc.whenReady() + const aliceServiceWorkerDoc = await aliceServiceWorker.find( + aliceDoc.documentId + ) let aliceSeenByBobPromise = new Promise( resolve => { @@ -168,17 +171,21 @@ describe("DocHandle.remoteHeads", () => { const bobDocB = bob.create() bobDocB.change(d => (d.foo = "B")) + await pause(50) + // alice opens doc A - const aliceDocA = alice.find(bobDocA.url) + const aliceDocAPromise = alice.find(bobDocA.url) const remoteHeadsChangedMessages = ( await collectMessages({ emitter: alice.networkSubsystem, event: "message", - until: aliceDocA.whenReady(), + until: aliceDocAPromise, }) ).filter(({ type }) => type === "remote-heads-changed") + const aliceDocA = await aliceDocAPromise + // we should only be notified of the head changes of doc A assert( remoteHeadsChangedMessages.every( @@ -197,6 +204,8 @@ describe("DocHandle.remoteHeads", () => { const bobDocB = bob.create() bobDocB.change(d => (d.foo = "B")) + await pause(50) + // alice opens the docs const _aliceDocA = alice.find(bobDocA.url) const _aliceDocB = alice.find(bobDocB.url) @@ -209,19 +218,21 @@ describe("DocHandle.remoteHeads", () => { // stored remote heads immediately. 
// open doc and subscribe alice's second tab to bob's service worker - const alice2DocA = alice2.find(bobDocA.url) + const alice2DocAPromise = alice2.find(bobDocA.url) alice2.subscribeToRemotes([bobServiceWorkerStorageId]) const remoteHeadsChangedMessages = ( await collectMessages({ emitter: alice2.networkSubsystem, event: "message", - until: alice2DocA.whenReady(), + until: alice2DocAPromise, }) ).filter(({ type }) => type === "remote-heads-changed") + const alice2DocA = await alice2DocAPromise + // we should only be notified of the head changes of doc A - assert.strictEqual(remoteHeadsChangedMessages.length, 2) + assert.strictEqual(remoteHeadsChangedMessages.length, 1) assert( remoteHeadsChangedMessages.every( d => d.documentId === alice2DocA.documentId @@ -242,18 +253,22 @@ describe("DocHandle.remoteHeads", () => { // alice subscribes to bob's service worker alice.subscribeToRemotes([bobServiceWorkerStorageId]) + await pause(50) + // alice opens doc A - const alice1DocA = alice.find(bobDocA.url) + const alice1DocAPromise = alice.find(bobDocA.url) const remoteHeadsChangedMessages = ( await collectMessages({ emitter: alice.networkSubsystem, event: "message", - until: alice1DocA.whenReady(), + until: alice1DocAPromise, }) ).filter(({ type }) => type === "remote-heads-changed") - assert.strictEqual(remoteHeadsChangedMessages.length, 2) + const alice1DocA = await alice1DocAPromise + + assert.strictEqual(remoteHeadsChangedMessages.length, 1) assert( remoteHeadsChangedMessages.every( d => d.documentId === alice1DocA.documentId diff --git a/packages/create-repo-node-app/package.json b/packages/create-repo-node-app/package.json index 0b1173247..45f152a8e 100644 --- a/packages/create-repo-node-app/package.json +++ b/packages/create-repo-node-app/package.json @@ -1,6 +1,6 @@ { "name": "@automerge/create-repo-node-app", - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "description": "Create an automerge-repo app for node", "repository": 
"https://github.com/automerge/automerge-repo/tree/master/packages/create-repo-node-app", "author": "Alex Good ", diff --git a/packages/create-vite-app/package.json b/packages/create-vite-app/package.json index 7fe499943..fbd88b4f0 100644 --- a/packages/create-vite-app/package.json +++ b/packages/create-vite-app/package.json @@ -1,6 +1,6 @@ { "name": "@automerge/create-vite-app", - "version": "2.0.0-alpha.11", + "version": "2.0.0-alpha.22", "description": "Create an automerge-repo app which uses Vite", "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/create-vite-app", "author": "Alex Good ", diff --git a/packages/create-vite-app/template/package.json b/packages/create-vite-app/template/package.json index c8a38137a..f45a428e8 100644 --- a/packages/create-vite-app/template/package.json +++ b/packages/create-vite-app/template/package.json @@ -16,8 +16,8 @@ "@automerge/automerge-repo-network-websocket": "^1.2.1", "@automerge/automerge-repo-react-hooks": "^1.2.1", "@automerge/automerge-repo-storage-indexeddb": "^1.2.1", - "react": "^18.2.0", - "react-dom": "^18.2.0" + "react": "^18.3.0", + "react-dom": "^18.3.0" }, "devDependencies": { "@types/react": "^18.2.64", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 517470d98..0ad1b636c 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -34,19 +34,19 @@ importers: version: 8.5.12 '@typescript-eslint/eslint-plugin': specifier: ^7.13.1 - version: 7.18.0(@typescript-eslint/parser@7.18.0)(eslint@9.9.0)(typescript@5.5.4) + version: 7.18.0(@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4))(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4) '@typescript-eslint/parser': specifier: ^7.13.1 - version: 7.18.0(eslint@9.9.0)(typescript@5.5.4) + version: 7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4) '@vitejs/plugin-react': - specifier: ^4.3.1 - version: 4.3.1(vite@5.4.1) + specifier: ^4.3.4 + version: 4.3.4(vite@6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0)) 
'@vitest/coverage-v8': - specifier: ^1.4.0 - version: 1.6.0(vitest@1.6.0) + specifier: ^3.0.3 + version: 3.0.4(vitest@3.0.4) '@vitest/ui': - specifier: ^1.4.0 - version: 1.6.0(vitest@1.6.0) + specifier: ^3.0.3 + version: 3.0.4(vitest@3.0.4) c8: specifier: ^7.14.0 version: 7.14.0 @@ -58,31 +58,31 @@ importers: version: 2.0.0 eslint: specifier: ^9.5.0 - version: 9.9.0 + version: 9.9.0(jiti@1.21.6) eslint-config-airbnb: specifier: ^19.0.4 - version: 19.0.4(eslint-plugin-import@2.29.1)(eslint-plugin-jsx-a11y@6.9.0)(eslint-plugin-react-hooks@4.6.2)(eslint-plugin-react@7.35.0)(eslint@9.9.0) + version: 19.0.4(eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4))(eslint@9.9.0(jiti@1.21.6)))(eslint-plugin-jsx-a11y@6.9.0(eslint@9.9.0(jiti@1.21.6)))(eslint-plugin-react-hooks@4.6.2(eslint@9.9.0(jiti@1.21.6)))(eslint-plugin-react@7.35.0(eslint@9.9.0(jiti@1.21.6)))(eslint@9.9.0(jiti@1.21.6)) eslint-config-airbnb-base: specifier: ^15.0.0 - version: 15.0.0(eslint-plugin-import@2.29.1)(eslint@9.9.0) + version: 15.0.0(eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4))(eslint@9.9.0(jiti@1.21.6)))(eslint@9.9.0(jiti@1.21.6)) eslint-plugin-import: specifier: ^2.29.1 - version: 2.29.1(@typescript-eslint/parser@7.18.0)(eslint@9.9.0) + version: 2.29.1(@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4))(eslint@9.9.0(jiti@1.21.6)) eslint-plugin-jsx-a11y: specifier: ^6.8.0 - version: 6.9.0(eslint@9.9.0) + version: 6.9.0(eslint@9.9.0(jiti@1.21.6)) eslint-plugin-react: specifier: ^7.34.1 - version: 7.35.0(eslint@9.9.0) + version: 7.35.0(eslint@9.9.0(jiti@1.21.6)) eslint-plugin-react-hooks: specifier: ^4.6.0 - version: 4.6.2(eslint@9.9.0) + version: 4.6.2(eslint@9.9.0(jiti@1.21.6)) globals: specifier: ^15.6.0 version: 15.9.0 lerna: specifier: ^8.1.6 - version: 8.1.8 + version: 8.1.8(@swc/core@1.7.11)(encoding@0.1.13) npm-run-all: specifier: ^4.1.5 version: 4.1.5 
@@ -97,7 +97,7 @@ importers: version: 2.8.8 ts-node: specifier: ^10.9.2 - version: 10.9.2(@types/node@20.14.15)(typescript@5.5.4) + version: 10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4) typedoc: specifier: ^0.25.12 version: 0.25.13(typescript@5.5.4) @@ -105,14 +105,14 @@ importers: specifier: ^5.4.2 version: 5.5.4 vite: - specifier: ^5.2.0 - version: 5.4.1(@types/node@20.14.15) + specifier: ^6.0.11 + version: 6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0) vite-plugin-wasm: - specifier: ^3.3.0 - version: 3.3.0(vite@5.4.1) + specifier: ^3.4.1 + version: 3.4.1(vite@6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0)) vitest: - specifier: ^1.4.0 - version: 1.6.0(@types/node@20.14.15)(@vitest/ui@1.6.0) + specifier: ^3.0.4 + version: 3.0.4(@types/debug@4.1.12)(@types/node@20.14.15)(@vitest/ui@3.0.4)(jiti@1.21.6)(jsdom@22.1.0)(yaml@2.5.0) examples/react-counter: dependencies: @@ -135,10 +135,10 @@ importers: specifier: workspace:* version: link:../../packages/automerge-repo-storage-indexeddb react: - specifier: ^18.2.0 + specifier: ^18.3.0 version: 18.3.1 react-dom: - specifier: ^18.2.0 + specifier: ^18.3.0 version: 18.3.1(react@18.3.1) examples/react-todo: @@ -161,9 +161,6 @@ importers: '@automerge/automerge-repo-storage-indexeddb': specifier: workspace:* version: link:../../packages/automerge-repo-storage-indexeddb - '@ibm/plex': - specifier: ^6.1.1 - version: 6.4.1 autoprefixer: specifier: ^10.4.13 version: 10.4.20(postcss@8.4.41) @@ -174,15 +171,18 @@ importers: specifier: ^8.4.21 version: 8.4.41 react: - specifier: ^18.2.0 + specifier: ^18.3.0 version: 18.3.1 react-dom: - specifier: ^18.2.0 + specifier: ^18.3.0 version: 18.3.1(react@18.3.1) + react-error-boundary: + specifier: ^5.0.0 + version: 5.0.0(react@18.3.1) devDependencies: tailwindcss: specifier: ^3.2.4 - version: 3.4.10(ts-node@10.9.2) + version: 3.4.10(ts-node@10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4)) examples/react-use-awareness: dependencies: @@ -202,10 
+202,10 @@ importers: specifier: ^5.0.1 version: 5.0.1 react: - specifier: ^18.2.0 + specifier: ^18.3.0 version: 18.3.1 react-dom: - specifier: ^18.2.0 + specifier: ^18.3.0 version: 18.3.1(react@18.3.1) react-usestateref: specifier: ^1.0.8 @@ -216,7 +216,7 @@ importers: devDependencies: '@vitejs/plugin-react-swc': specifier: ^3.2.0 - version: 3.7.0(vite@5.4.1) + version: 3.7.0(vite@6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0)) examples/svelte-counter: dependencies: @@ -244,13 +244,13 @@ importers: devDependencies: '@sveltejs/vite-plugin-svelte': specifier: ^3.0.2 - version: 3.1.1(svelte@4.2.18)(vite@5.4.1) + version: 3.1.1(svelte@4.2.18)(vite@5.4.1(@types/node@20.14.15)) '@tsconfig/svelte': specifier: ^5.0.2 version: 5.0.4 svelte-check: specifier: ^3.6.8 - version: 3.8.5(@babel/core@7.25.2)(svelte@4.2.18) + version: 3.8.5(@babel/core@7.26.7)(postcss-load-config@4.0.2(postcss@8.5.1)(ts-node@10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4)))(postcss@8.5.1)(svelte@4.2.18) vite: specifier: ^5.2.0 version: 5.4.1(@types/node@20.14.15) @@ -269,6 +269,9 @@ importers: express: specifier: ^4.18.1 version: 4.19.2 + prom-client: + specifier: ^15.1.3 + version: 15.1.3 ws: specifier: ^8.7.0 version: 8.18.0 @@ -302,7 +305,7 @@ importers: version: 2.1.0 ts-node: specifier: ^10.9.1 - version: 10.9.2(@types/node@20.14.15)(typescript@5.5.4) + version: 10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4) uuid: specifier: ^9.0.0 version: 9.0.1 @@ -368,27 +371,39 @@ importers: specifier: ^5.0.1 version: 5.0.1 react: - specifier: ^18.2.0 + specifier: ^18.3.0 version: 18.3.1 react-dom: - specifier: ^18.2.0 + specifier: ^18.3.0 version: 18.3.1(react@18.3.1) react-usestateref: specifier: ^1.0.8 version: 1.0.9(react@18.3.1) devDependencies: + '@testing-library/jest-dom': + specifier: ^6.6.3 + version: 6.6.3 '@testing-library/react': specifier: ^14.0.0 - version: 14.3.1(react-dom@18.3.1)(react@18.3.1) + version: 
14.3.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + eslint-plugin-react-hooks: + specifier: ^5.1.0 + version: 5.1.0(eslint@9.9.0(jiti@1.21.6)) jsdom: specifier: ^22.1.0 version: 22.1.0 + react-error-boundary: + specifier: ^5.0.0 + version: 5.0.0(react@18.3.1) rollup-plugin-visualizer: specifier: ^5.9.3 - version: 5.12.0 + version: 5.12.0(rollup@4.34.0) + vite: + specifier: ^6.0.7 + version: 6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0) vite-plugin-dts: specifier: ^3.9.1 - version: 3.9.1(@types/node@20.14.15)(typescript@5.5.4)(vite@5.4.1) + version: 3.9.1(@types/node@20.14.15)(rollup@4.34.0)(typescript@5.5.4)(vite@6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0)) packages/automerge-repo-storage-indexeddb: dependencies: @@ -427,6 +442,9 @@ importers: packages: + '@adobe/css-tools@4.4.1': + resolution: {integrity: sha512-12WGKBQzjUAI4ayyF4IAtfw2QR/IDoqk6jTddXDhtYTJF9ASmoE1zst7cVtP0aL/F1jUJL5r+JxKXKEgHNbEUQ==} + '@alloc/quick-lru@5.2.0': resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==} engines: {node: '>=10'} @@ -442,54 +460,62 @@ packages: resolution: {integrity: sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==} engines: {node: '>=6.9.0'} - '@babel/compat-data@7.25.2': - resolution: {integrity: sha512-bYcppcpKBvX4znYaPEeFau03bp89ShqNMLs+rmdptMw+heSZh9+z84d2YG+K7cYLbWwzdjtDoW/uqZmPjulClQ==} + '@babel/code-frame@7.26.2': + resolution: {integrity: sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==} engines: {node: '>=6.9.0'} - '@babel/core@7.25.2': - resolution: {integrity: sha512-BBt3opiCOxUr9euZ5/ro/Xv8/V7yJ5bjYMqG/C1YAo8MIKAnumZalCN+msbci3Pigy4lIQfPUpfMM27HMGaYEA==} + '@babel/compat-data@7.26.5': + resolution: {integrity: sha512-XvcZi1KWf88RVbF9wn8MN6tYFloU5qX8KjuF3E1PVBmJ9eypXfs4GRiJwLuTZL0iSnJUKn1BFPa5BPZZJyFzPg==} engines: {node: '>=6.9.0'} - '@babel/generator@7.25.0': - resolution: 
{integrity: sha512-3LEEcj3PVW8pW2R1SR1M89g/qrYk/m/mB/tLqn7dn4sbBUQyTqnlod+II2U4dqiGtUmkcnAmkMDralTFZttRiw==} + '@babel/core@7.26.7': + resolution: {integrity: sha512-SRijHmF0PSPgLIBYlWnG0hyeJLwXE2CgpsXaMOrtt2yp9/86ALw6oUlj9KYuZ0JN07T4eBMVIW4li/9S1j2BGA==} engines: {node: '>=6.9.0'} - '@babel/helper-compilation-targets@7.25.2': - resolution: {integrity: sha512-U2U5LsSaZ7TAt3cfaymQ8WHh0pxvdHoEk6HVpaexxixjyEquMh0L0YNJNM6CTGKMXV1iksi0iZkGw4AcFkPaaw==} + '@babel/generator@7.26.5': + resolution: {integrity: sha512-2caSP6fN9I7HOe6nqhtft7V4g7/V/gfDsC3Ag4W7kEzzvRGKqiv0pu0HogPiZ3KaVSoNDhUws6IJjDjpfmYIXw==} engines: {node: '>=6.9.0'} - '@babel/helper-module-imports@7.24.7': - resolution: {integrity: sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==} + '@babel/helper-compilation-targets@7.26.5': + resolution: {integrity: sha512-IXuyn5EkouFJscIDuFF5EsiSolseme1s0CZB+QxVugqJLYmKdxI1VfIBOst0SUu4rnk2Z7kqTwmoO1lp3HIfnA==} engines: {node: '>=6.9.0'} - '@babel/helper-module-transforms@7.25.2': - resolution: {integrity: sha512-BjyRAbix6j/wv83ftcVJmBt72QtHI56C7JXZoG2xATiLpmoC7dpd8WnkikExHDVPpi/3qCmO6WY1EaXOluiecQ==} + '@babel/helper-module-imports@7.25.9': + resolution: {integrity: sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==} engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0 - '@babel/helper-plugin-utils@7.24.8': - resolution: {integrity: sha512-FFWx5142D8h2Mgr/iPVGH5G7w6jDn4jUSpZTyDnQO0Yn7Ks2Kuz6Pci8H6MPCoUJegd/UZQ3tAvfLCxQSnWWwg==} + '@babel/helper-module-transforms@7.26.0': + resolution: {integrity: sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==} engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 - '@babel/helper-simple-access@7.24.7': - resolution: {integrity: sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==} + '@babel/helper-plugin-utils@7.26.5': + 
resolution: {integrity: sha512-RS+jZcRdZdRFzMyr+wcsaqOmld1/EqTghfaBGQQd/WnRdzdlvSZ//kF7U8VQTxf1ynZ4cjUcYgjVGx13ewNPMg==} engines: {node: '>=6.9.0'} '@babel/helper-string-parser@7.24.8': resolution: {integrity: sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==} engines: {node: '>=6.9.0'} + '@babel/helper-string-parser@7.25.9': + resolution: {integrity: sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==} + engines: {node: '>=6.9.0'} + '@babel/helper-validator-identifier@7.24.7': resolution: {integrity: sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==} engines: {node: '>=6.9.0'} - '@babel/helper-validator-option@7.24.8': - resolution: {integrity: sha512-xb8t9tD1MHLungh/AIoWYN+gVHaB9kwlu8gffXGSt3FFEIT7RjS+xWbc2vUD1UTZdIpKj/ab3rdqJ7ufngyi2Q==} + '@babel/helper-validator-identifier@7.25.9': + resolution: {integrity: sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-option@7.25.9': + resolution: {integrity: sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==} engines: {node: '>=6.9.0'} - '@babel/helpers@7.25.0': - resolution: {integrity: sha512-MjgLZ42aCm0oGjJj8CtSM3DB8NOOf8h2l7DCTePJs29u+v7yO/RBX9nShlKMgFnRks/Q4tBAe7Hxnov9VkGwLw==} + '@babel/helpers@7.26.7': + resolution: {integrity: sha512-8NHiL98vsi0mbPQmYAGWwfcFaOy4j2HY49fXJCfuDcdE7fMIsH9a7GdaeXpIBsbT7307WU8KCMp5pUVDNL4f9A==} engines: {node: '>=6.9.0'} '@babel/highlight@7.24.7': @@ -501,14 +527,19 @@ packages: engines: {node: '>=6.0.0'} hasBin: true - '@babel/plugin-transform-react-jsx-self@7.24.7': - resolution: {integrity: sha512-fOPQYbGSgH0HUp4UJO4sMBFjY6DuWq+2i8rixyUMb3CdGixs/gccURvYOAhajBdKDoGajFr3mUq5rH3phtkGzw==} + '@babel/parser@7.26.7': + resolution: {integrity: 
sha512-kEvgGGgEjRUutvdVvZhbn/BxVt+5VSpwXz1j3WYXQbXDo8KzFOPNG2GQbdAiNq8g6wn1yKk7C/qrke03a84V+w==} + engines: {node: '>=6.0.0'} + hasBin: true + + '@babel/plugin-transform-react-jsx-self@7.25.9': + resolution: {integrity: sha512-y8quW6p0WHkEhmErnfe58r7x0A70uKphQm8Sp8cV7tjNQwK56sNVK0M73LK3WuYmsuyrftut4xAkjjgU0twaMg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-react-jsx-source@7.24.7': - resolution: {integrity: sha512-J2z+MWzZHVOemyLweMqngXrgGC42jQ//R0KdxqkIz/OrbVIIlhFI3WigZ5fO+nwFvBlncr4MGapd8vTyc7RPNQ==} + '@babel/plugin-transform-react-jsx-source@7.25.9': + resolution: {integrity: sha512-+iqjT8xmXhhYv4/uiYd8FNQsraMFZIfxVSqxxVSZP0WbbSAWvBXAul0m/zu+7Vv4O/3WtApy9pmaTMiumEZgfg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 @@ -517,21 +548,29 @@ packages: resolution: {integrity: sha512-7dRy4DwXwtzBrPbZflqxnvfxLF8kdZXPkhymtDeFoFqE6ldzjQFgYTtYIFARcLEYDrqfBfYcZt1WqFxRoyC9Rw==} engines: {node: '>=6.9.0'} - '@babel/template@7.25.0': - resolution: {integrity: sha512-aOOgh1/5XzKvg1jvVz7AVrx2piJ2XBi227DHmbY6y+bM9H2FlN+IfecYu4Xl0cNiiVejlsCri89LUsbj8vJD9Q==} + '@babel/template@7.25.9': + resolution: {integrity: sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==} engines: {node: '>=6.9.0'} - '@babel/traverse@7.25.3': - resolution: {integrity: sha512-HefgyP1x754oGCsKmV5reSmtV7IXj/kpaE1XYY+D9G5PvKKoFfSbiS4M77MdjuwlZKDIKFCffq9rPU+H/s3ZdQ==} + '@babel/traverse@7.26.7': + resolution: {integrity: sha512-1x1sgeyRLC3r5fQOM0/xtQKsYjyxmFjaOrLJNtZ81inNjyJHGIolTULPiSc/2qe1/qfpFLisLQYFnnZl7QoedA==} engines: {node: '>=6.9.0'} '@babel/types@7.25.2': resolution: {integrity: sha512-YTnYtra7W9e6/oAZEHj0bJehPRUlLH9/fbpT5LfB0NhQXyALCRkRs3zH9v07IYhkgpqX6Z78FnuccZr/l4Fs4Q==} engines: {node: '>=6.9.0'} + '@babel/types@7.26.7': + resolution: {integrity: sha512-t8kDRGrKXyp6+tjUh7hw2RLyclsW4TRoRvRHtSyAX9Bb5ldlFh+90YAYY6awRXrlB4G5G2izNeGySpATlFzmOg==} + engines: {node: '>=6.9.0'} + 
'@bcoe/v8-coverage@0.2.3': resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} + '@bcoe/v8-coverage@1.0.2': + resolution: {integrity: sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==} + engines: {node: '>=18'} + '@cbor-extract/cbor-extract-darwin-arm64@2.2.0': resolution: {integrity: sha512-P7swiOAdF7aSi0H+tHtHtr6zrpF3aAq/W9FXx5HektRvLTM2O89xCyXF3pk7pLc7QpaY7AoaE8UowVf9QBdh3w==} cpu: [arm64] @@ -581,138 +620,288 @@ packages: cpu: [ppc64] os: [aix] + '@esbuild/aix-ppc64@0.24.2': + resolution: {integrity: sha512-thpVCb/rhxE/BnMLQ7GReQLLN8q9qbHmI55F4489/ByVg2aQaQ6kbcLb6FHkocZzQhxc4gx0sCk0tJkKBFzDhA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + '@esbuild/android-arm64@0.21.5': resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==} engines: {node: '>=12'} cpu: [arm64] os: [android] + '@esbuild/android-arm64@0.24.2': + resolution: {integrity: sha512-cNLgeqCqV8WxfcTIOeL4OAtSmL8JjcN6m09XIgro1Wi7cF4t/THaWEa7eL5CMoMBdjoHOTh/vwTO/o2TRXIyzg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + '@esbuild/android-arm@0.21.5': resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==} engines: {node: '>=12'} cpu: [arm] os: [android] + '@esbuild/android-arm@0.24.2': + resolution: {integrity: sha512-tmwl4hJkCfNHwFB3nBa8z1Uy3ypZpxqxfTQOcHX+xRByyYgunVbZ9MzUUfb0RxaHIMnbHagwAxuTL+tnNM+1/Q==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + '@esbuild/android-x64@0.21.5': resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==} engines: {node: '>=12'} cpu: [x64] os: [android] + '@esbuild/android-x64@0.24.2': + resolution: {integrity: sha512-B6Q0YQDqMx9D7rvIcsXfmJfvUYLoP722bgfBlO5cGvNVb5V/+Y7nhBE3mHV9OpxBf4eAS2S68KZztiPaWq4XYw==} + engines: {node: '>=18'} + 
cpu: [x64] + os: [android] + '@esbuild/darwin-arm64@0.21.5': resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==} engines: {node: '>=12'} cpu: [arm64] os: [darwin] + '@esbuild/darwin-arm64@0.24.2': + resolution: {integrity: sha512-kj3AnYWc+CekmZnS5IPu9D+HWtUI49hbnyqk0FLEJDbzCIQt7hg7ucF1SQAilhtYpIujfaHr6O0UHlzzSPdOeA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + '@esbuild/darwin-x64@0.21.5': resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==} engines: {node: '>=12'} cpu: [x64] os: [darwin] + '@esbuild/darwin-x64@0.24.2': + resolution: {integrity: sha512-WeSrmwwHaPkNR5H3yYfowhZcbriGqooyu3zI/3GGpF8AyUdsrrP0X6KumITGA9WOyiJavnGZUwPGvxvwfWPHIA==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + '@esbuild/freebsd-arm64@0.21.5': resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==} engines: {node: '>=12'} cpu: [arm64] os: [freebsd] + '@esbuild/freebsd-arm64@0.24.2': + resolution: {integrity: sha512-UN8HXjtJ0k/Mj6a9+5u6+2eZ2ERD7Edt1Q9IZiB5UZAIdPnVKDoG7mdTVGhHJIeEml60JteamR3qhsr1r8gXvg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + '@esbuild/freebsd-x64@0.21.5': resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==} engines: {node: '>=12'} cpu: [x64] os: [freebsd] + '@esbuild/freebsd-x64@0.24.2': + resolution: {integrity: sha512-TvW7wE/89PYW+IevEJXZ5sF6gJRDY/14hyIGFXdIucxCsbRmLUcjseQu1SyTko+2idmCw94TgyaEZi9HUSOe3Q==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + '@esbuild/linux-arm64@0.21.5': resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==} engines: {node: '>=12'} cpu: [arm64] os: [linux] + '@esbuild/linux-arm64@0.24.2': + resolution: {integrity: 
sha512-7HnAD6074BW43YvvUmE/35Id9/NB7BeX5EoNkK9obndmZBUk8xmJJeU7DwmUeN7tkysslb2eSl6CTrYz6oEMQg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + '@esbuild/linux-arm@0.21.5': resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==} engines: {node: '>=12'} cpu: [arm] os: [linux] + '@esbuild/linux-arm@0.24.2': + resolution: {integrity: sha512-n0WRM/gWIdU29J57hJyUdIsk0WarGd6To0s+Y+LwvlC55wt+GT/OgkwoXCXvIue1i1sSNWblHEig00GBWiJgfA==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + '@esbuild/linux-ia32@0.21.5': resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==} engines: {node: '>=12'} cpu: [ia32] os: [linux] + '@esbuild/linux-ia32@0.24.2': + resolution: {integrity: sha512-sfv0tGPQhcZOgTKO3oBE9xpHuUqguHvSo4jl+wjnKwFpapx+vUDcawbwPNuBIAYdRAvIDBfZVvXprIj3HA+Ugw==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + '@esbuild/linux-loong64@0.21.5': resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==} engines: {node: '>=12'} cpu: [loong64] os: [linux] + '@esbuild/linux-loong64@0.24.2': + resolution: {integrity: sha512-CN9AZr8kEndGooS35ntToZLTQLHEjtVB5n7dl8ZcTZMonJ7CCfStrYhrzF97eAecqVbVJ7APOEe18RPI4KLhwQ==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + '@esbuild/linux-mips64el@0.21.5': resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==} engines: {node: '>=12'} cpu: [mips64el] os: [linux] + '@esbuild/linux-mips64el@0.24.2': + resolution: {integrity: sha512-iMkk7qr/wl3exJATwkISxI7kTcmHKE+BlymIAbHO8xanq/TjHaaVThFF6ipWzPHryoFsesNQJPE/3wFJw4+huw==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + '@esbuild/linux-ppc64@0.21.5': resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==} engines: {node: '>=12'} cpu: 
[ppc64] os: [linux] + '@esbuild/linux-ppc64@0.24.2': + resolution: {integrity: sha512-shsVrgCZ57Vr2L8mm39kO5PPIb+843FStGt7sGGoqiiWYconSxwTiuswC1VJZLCjNiMLAMh34jg4VSEQb+iEbw==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + '@esbuild/linux-riscv64@0.21.5': resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==} engines: {node: '>=12'} cpu: [riscv64] os: [linux] + '@esbuild/linux-riscv64@0.24.2': + resolution: {integrity: sha512-4eSFWnU9Hhd68fW16GD0TINewo1L6dRrB+oLNNbYyMUAeOD2yCK5KXGK1GH4qD/kT+bTEXjsyTCiJGHPZ3eM9Q==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + '@esbuild/linux-s390x@0.21.5': resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==} engines: {node: '>=12'} cpu: [s390x] os: [linux] + '@esbuild/linux-s390x@0.24.2': + resolution: {integrity: sha512-S0Bh0A53b0YHL2XEXC20bHLuGMOhFDO6GN4b3YjRLK//Ep3ql3erpNcPlEFed93hsQAjAQDNsvcK+hV90FubSw==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + '@esbuild/linux-x64@0.21.5': resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==} engines: {node: '>=12'} cpu: [x64] os: [linux] + '@esbuild/linux-x64@0.24.2': + resolution: {integrity: sha512-8Qi4nQcCTbLnK9WoMjdC9NiTG6/E38RNICU6sUNqK0QFxCYgoARqVqxdFmWkdonVsvGqWhmm7MO0jyTqLqwj0Q==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-arm64@0.24.2': + resolution: {integrity: sha512-wuLK/VztRRpMt9zyHSazyCVdCXlpHkKm34WUyinD2lzK07FAHTq0KQvZZlXikNWkDGoT6x3TD51jKQ7gMVpopw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + '@esbuild/netbsd-x64@0.21.5': resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==} engines: {node: '>=12'} cpu: [x64] os: [netbsd] + '@esbuild/netbsd-x64@0.24.2': + resolution: {integrity: 
sha512-VefFaQUc4FMmJuAxmIHgUmfNiLXY438XrL4GDNV1Y1H/RW3qow68xTwjZKfj/+Plp9NANmzbH5R40Meudu8mmw==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.24.2': + resolution: {integrity: sha512-YQbi46SBct6iKnszhSvdluqDmxCJA+Pu280Av9WICNwQmMxV7nLRHZfjQzwbPs3jeWnuAhE9Jy0NrnJ12Oz+0A==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + '@esbuild/openbsd-x64@0.21.5': resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==} engines: {node: '>=12'} cpu: [x64] os: [openbsd] + '@esbuild/openbsd-x64@0.24.2': + resolution: {integrity: sha512-+iDS6zpNM6EnJyWv0bMGLWSWeXGN/HTaF/LXHXHwejGsVi+ooqDfMCCTerNFxEkM3wYVcExkeGXNqshc9iMaOA==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + '@esbuild/sunos-x64@0.21.5': resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==} engines: {node: '>=12'} cpu: [x64] os: [sunos] + '@esbuild/sunos-x64@0.24.2': + resolution: {integrity: sha512-hTdsW27jcktEvpwNHJU4ZwWFGkz2zRJUz8pvddmXPtXDzVKTTINmlmga3ZzwcuMpUvLw7JkLy9QLKyGpD2Yxig==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + '@esbuild/win32-arm64@0.21.5': resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==} engines: {node: '>=12'} cpu: [arm64] os: [win32] + '@esbuild/win32-arm64@0.24.2': + resolution: {integrity: sha512-LihEQ2BBKVFLOC9ZItT9iFprsE9tqjDjnbulhHoFxYQtQfai7qfluVODIYxt1PgdoyQkz23+01rzwNwYfutxUQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + '@esbuild/win32-ia32@0.21.5': resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==} engines: {node: '>=12'} cpu: [ia32] os: [win32] + '@esbuild/win32-ia32@0.24.2': + resolution: {integrity: sha512-q+iGUwfs8tncmFC9pcnD5IvRHAzmbwQ3GPS5/ceCyHdjXubwQWI12MKWSNSMYLJMq23/IUCvJMS76PDqXe1fxA==} + engines: {node: '>=18'} + cpu: [ia32] + 
os: [win32] + '@esbuild/win32-x64@0.21.5': resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==} engines: {node: '>=12'} cpu: [x64] os: [win32] + '@esbuild/win32-x64@0.24.2': + resolution: {integrity: sha512-7VTgWzgMGvup6aSqDPLiW5zHaxYJGTO4OokMjIlrCtf+VpEL+cXKtCvg723iguPYI5oaUNdS+/V7OU2gvXVWEg==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + '@eslint-community/eslint-utils@4.4.0': resolution: {integrity: sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -751,13 +940,6 @@ packages: resolution: {integrity: sha512-H9XAx3hc0BQHY6l+IFSWHDySypcXsvsuLhgYLUGywmJ5pswRVQJUHpOsobnLYp2ZUaUlKiKDrgWWhosOwAEM8Q==} engines: {node: '>=6.9.0'} - '@ibm/plex@6.4.1': - resolution: {integrity: sha512-fnsipQywHt3zWvsnlyYKMikcVI7E2fEwpiPnIHFqlbByXVfQfANAAeJk1IV4mNnxhppUIDlhU0TzwYwL++Rn2g==} - - '@ibm/telemetry-js@1.6.1': - resolution: {integrity: sha512-ds45f2bz4qVvJPK84VcSMJTvTZI+qXu6wVlBcy9/hAlmzOcxeX6rh8W0De4H133HPONsZWvs0lV/H2aUcznCxw==} - hasBin: true - '@isaacs/cliui@8.0.2': resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} @@ -1037,6 +1219,10 @@ packages: '@octokit/types@9.3.2': resolution: {integrity: sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==} + '@opentelemetry/api@1.9.0': + resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} + engines: {node: '>=8.0.0'} + '@pkgjs/parseargs@0.11.0': resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} @@ -1070,81 +1256,176 @@ packages: cpu: [arm] os: [android] + '@rollup/rollup-android-arm-eabi@4.34.0': + resolution: {integrity: 
sha512-Eeao7ewDq79jVEsrtWIj5RNqB8p2knlm9fhR6uJ2gqP7UfbLrTrxevudVrEPDM7Wkpn/HpRC2QfazH7MXLz3vQ==} + cpu: [arm] + os: [android] + '@rollup/rollup-android-arm64@4.20.0': resolution: {integrity: sha512-u00Ro/nok7oGzVuh/FMYfNoGqxU5CPWz1mxV85S2w9LxHR8OoMQBuSk+3BKVIDYgkpeOET5yXkx90OYFc+ytpQ==} cpu: [arm64] os: [android] + '@rollup/rollup-android-arm64@4.34.0': + resolution: {integrity: sha512-yVh0Kf1f0Fq4tWNf6mWcbQBCLDpDrDEl88lzPgKhrgTcDrTtlmun92ywEF9dCjmYO3EFiSuJeeo9cYRxl2FswA==} + cpu: [arm64] + os: [android] + '@rollup/rollup-darwin-arm64@4.20.0': resolution: {integrity: sha512-uFVfvzvsdGtlSLuL0ZlvPJvl6ZmrH4CBwLGEFPe7hUmf7htGAN+aXo43R/V6LATyxlKVC/m6UsLb7jbG+LG39Q==} cpu: [arm64] os: [darwin] + '@rollup/rollup-darwin-arm64@4.34.0': + resolution: {integrity: sha512-gCs0ErAZ9s0Osejpc3qahTsqIPUDjSKIyxK/0BGKvL+Tn0n3Kwvj8BrCv7Y5sR1Ypz1K2qz9Ny0VvkVyoXBVUQ==} + cpu: [arm64] + os: [darwin] + '@rollup/rollup-darwin-x64@4.20.0': resolution: {integrity: sha512-xbrMDdlev53vNXexEa6l0LffojxhqDTBeL+VUxuuIXys4x6xyvbKq5XqTXBCEUA8ty8iEJblHvFaWRJTk/icAQ==} cpu: [x64] os: [darwin] + '@rollup/rollup-darwin-x64@4.34.0': + resolution: {integrity: sha512-aIB5Anc8hngk15t3GUkiO4pv42ykXHfmpXGS+CzM9CTyiWyT8HIS5ygRAy7KcFb/wiw4Br+vh1byqcHRTfq2tQ==} + cpu: [x64] + os: [darwin] + + '@rollup/rollup-freebsd-arm64@4.34.0': + resolution: {integrity: sha512-kpdsUdMlVJMRMaOf/tIvxk8TQdzHhY47imwmASOuMajg/GXpw8GKNd8LNwIHE5Yd1onehNpcUB9jHY6wgw9nHQ==} + cpu: [arm64] + os: [freebsd] + + '@rollup/rollup-freebsd-x64@4.34.0': + resolution: {integrity: sha512-D0RDyHygOBCQiqookcPevrvgEarN0CttBecG4chOeIYCNtlKHmf5oi5kAVpXV7qs0Xh/WO2RnxeicZPtT50V0g==} + cpu: [x64] + os: [freebsd] + '@rollup/rollup-linux-arm-gnueabihf@4.20.0': resolution: {integrity: sha512-jMYvxZwGmoHFBTbr12Xc6wOdc2xA5tF5F2q6t7Rcfab68TT0n+r7dgawD4qhPEvasDsVpQi+MgDzj2faOLsZjA==} cpu: [arm] os: [linux] + '@rollup/rollup-linux-arm-gnueabihf@4.34.0': + resolution: {integrity: 
sha512-mCIw8j5LPDXmCOW8mfMZwT6F/Kza03EnSr4wGYEswrEfjTfVsFOxvgYfuRMxTuUF/XmRb9WSMD5GhCWDe2iNrg==} + cpu: [arm] + os: [linux] + '@rollup/rollup-linux-arm-musleabihf@4.20.0': resolution: {integrity: sha512-1asSTl4HKuIHIB1GcdFHNNZhxAYEdqML/MW4QmPS4G0ivbEcBr1JKlFLKsIRqjSwOBkdItn3/ZDlyvZ/N6KPlw==} cpu: [arm] os: [linux] + '@rollup/rollup-linux-arm-musleabihf@4.34.0': + resolution: {integrity: sha512-AwwldAu4aCJPob7zmjuDUMvvuatgs8B/QiVB0KwkUarAcPB3W+ToOT+18TQwY4z09Al7G0BvCcmLRop5zBLTag==} + cpu: [arm] + os: [linux] + '@rollup/rollup-linux-arm64-gnu@4.20.0': resolution: {integrity: sha512-COBb8Bkx56KldOYJfMf6wKeYJrtJ9vEgBRAOkfw6Ens0tnmzPqvlpjZiLgkhg6cA3DGzCmLmmd319pmHvKWWlQ==} cpu: [arm64] os: [linux] + '@rollup/rollup-linux-arm64-gnu@4.34.0': + resolution: {integrity: sha512-e7kDUGVP+xw05pV65ZKb0zulRploU3gTu6qH1qL58PrULDGxULIS0OSDQJLH7WiFnpd3ZKUU4VM3u/Z7Zw+e7Q==} + cpu: [arm64] + os: [linux] + '@rollup/rollup-linux-arm64-musl@4.20.0': resolution: {integrity: sha512-+it+mBSyMslVQa8wSPvBx53fYuZK/oLTu5RJoXogjk6x7Q7sz1GNRsXWjn6SwyJm8E/oMjNVwPhmNdIjwP135Q==} cpu: [arm64] os: [linux] + '@rollup/rollup-linux-arm64-musl@4.34.0': + resolution: {integrity: sha512-SXYJw3zpwHgaBqTXeAZ31qfW/v50wq4HhNVvKFhRr5MnptRX2Af4KebLWR1wpxGJtLgfS2hEPuALRIY3LPAAcA==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-loongarch64-gnu@4.34.0': + resolution: {integrity: sha512-e5XiCinINCI4RdyU3sFyBH4zzz7LiQRvHqDtRe9Dt8o/8hTBaYpdPimayF00eY2qy5j4PaaWK0azRgUench6WQ==} + cpu: [loong64] + os: [linux] + '@rollup/rollup-linux-powerpc64le-gnu@4.20.0': resolution: {integrity: sha512-yAMvqhPfGKsAxHN8I4+jE0CpLWD8cv4z7CK7BMmhjDuz606Q2tFKkWRY8bHR9JQXYcoLfopo5TTqzxgPUjUMfw==} cpu: [ppc64] os: [linux] + '@rollup/rollup-linux-powerpc64le-gnu@4.34.0': + resolution: {integrity: sha512-3SWN3e0bAsm9ToprLFBSro8nJe6YN+5xmB11N4FfNf92wvLye/+Rh5JGQtKOpwLKt6e61R1RBc9g+luLJsc23A==} + cpu: [ppc64] + os: [linux] + '@rollup/rollup-linux-riscv64-gnu@4.20.0': resolution: {integrity: 
sha512-qmuxFpfmi/2SUkAw95TtNq/w/I7Gpjurx609OOOV7U4vhvUhBcftcmXwl3rqAek+ADBwSjIC4IVNLiszoj3dPA==} cpu: [riscv64] os: [linux] + '@rollup/rollup-linux-riscv64-gnu@4.34.0': + resolution: {integrity: sha512-B1Oqt3GLh7qmhvfnc2WQla4NuHlcxAD5LyueUi5WtMc76ZWY+6qDtQYqnxARx9r+7mDGfamD+8kTJO0pKUJeJA==} + cpu: [riscv64] + os: [linux] + '@rollup/rollup-linux-s390x-gnu@4.20.0': resolution: {integrity: sha512-I0BtGXddHSHjV1mqTNkgUZLnS3WtsqebAXv11D5BZE/gfw5KoyXSAXVqyJximQXNvNzUo4GKlCK/dIwXlz+jlg==} cpu: [s390x] os: [linux] + '@rollup/rollup-linux-s390x-gnu@4.34.0': + resolution: {integrity: sha512-UfUCo0h/uj48Jq2lnhX0AOhZPSTAq3Eostas+XZ+GGk22pI+Op1Y6cxQ1JkUuKYu2iU+mXj1QjPrZm9nNWV9rg==} + cpu: [s390x] + os: [linux] + '@rollup/rollup-linux-x64-gnu@4.20.0': resolution: {integrity: sha512-y+eoL2I3iphUg9tN9GB6ku1FA8kOfmF4oUEWhztDJ4KXJy1agk/9+pejOuZkNFhRwHAOxMsBPLbXPd6mJiCwew==} cpu: [x64] os: [linux] + '@rollup/rollup-linux-x64-gnu@4.34.0': + resolution: {integrity: sha512-chZLTUIPbgcpm+Z7ALmomXW8Zh+wE2icrG+K6nt/HenPLmtwCajhQC5flNSk1Xy5EDMt/QAOz2MhzfOfJOLSiA==} + cpu: [x64] + os: [linux] + '@rollup/rollup-linux-x64-musl@4.20.0': resolution: {integrity: sha512-hM3nhW40kBNYUkZb/r9k2FKK+/MnKglX7UYd4ZUy5DJs8/sMsIbqWK2piZtVGE3kcXVNj3B2IrUYROJMMCikNg==} cpu: [x64] os: [linux] + '@rollup/rollup-linux-x64-musl@4.34.0': + resolution: {integrity: sha512-jo0UolK70O28BifvEsFD/8r25shFezl0aUk2t0VJzREWHkq19e+pcLu4kX5HiVXNz5qqkD+aAq04Ct8rkxgbyQ==} + cpu: [x64] + os: [linux] + '@rollup/rollup-win32-arm64-msvc@4.20.0': resolution: {integrity: sha512-psegMvP+Ik/Bg7QRJbv8w8PAytPA7Uo8fpFjXyCRHWm6Nt42L+JtoqH8eDQ5hRP7/XW2UiIriy1Z46jf0Oa1kA==} cpu: [arm64] os: [win32] + '@rollup/rollup-win32-arm64-msvc@4.34.0': + resolution: {integrity: sha512-Vmg0NhAap2S54JojJchiu5An54qa6t/oKT7LmDaWggpIcaiL8WcWHEN6OQrfTdL6mQ2GFyH7j2T5/3YPEDOOGA==} + cpu: [arm64] + os: [win32] + '@rollup/rollup-win32-ia32-msvc@4.20.0': resolution: {integrity: 
sha512-GabekH3w4lgAJpVxkk7hUzUf2hICSQO0a/BLFA11/RMxQT92MabKAqyubzDZmMOC/hcJNlc+rrypzNzYl4Dx7A==} cpu: [ia32] os: [win32] + '@rollup/rollup-win32-ia32-msvc@4.34.0': + resolution: {integrity: sha512-CV2aqhDDOsABKHKhNcs1SZFryffQf8vK2XrxP6lxC99ELZAdvsDgPklIBfd65R8R+qvOm1SmLaZ/Fdq961+m7A==} + cpu: [ia32] + os: [win32] + '@rollup/rollup-win32-x64-msvc@4.20.0': resolution: {integrity: sha512-aJ1EJSuTdGnM6qbVC4B5DSmozPTqIag9fSzXRNNo+humQLG89XpPgdt16Ia56ORD7s+H8Pmyx44uczDQ0yDzpg==} cpu: [x64] os: [win32] + '@rollup/rollup-win32-x64-msvc@4.34.0': + resolution: {integrity: sha512-g2ASy1QwHP88y5KWvblUolJz9rN+i4ZOsYzkEwcNfaNooxNUXG+ON6F5xFo0NIItpHqxcdAyls05VXpBnludGw==} + cpu: [x64] + os: [win32] + '@rushstack/node-core-library@4.0.2': resolution: {integrity: sha512-hyES82QVpkfQMeBMteQUnrhASL/KHPhd7iJ8euduwNJG4mu2GSOKybf0rOEjOm1Wz7CwJEUm9y0yD7jg2C1bfg==} peerDependencies: @@ -1296,6 +1577,10 @@ packages: resolution: {integrity: sha512-FlS4ZWlp97iiNWig0Muq8p+3rVDjRiYE+YKGbAqXOu9nwJFFOdL00kFpz42M+4huzYi86vAK1sOOfyOG45muIQ==} engines: {node: '>=14'} + '@testing-library/jest-dom@6.6.3': + resolution: {integrity: sha512-IteBhl4XqYNkM54f4ejhLRJiZNqcSCoXUOG2CPK7qbD322KjQozM4kHQOfkG2oln9b9HTYqs+Sae8vBATubxxA==} + engines: {node: '>=14', npm: '>=6', yarn: '>=1'} + '@testing-library/react@14.3.1': resolution: {integrity: sha512-H99XjUhWQw0lTgyMN05W3xQG1Nh4lq574D8keFf1dDoNTJgp66VbJozRaczoF+wsiaPJNt/TcnfpLGufGxSrZQ==} engines: {node: '>=14'} @@ -1363,6 +1648,9 @@ packages: '@types/estree@1.0.5': resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==} + '@types/estree@1.0.6': + resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==} + '@types/express-serve-static-core@4.19.5': resolution: {integrity: sha512-y6W03tvrACO72aijJ5uF02FRq5cgDR9lUxddQ8vyF+GvmjJQqbzDcJngEjURc+ZsG31VI3hODNZJ2URj86pzmg==} @@ -1495,36 +1783,54 @@ packages: peerDependencies: vite: ^4 || ^5 - 
'@vitejs/plugin-react@4.3.1': - resolution: {integrity: sha512-m/V2syj5CuVnaxcUJOQRel/Wr31FFXRFlnOoq1TVtkCxsY5veGMTEmpWHndrhB2U8ScHtCQB1e+4hWYExQc6Lg==} + '@vitejs/plugin-react@4.3.4': + resolution: {integrity: sha512-SCCPBJtYLdE8PX/7ZQAs1QAZ8Jqwih+0VBLum1EGqmCCQal+MIUqLCzj3ZUy8ufbC0cAM4LRlSTm7IQJwWT4ug==} engines: {node: ^14.18.0 || >=16.0.0} peerDependencies: - vite: ^4.2.0 || ^5.0.0 + vite: ^4.2.0 || ^5.0.0 || ^6.0.0 - '@vitest/coverage-v8@1.6.0': - resolution: {integrity: sha512-KvapcbMY/8GYIG0rlwwOKCVNRc0OL20rrhFkg/CHNzncV03TE2XWvO5w9uZYoxNiMEBacAJt3unSOiZ7svePew==} + '@vitest/coverage-v8@3.0.4': + resolution: {integrity: sha512-f0twgRCHgbs24Dp8cLWagzcObXMcuKtAwgxjJV/nnysPAJJk1JiKu/W0gIehZLmkljhJXU/E0/dmuQzsA/4jhA==} peerDependencies: - vitest: 1.6.0 + '@vitest/browser': 3.0.4 + vitest: 3.0.4 + peerDependenciesMeta: + '@vitest/browser': + optional: true + + '@vitest/expect@3.0.4': + resolution: {integrity: sha512-Nm5kJmYw6P2BxhJPkO3eKKhGYKRsnqJqf+r0yOGRKpEP+bSCBDsjXgiu1/5QFrnPMEgzfC38ZEjvCFgaNBC0Eg==} + + '@vitest/mocker@3.0.4': + resolution: {integrity: sha512-gEef35vKafJlfQbnyOXZ0Gcr9IBUsMTyTLXsEQwuyYAerpHqvXhzdBnDFuHLpFqth3F7b6BaFr4qV/Cs1ULx5A==} + peerDependencies: + msw: ^2.4.9 + vite: ^5.0.0 || ^6.0.0 + peerDependenciesMeta: + msw: + optional: true + vite: + optional: true - '@vitest/expect@1.6.0': - resolution: {integrity: sha512-ixEvFVQjycy/oNgHjqsL6AZCDduC+tflRluaHIzKIsdbzkLn2U/iBnVeJwB6HsIjQBdfMR8Z0tRxKUsvFJEeWQ==} + '@vitest/pretty-format@3.0.4': + resolution: {integrity: sha512-ts0fba+dEhK2aC9PFuZ9LTpULHpY/nd6jhAQ5IMU7Gaj7crPCTdCFfgvXxruRBLFS+MLraicCuFXxISEq8C93g==} - '@vitest/runner@1.6.0': - resolution: {integrity: sha512-P4xgwPjwesuBiHisAVz/LSSZtDjOTPYZVmNAnpHHSR6ONrf8eCJOFRvUwdHn30F5M1fxhqtl7QZQUk2dprIXAg==} + '@vitest/runner@3.0.4': + resolution: {integrity: sha512-dKHzTQ7n9sExAcWH/0sh1elVgwc7OJ2lMOBrAm73J7AH6Pf9T12Zh3lNE1TETZaqrWFXtLlx3NVrLRb5hCK+iw==} - '@vitest/snapshot@1.6.0': - resolution: {integrity: 
sha512-+Hx43f8Chus+DCmygqqfetcAZrDJwvTj0ymqjQq4CvmpKFSTVteEOBzCusu1x2tt4OJcvBflyHUE0DZSLgEMtQ==} + '@vitest/snapshot@3.0.4': + resolution: {integrity: sha512-+p5knMLwIk7lTQkM3NonZ9zBewzVp9EVkVpvNta0/PlFWpiqLaRcF4+33L1it3uRUCh0BGLOaXPPGEjNKfWb4w==} - '@vitest/spy@1.6.0': - resolution: {integrity: sha512-leUTap6B/cqi/bQkXUu6bQV5TZPx7pmMBKBQiI0rJA8c3pB56ZsaTbREnF7CJfmvAS4V2cXIBAh/3rVwrrCYgw==} + '@vitest/spy@3.0.4': + resolution: {integrity: sha512-sXIMF0oauYyUy2hN49VFTYodzEAu744MmGcPR3ZBsPM20G+1/cSW/n1U+3Yu/zHxX2bIDe1oJASOkml+osTU6Q==} - '@vitest/ui@1.6.0': - resolution: {integrity: sha512-k3Lyo+ONLOgylctiGovRKy7V4+dIN2yxstX3eY5cWFXH6WP+ooVX79YSyi0GagdTQzLmT43BF27T0s6dOIPBXA==} + '@vitest/ui@3.0.4': + resolution: {integrity: sha512-e+s2F9e9FUURkZ5aFIe1Fi3Y8M7UF6gEuShcaV/ur7y/Ldri+1tzWQ1TJq9Vas42NXnXvCAIrU39Z4U2RyET6g==} peerDependencies: - vitest: 1.6.0 + vitest: 3.0.4 - '@vitest/utils@1.6.0': - resolution: {integrity: sha512-21cPiuGMoMZwiOHa2i4LXkMkMkCGzA+MVFV70jRwHo95dL4x/ts5GZhML1QWuy7yfp3WzK3lRvZi3JnXTYqrBw==} + '@vitest/utils@3.0.4': + resolution: {integrity: sha512-8BqC1ksYsHtbWH+DfpOAKrFw3jl3Uf9J7yeFh85Pz52IWuh1hBBtyfEbRNNZNjl8H8A5yMLH9/t+k7HIKzQcZQ==} '@volar/language-core@1.11.1': resolution: {integrity: sha512-dOcNn3i9GgZAcJt43wuaEykSluAuOkQgzni1cuxLxTV0nJKanQztp7FxyswdRILaKH+P2XZMPRp2S4MV/pElCw==} @@ -1728,8 +2034,9 @@ packages: resolution: {integrity: sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==} engines: {node: '>=8'} - assertion-error@1.1.0: - resolution: {integrity: sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==} + assertion-error@2.0.1: + resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} + engines: {node: '>=12'} ast-types-flow@0.0.8: resolution: {integrity: sha512-OH/2E5Fg20h2aPrbe+QL8JZQFko0YZaF+j4mnQ7BGhfavO7OpSLa8a0y9sBwomHdSbkhTS8TQNayBfnW5DwbvQ==} @@ -1792,6 +2099,9 @@ 
packages: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} + bintrees@1.0.2: + resolution: {integrity: sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw==} + bl@4.1.0: resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} @@ -1814,6 +2124,11 @@ packages: engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true + browserslist@4.24.4: + resolution: {integrity: sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + bs58@5.0.0: resolution: {integrity: sha512-r+ihvQJvahgYT50JD05dyJNKlmmSlMoOGwn1lCcEzanPglg7TxYjioQUYehQ9mAR/+hOSd2jRc/Z2y5UxBymvQ==} @@ -1893,6 +2208,9 @@ packages: caniuse-lite@1.0.30001651: resolution: {integrity: sha512-9Cf+Xv1jJNe1xPZLGuUXLNkE1BoDkqRqYyFJ9TDYSqhduqA4hu4oR9HluGoWYQC/aj8WHjsGVV+bwkh0+tegRg==} + caniuse-lite@1.0.30001696: + resolution: {integrity: sha512-pDCPkvzfa39ehJtJ+OwGT/2yvT2SbjfHhiIW2LWOAcMQ7BzwxT/XuyUp4OTOd0XFWA6BKw0JalnBHgSi5DGJBQ==} + cbor-extract@2.2.0: resolution: {integrity: sha512-Ig1zM66BjLfTXpNgKpvBePq271BPOvu8MR0Jl080yG7Jsl+wAZunfrwiwA+9ruzm/WEdIV5QF/bjDZTqyAIVHA==} hasBin: true @@ -1900,14 +2218,18 @@ packages: cbor-x@1.6.0: resolution: {integrity: sha512-0kareyRwHSkL6ws5VXHEf8uY1liitysCVJjlmhaLG+IXLqhSaOO+t63coaso7yjwEzWZzLy8fJo06gZDVQM9Qg==} - chai@4.5.0: - resolution: {integrity: sha512-RITGBfijLkBddZvnn8jdqoTypxvqbOLYQkGGxXzeFjVHvudaPw0HNFD9x928/eUwYWd2dPCugVqspGALTZZQKw==} - engines: {node: '>=4'} + chai@5.1.2: + resolution: {integrity: sha512-aGtmf24DW6MLHHG5gCx4zaI3uBq3KRtxeVs0DjFH6Z0rDNbsvTxFASFvdj79pxjxZ8/5u3PIiN3IwEIQkiiuPw==} + engines: {node: '>=12'} chalk@2.4.2: resolution: {integrity: 
sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} engines: {node: '>=4'} + chalk@3.0.0: + resolution: {integrity: sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==} + engines: {node: '>=8'} + chalk@4.1.0: resolution: {integrity: sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==} engines: {node: '>=10'} @@ -1919,8 +2241,9 @@ packages: chardet@0.7.0: resolution: {integrity: sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==} - check-error@1.0.3: - resolution: {integrity: sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==} + check-error@2.1.1: + resolution: {integrity: sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==} + engines: {node: '>= 16'} chokidar@3.6.0: resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} @@ -2032,9 +2355,6 @@ packages: resolution: {integrity: sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==} engines: {'0': node >= 6.0} - confbox@0.1.7: - resolution: {integrity: sha512-uJcB/FKZtBMCJpK8MQji6bJHgu1tixKPxRLeGkNzBoOZzpnZUJm0jm2/sBDWcuBx1dYgxV4JU+g5hmNxCyAmdA==} - config-chain@1.1.13: resolution: {integrity: sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==} @@ -2132,6 +2452,9 @@ packages: resolution: {integrity: sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==} engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0} + css.escape@1.5.1: + resolution: {integrity: sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==} + cssesc@3.0.0: resolution: {integrity: 
sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==} engines: {node: '>=4'} @@ -2198,6 +2521,15 @@ packages: supports-color: optional: true + debug@4.4.0: + resolution: {integrity: sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + decamelize-keys@1.1.1: resolution: {integrity: sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==} engines: {node: '>=0.10.0'} @@ -2225,8 +2557,8 @@ packages: babel-plugin-macros: optional: true - deep-eql@4.1.4: - resolution: {integrity: sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==} + deep-eql@5.0.2: + resolution: {integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==} engines: {node: '>=6'} deep-equal@2.2.3: @@ -2324,6 +2656,9 @@ packages: dom-accessibility-api@0.5.16: resolution: {integrity: sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==} + dom-accessibility-api@0.6.3: + resolution: {integrity: sha512-7ZgogeTnjuHbo+ct10G9Ffp0mif17idi0IyWNVA/wcwcm7NPOD/WEHVP3n7n3MhXqxoIYm8d6MuZohYWIZ4T3w==} + domexception@4.0.0: resolution: {integrity: sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw==} engines: {node: '>=12'} @@ -2358,6 +2693,9 @@ packages: electron-to-chromium@1.5.9: resolution: {integrity: sha512-HfkT8ndXR0SEkU8gBQQM3rz035bpE/hxkZ1YIt4KJPEFES68HfIU6LzKukH0H794Lm83WJtkSAMfEToxCs15VA==} + electron-to-chromium@1.5.90: + resolution: {integrity: sha512-C3PN4aydfW91Natdyd449Kw+BzhLmof6tzy5W1pFC5SpQxVXT+oyiyOG9AgYYSN9OdA/ik3YkCrpwqI8ug5Tug==} + emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} @@ 
-2416,6 +2754,9 @@ packages: resolution: {integrity: sha512-zoMwbCcH5hwUkKJkT8kDIBZSz9I6mVG//+lDCinLCGov4+r7NIy0ld8o03M0cJxl2spVf6ESYVS6/gpIfq1FFw==} engines: {node: '>= 0.4'} + es-module-lexer@1.6.0: + resolution: {integrity: sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==} + es-object-atoms@1.0.0: resolution: {integrity: sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==} engines: {node: '>= 0.4'} @@ -2439,10 +2780,19 @@ packages: engines: {node: '>=12'} hasBin: true + esbuild@0.24.2: + resolution: {integrity: sha512-+9egpBW8I3CD5XPe0n6BfT5fxLzxrlDzqydF3aviG+9ni1lDC/OvMHcxqEFV0+LANZG5R1bFMWfUrjVsdwxJvA==} + engines: {node: '>=18'} + hasBin: true + escalade@3.1.2: resolution: {integrity: sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==} engines: {node: '>=6'} + escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + escape-html@1.0.3: resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} @@ -2517,6 +2867,12 @@ packages: peerDependencies: eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 + eslint-plugin-react-hooks@5.1.0: + resolution: {integrity: sha512-mpJRtPgHN2tNAvZ35AMfqeB3Xqeo273QxrHJsbBEPWODRM4r0yB6jfoROqKEYrOn27UtRPpcpHc2UqyBSuUNTw==} + engines: {node: '>=10'} + peerDependencies: + eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0 + eslint-plugin-react@7.35.0: resolution: {integrity: sha512-v501SSMOWv8gerHkk+IIQBkcGRGrO2nfybfj5pLxuJNFTPxxA3PSryhXTK+9pNbtkggheDdsC0E9Q8CuPk6JKA==} engines: {node: '>=4'} @@ -2594,6 +2950,10 @@ packages: resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} engines: {node: '>=16.17'} + expect-type@1.1.0: + 
resolution: {integrity: sha512-bFi65yM+xZgk+u/KRIpekdSYkTB5W1pEf0Lt8Q8Msh7b+eQ7LXVtIB1Bkm4fvclDEL1b2CZkMhv2mOeF8tMdkA==} + engines: {node: '>=12.0.0'} + exponential-backoff@3.1.1: resolution: {integrity: sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw==} @@ -2624,6 +2984,14 @@ packages: fastq@1.17.1: resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==} + fdir@6.4.3: + resolution: {integrity: sha512-PMXmW2y1hDDfTSRc9gaXIuCCRpuoz3Kaz8cUelp3smouvfT632ozg2vrT6lJsHKKOF59YLbOGfAWGUcKEfRMQw==} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + fflate@0.8.2: resolution: {integrity: sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==} @@ -2669,6 +3037,9 @@ packages: flatted@3.3.1: resolution: {integrity: sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==} + flatted@3.3.2: + resolution: {integrity: sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA==} + follow-redirects@1.15.6: resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==} engines: {node: '>=4.0'} @@ -2760,9 +3131,6 @@ packages: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} - get-func-name@2.0.2: - resolution: {integrity: sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==} - get-intrinsic@1.2.4: resolution: {integrity: sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==} engines: {node: '>= 0.4'} @@ -3304,9 +3672,6 @@ packages: js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} - 
js-tokens@9.0.0: - resolution: {integrity: sha512-WriZw1luRMlmV3LGJaR6QOJjWwgLUTf89OwT2lUOyjX2dJGBwgmIkbcz+7WFZjrZM635JOIR517++e/67CP9dQ==} - js-yaml@3.14.1: resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} hasBin: true @@ -3327,9 +3692,9 @@ packages: canvas: optional: true - jsesc@2.5.2: - resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} - engines: {node: '>=4'} + jsesc@3.1.0: + resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==} + engines: {node: '>=6'} hasBin: true json-buffer@3.0.1: @@ -3453,10 +3818,6 @@ packages: resolution: {integrity: sha512-gUD/epcRms75Cw8RT1pUdHugZYM5ce64ucs2GEISABwkRsOQr0q2wm/MV2TKThycIe5e0ytRweW2RZxclogCdQ==} engines: {node: '>=8'} - local-pkg@0.5.0: - resolution: {integrity: sha512-ok6z3qlYyCDS4ZEU27HaU6x/xZa9Whf8jD4ptH5UZTQYZVYeb9bnZ3ojVhiJNLiXK1Hfc0GNbLXcmZ5plLDDBg==} - engines: {node: '>=14'} - locate-character@3.0.0: resolution: {integrity: sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==} @@ -3474,9 +3835,11 @@ packages: lodash.get@4.4.2: resolution: {integrity: sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==} + deprecated: This package is deprecated. Use the optional chaining (?.) operator instead. lodash.isequal@4.5.0: resolution: {integrity: sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==} + deprecated: This package is deprecated. Use require('node:util').isDeepStrictEqual instead. 
lodash.ismatch@4.4.0: resolution: {integrity: sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==} @@ -3495,8 +3858,8 @@ packages: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} hasBin: true - loupe@2.3.7: - resolution: {integrity: sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==} + loupe@3.1.3: + resolution: {integrity: sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==} lowercase-keys@3.0.0: resolution: {integrity: sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==} @@ -3525,8 +3888,11 @@ packages: magic-string@0.30.11: resolution: {integrity: sha512-+Wri9p0QHMy+545hKww7YAu5NyzF8iomPL/RQazugQ9+Ez4Ic3mERMd8ZTX5rfK944j+560ZJi8iAwgak1Ac7A==} - magicast@0.3.4: - resolution: {integrity: sha512-TyDF/Pn36bBji9rWKHlZe+PZb6Mx5V8IHCSxk7X4aljM4e/vyDvZZYwHewdVaqiA0nb3ghfHU/6AUpDxWoER2Q==} + magic-string@0.30.17: + resolution: {integrity: sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==} + + magicast@0.3.5: + resolution: {integrity: sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==} make-dir@2.1.0: resolution: {integrity: sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==} @@ -3707,9 +4073,6 @@ packages: engines: {node: '>=10'} hasBin: true - mlly@1.7.1: - resolution: {integrity: sha512-rrVRZRELyQzrIUAVMHxP97kv+G786pHmOKzuFII8zDYahFBS7qnHh2AlYSl1GAHhaMPCz6/oHjVMcfFYgFYHgA==} - modify-values@1.0.1: resolution: {integrity: sha512-xV2bxeN6F7oYjZWTe/YPAy6MN2M+sL4u/Rlm2AHCIVGfo2p1yGmBHQ6vHehl4bRTZBdHu3TSkWdYgkwpYzAGSw==} engines: {node: '>=0.10.0'} @@ -3753,6 +4116,11 @@ packages: engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true + nanoid@3.3.8: + resolution: {integrity: 
sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} @@ -3790,6 +4158,9 @@ packages: node-releases@2.0.18: resolution: {integrity: sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==} + node-releases@2.0.19: + resolution: {integrity: sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==} + nodemon@2.0.22: resolution: {integrity: sha512-B8YqaKMmyuCO7BowF1Z1/mkPqLk6cs/l63Ojtd6otKjMx47Dq1utxfRxcavH1I7VSaL8n5BUaoutadnsX3AAVQ==} engines: {node: '>=8.10.0'} @@ -3986,10 +4357,6 @@ packages: resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} engines: {node: '>=10'} - p-limit@5.0.0: - resolution: {integrity: sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==} - engines: {node: '>=18'} - p-locate@2.0.0: resolution: {integrity: sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==} engines: {node: '>=4'} @@ -4129,11 +4496,12 @@ packages: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} engines: {node: '>=8'} - pathe@1.1.2: - resolution: {integrity: sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==} + pathe@2.0.2: + resolution: {integrity: sha512-15Ztpk+nov8DR524R4BF7uEuzESgzUEAV4Ah7CUMNGXdE5ELuvxElxGXndBl32vMSsWa1jpNf22Z+Er3sKwq+w==} - pathval@1.1.1: - resolution: {integrity: sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==} + pathval@2.0.0: + resolution: {integrity: 
sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==} + engines: {node: '>= 14.16'} periscopic@3.1.0: resolution: {integrity: sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==} @@ -4141,10 +4509,17 @@ packages: picocolors@1.0.1: resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==} + picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} + picomatch@4.0.2: + resolution: {integrity: sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==} + engines: {node: '>=12'} + pidtree@0.3.1: resolution: {integrity: sha512-qQbW94hLHEqCg7nhby4yRC7G2+jYHY4Rguc2bjw7Uug4GIJuu1tvf2uHaZv5Q8zdt+WKJ6qK1FOI6amaWUo5FA==} engines: {node: '>=0.10'} @@ -4174,9 +4549,6 @@ packages: resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==} engines: {node: '>=8'} - pkg-types@1.1.3: - resolution: {integrity: sha512-+JrgthZG6m3ckicaOB74TwQ+tBWsFl3qVQg7mN8ulwSOElJ7gBhKzj2VkCPnZ4NlF6kEquYU+RIYNVAvzd54UA==} - portfinder@1.0.32: resolution: {integrity: sha512-on2ZJVVDXRADWE6jnQaX0ioEylzgBpQk8r55NE4wjXW1ZxO+BgDlY6DXwj20i0V8eB4SenDQ00WEaxfiIQPcxg==} engines: {node: '>= 0.12.0'} @@ -4226,6 +4598,10 @@ packages: resolution: {integrity: sha512-TesUflQ0WKZqAvg52PWL6kHgLKP6xB6heTOdoYM0Wt2UHyxNa4K25EZZMgKns3BH1RLVbZCREPpLY0rhnNoHVQ==} engines: {node: ^10 || ^12 || >=14} + postcss@8.5.1: + resolution: {integrity: sha512-6oz2beyjc5VMn/KV1pPw8fliQkhBXrVn1Z3TVyqZxU8kZpzEKhBdmCFqI6ZbmGtamQvQGuU1sgPTk8ZrXDD7jQ==} + engines: {node: ^10 || ^12 || >=14} + prelude-ls@1.2.1: resolution: {integrity: 
sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} engines: {node: '>= 0.8.0'} @@ -4254,6 +4630,10 @@ packages: resolution: {integrity: sha512-69agxLtnI8xBs9gUGqEnK26UfiexpHy+KUpBQWabiytQjnn5wFY8rklAi7GRfABIuPNnQ/ik48+LGLkYYJcy4A==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + prom-client@15.1.3: + resolution: {integrity: sha512-6ZiOBfCywsD4k1BN9IX0uZhF+tJkV8q8llP64G5Hajs4JOeVLPCwpPVcpXy3BwYiUGgyJzsJJQeOIv7+hDSq8g==} + engines: {node: ^16 || ^18 || >=20} + promise-all-reject-late@1.0.1: resolution: {integrity: sha512-vuf0Lf0lOxyQREH7GDIOUMLS7kz+gs8i6B+Yi8dC68a2sychGrHTJYghMBD6k7eUcH0H5P73EckCA48xijWqXw==} @@ -4344,6 +4724,11 @@ packages: peerDependencies: react: ^18.3.1 + react-error-boundary@5.0.0: + resolution: {integrity: sha512-tnjAxG+IkpLephNcePNA7v6F/QpWLH8He65+DmedchDwg162JZqx4NmbXj0mlAYVVEd81OW7aFhmbsScYfiAFQ==} + peerDependencies: + react: '>=16.13.1' + react-is@16.13.1: resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} @@ -4526,6 +4911,11 @@ packages: engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true + rollup@4.34.0: + resolution: {integrity: sha512-+4C/cgJ9w6sudisA0nZz0+O7lTP9a3CzNLsoDwaRumM8QHwghUsu6tqHXiTmNUp/rqNiM14++7dkzHDyCRs0Jg==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + rrweb-cssom@0.6.0: resolution: {integrity: sha512-APM0Gt1KoXBz0iIkkdB/kfvGOwC4UuJFeG/c+yV7wSc7q96cG/kJ0HiYCnzivD9SB53cLV1MlHFNfOuPaadYSw==} @@ -4668,9 +5058,9 @@ packages: resolution: {integrity: sha512-VpsrsJSUcJEseSbMHkrsrAVSdvVS5I96Qo1QAQ4FxQ9wXFcB+pjj7FB7/us9+GcgfW4ziHtYMc1J0PLczb55mg==} engines: {node: '>=8.10.0'} - sirv@2.0.4: - resolution: {integrity: sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==} - engines: {node: '>= 10'} + sirv@3.0.0: + resolution: {integrity: sha512-BPwJGUeDaDCHihkORDchNyyTvWFhcusy1XMmhEVTQTwGeybFbp8YEmB+njbPnth1FibULBSBVwCQni25XlCUDg==} + engines: {node: 
'>=18'} slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} @@ -4700,6 +5090,10 @@ packages: resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==} engines: {node: '>=0.10.0'} + source-map-js@1.2.1: + resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + source-map@0.6.1: resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} engines: {node: '>=0.10.0'} @@ -4746,8 +5140,8 @@ packages: resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==} engines: {node: '>= 0.8'} - std-env@3.7.0: - resolution: {integrity: sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==} + std-env@3.8.0: + resolution: {integrity: sha512-Bc3YwwCB+OzldMxOXJIIvC6cPRWr/LxOp48CdQTOkPyk/t4JWWJbrilwBd7RJzKV8QW7tJkcgAmeuLLJugl5/w==} stop-iteration-iterator@1.0.0: resolution: {integrity: sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==} @@ -4836,9 +5230,6 @@ packages: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} - strip-literal@2.1.0: - resolution: {integrity: sha512-Op+UycaUt/8FbN/Z2TWPBLge3jWrP3xj10f3fnYxf052bKuS3EKs1ZQcVGjnEMdsNVAM+plXRdmjrZ/KgG3Skw==} - strong-log-transformer@2.1.0: resolution: {integrity: sha512-B3Hgul+z0L9a236FAUC9iZsL+nVHgoCJnqCbN588DjYxvGXaXaaFbfmQ/JhvKjZwsOukuR72XbHv71Qkug0HxA==} engines: {node: '>=4'} @@ -4934,6 +5325,9 @@ packages: resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} engines: {node: '>=10'} + tdigest@0.1.2: + resolution: {integrity: 
sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==} + temp-dir@1.0.0: resolution: {integrity: sha512-xZFXEGbG7SNC3itwBzI3RYjq/cEhBkx2hJuKGIUOcEULmkQExXiHat2z/qkISYsuR+IKumhEfKKbV5qXmhICFQ==} engines: {node: '>=4'} @@ -4942,6 +5336,10 @@ packages: resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} engines: {node: '>=8'} + test-exclude@7.0.1: + resolution: {integrity: sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg==} + engines: {node: '>=18'} + text-extensions@1.9.0: resolution: {integrity: sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ==} engines: {node: '>=0.10'} @@ -4971,12 +5369,23 @@ packages: tinybench@2.9.0: resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} - tinypool@0.8.4: - resolution: {integrity: sha512-i11VH5gS6IFeLY3gMBQ00/MmLncVP7JLXOw1vlgkytLmJK7QnEr7NXf0LBdxfmNPAeyetukOk0bOYrJrFGjYJQ==} + tinyexec@0.3.2: + resolution: {integrity: sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==} + + tinyglobby@0.2.10: + resolution: {integrity: sha512-Zc+8eJlFMvgatPZTl6A9L/yht8QqdmUNtURHaKZLmKBE12hNPSrqNkUp2cs3M/UKmNVVAMFQYSjYIVHDjW5zew==} + engines: {node: '>=12.0.0'} + + tinypool@1.0.2: + resolution: {integrity: sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==} + engines: {node: ^18.0.0 || >=20.0.0} + + tinyrainbow@2.0.0: + resolution: {integrity: sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==} engines: {node: '>=14.0.0'} - tinyspy@2.2.1: - resolution: {integrity: sha512-KYad6Vy5VDWV4GH3fjpseMQ/XU2BhIYP7Vzd0LG44qRWm/Yt2WCOTicFdvmgo6gWaqooMQCawTtILVQJupKu7A==} + tinyspy@3.0.2: + resolution: {integrity: 
sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==} engines: {node: '>=14.0.0'} tmp@0.0.33: @@ -5071,10 +5480,6 @@ packages: resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} - type-detect@4.1.0: - resolution: {integrity: sha512-Acylog8/luQ8L7il+geoSxhEkazvkslg7PSNKOX59mbB9cOveP5aq9h74Y7YU8yDpJwetzQQrfIwtf4Wp4LKcw==} - engines: {node: '>=4'} - type-fest@0.18.1: resolution: {integrity: sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==} engines: {node: '>=10'} @@ -5139,9 +5544,6 @@ packages: engines: {node: '>=14.17'} hasBin: true - ufo@1.5.4: - resolution: {integrity: sha512-UsUk3byDzKd04EyoZ7U4DOlxQaD14JUKQl6/P7wiX4FNvUfm3XL246n9W5AmqwW5RSFJ27NAuM0iLscAOYUiGQ==} - uglify-js@3.19.2: resolution: {integrity: sha512-S8KA6DDI47nQXJSi2ctQ629YzwOVs+bQML6DAtvy0wgNdpi+0ySpQK0g2pxBq2xfF2z3YCscu7NNA8nXT9PlIQ==} engines: {node: '>=0.8.0'} @@ -5197,6 +5599,12 @@ packages: peerDependencies: browserslist: '>= 4.21.0' + update-browserslist-db@1.1.2: + resolution: {integrity: sha512-PPypAm5qvlD7XMZC3BujecnaOxwhrtoFR+Dqkk5Aa/6DssiH0ibKoketaj9w8LP7Bont1rYeoV5plxD7RTEPRg==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} @@ -5246,9 +5654,9 @@ packages: resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} engines: {node: '>= 0.8'} - vite-node@1.6.0: - resolution: {integrity: sha512-de6HJgzC+TFzOu0NTC4RAIsyf/DY/ibWDYQUcuEA84EMHhcefTUGkjFHKKEJhQN4A+6I0u++kr3l36ZF2d7XRw==} - engines: {node: ^18.0.0 || >=20.0.0} + vite-node@3.0.4: + resolution: {integrity: sha512-7JZKEzcYV2Nx3u6rlvN8qdo3QV7Fxyt6hx+CCKz9fbWxdX5IvUOmTWEAxMrWxaiSf7CKGLJQ5rFu8prb/jBjOA==} + engines: {node: ^18.0.0 || ^20.0.0 
|| >=22.0.0} hasBin: true vite-plugin-dts@3.9.1: @@ -5261,10 +5669,10 @@ packages: vite: optional: true - vite-plugin-wasm@3.3.0: - resolution: {integrity: sha512-tVhz6w+W9MVsOCHzxo6SSMSswCeIw4HTrXEi6qL3IRzATl83jl09JVO1djBqPSwfjgnpVHNLYcaMbaDX5WB/pg==} + vite-plugin-wasm@3.4.1: + resolution: {integrity: sha512-ja3nSo2UCkVeitltJGkS3pfQHAanHv/DqGatdI39ja6McgABlpsZ5hVgl6wuR8Qx5etY3T5qgDQhOWzc5RReZA==} peerDependencies: - vite: ^2 || ^3 || ^4 || ^5 + vite: ^2 || ^3 || ^4 || ^5 || ^6 vite@5.4.1: resolution: {integrity: sha512-1oE6yuNXssjrZdblI9AfBbHCC41nnyoVoEZxQnID6yvQZAFBzxxkqoFLtHUMkYunL8hwOLEjgTuxpkRxvba3kA==} @@ -5297,6 +5705,46 @@ packages: terser: optional: true + vite@6.0.11: + resolution: {integrity: sha512-4VL9mQPKoHy4+FE0NnRE/kbY51TOfaknxAjt3fJbGJxhIpBZiqVzlZDEesWWsuREXHwNdAoOFZ9MkPEVXczHwg==} + engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} + hasBin: true + peerDependencies: + '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0 + jiti: '>=1.21.0' + less: '*' + lightningcss: ^1.21.0 + sass: '*' + sass-embedded: '*' + stylus: '*' + sugarss: '*' + terser: ^5.16.0 + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + '@types/node': + optional: true + jiti: + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + tsx: + optional: true + yaml: + optional: true + vitefu@0.2.5: resolution: {integrity: sha512-SgHtMLoqaeeGnd2evZ849ZbACbnwQCIwRH57t18FxcXoZop0uQu0uzlIhJBlF/eWVzuce0sHeqPcDo+evVcg8Q==} peerDependencies: @@ -5305,20 +5753,23 @@ packages: vite: optional: true - vitest@1.6.0: - resolution: {integrity: sha512-H5r/dN06swuFnzNFhq/dnz37bPXnq8xB2xB5JOVk8K09rUtoeNN+LHWkoQ0A/i3hvbUKKcCei9KpbxqHMLhLLA==} - engines: {node: ^18.0.0 || >=20.0.0} + vitest@3.0.4: + resolution: {integrity: sha512-6XG8oTKy2gnJIFTHP6LD7ExFeNLxiTkK3CfMvT7IfR8IN+BYICCf0lXUQmX7i7JoxUP8QmeP4mTnWXgflu4yjw==} + engines: {node: ^18.0.0 || 
^20.0.0 || >=22.0.0} hasBin: true peerDependencies: '@edge-runtime/vm': '*' - '@types/node': ^18.0.0 || >=20.0.0 - '@vitest/browser': 1.6.0 - '@vitest/ui': 1.6.0 + '@types/debug': ^4.1.12 + '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0 + '@vitest/browser': 3.0.4 + '@vitest/ui': 3.0.4 happy-dom: '*' jsdom: '*' peerDependenciesMeta: '@edge-runtime/vm': optional: true + '@types/debug': + optional: true '@types/node': optional: true '@vitest/browser': @@ -5519,10 +5970,6 @@ packages: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} - yocto-queue@1.1.1: - resolution: {integrity: sha512-b4JR1PFR10y1mKjhHY9LaGo6tmrgjit7hxVIeAmyMw3jegXR4dhYqLaQF5zMXZxY7tLpMyJeLjr1C4rLmkVe8g==} - engines: {node: '>=12.20'} - z-schema@5.0.5: resolution: {integrity: sha512-D7eujBWkLa3p2sIpJA0d1pr7es+a7m0vFAnZLlCEKq/Ij2k0MLi9Br2UPxoxdYystm5K1yeBGzub0FlYUEWj2Q==} engines: {node: '>=8.0.0'} @@ -5530,6 +5977,8 @@ packages: snapshots: + '@adobe/css-tools@4.4.1': {} + '@alloc/quick-lru@5.2.0': {} '@ampproject/remapping@2.3.0': @@ -5546,20 +5995,26 @@ snapshots: '@babel/highlight': 7.24.7 picocolors: 1.0.1 - '@babel/compat-data@7.25.2': {} + '@babel/code-frame@7.26.2': + dependencies: + '@babel/helper-validator-identifier': 7.25.9 + js-tokens: 4.0.0 + picocolors: 1.0.1 + + '@babel/compat-data@7.26.5': {} - '@babel/core@7.25.2': + '@babel/core@7.26.7': dependencies: '@ampproject/remapping': 2.3.0 - '@babel/code-frame': 7.24.7 - '@babel/generator': 7.25.0 - '@babel/helper-compilation-targets': 7.25.2 - '@babel/helper-module-transforms': 7.25.2(@babel/core@7.25.2) - '@babel/helpers': 7.25.0 - '@babel/parser': 7.25.3 - '@babel/template': 7.25.0 - '@babel/traverse': 7.25.3 - '@babel/types': 7.25.2 + '@babel/code-frame': 7.26.2 + '@babel/generator': 7.26.5 + '@babel/helper-compilation-targets': 7.26.5 + '@babel/helper-module-transforms': 7.26.0(@babel/core@7.26.7) + '@babel/helpers': 7.26.7 + '@babel/parser': 
7.26.7 + '@babel/template': 7.25.9 + '@babel/traverse': 7.26.7 + '@babel/types': 7.26.7 convert-source-map: 2.0.0 debug: 4.3.6 gensync: 1.0.0-beta.2 @@ -5568,57 +6023,54 @@ snapshots: transitivePeerDependencies: - supports-color - '@babel/generator@7.25.0': + '@babel/generator@7.26.5': dependencies: - '@babel/types': 7.25.2 + '@babel/parser': 7.26.7 + '@babel/types': 7.26.7 '@jridgewell/gen-mapping': 0.3.5 '@jridgewell/trace-mapping': 0.3.25 - jsesc: 2.5.2 + jsesc: 3.1.0 - '@babel/helper-compilation-targets@7.25.2': + '@babel/helper-compilation-targets@7.26.5': dependencies: - '@babel/compat-data': 7.25.2 - '@babel/helper-validator-option': 7.24.8 - browserslist: 4.23.3 + '@babel/compat-data': 7.26.5 + '@babel/helper-validator-option': 7.25.9 + browserslist: 4.24.4 lru-cache: 5.1.1 semver: 6.3.1 - '@babel/helper-module-imports@7.24.7': + '@babel/helper-module-imports@7.25.9': dependencies: - '@babel/traverse': 7.25.3 - '@babel/types': 7.25.2 + '@babel/traverse': 7.26.7 + '@babel/types': 7.26.7 transitivePeerDependencies: - supports-color - '@babel/helper-module-transforms@7.25.2(@babel/core@7.25.2)': + '@babel/helper-module-transforms@7.26.0(@babel/core@7.26.7)': dependencies: - '@babel/core': 7.25.2 - '@babel/helper-module-imports': 7.24.7 - '@babel/helper-simple-access': 7.24.7 - '@babel/helper-validator-identifier': 7.24.7 - '@babel/traverse': 7.25.3 + '@babel/core': 7.26.7 + '@babel/helper-module-imports': 7.25.9 + '@babel/helper-validator-identifier': 7.25.9 + '@babel/traverse': 7.26.7 transitivePeerDependencies: - supports-color - '@babel/helper-plugin-utils@7.24.8': {} - - '@babel/helper-simple-access@7.24.7': - dependencies: - '@babel/traverse': 7.25.3 - '@babel/types': 7.25.2 - transitivePeerDependencies: - - supports-color + '@babel/helper-plugin-utils@7.26.5': {} '@babel/helper-string-parser@7.24.8': {} + '@babel/helper-string-parser@7.25.9': {} + '@babel/helper-validator-identifier@7.24.7': {} - '@babel/helper-validator-option@7.24.8': {} + 
'@babel/helper-validator-identifier@7.25.9': {} - '@babel/helpers@7.25.0': + '@babel/helper-validator-option@7.25.9': {} + + '@babel/helpers@7.26.7': dependencies: - '@babel/template': 7.25.0 - '@babel/types': 7.25.2 + '@babel/template': 7.25.9 + '@babel/types': 7.26.7 '@babel/highlight@7.24.7': dependencies: @@ -5631,33 +6083,37 @@ snapshots: dependencies: '@babel/types': 7.25.2 - '@babel/plugin-transform-react-jsx-self@7.24.7(@babel/core@7.25.2)': + '@babel/parser@7.26.7': + dependencies: + '@babel/types': 7.26.7 + + '@babel/plugin-transform-react-jsx-self@7.25.9(@babel/core@7.26.7)': dependencies: - '@babel/core': 7.25.2 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/core': 7.26.7 + '@babel/helper-plugin-utils': 7.26.5 - '@babel/plugin-transform-react-jsx-source@7.24.7(@babel/core@7.25.2)': + '@babel/plugin-transform-react-jsx-source@7.25.9(@babel/core@7.26.7)': dependencies: - '@babel/core': 7.25.2 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/core': 7.26.7 + '@babel/helper-plugin-utils': 7.26.5 '@babel/runtime@7.25.0': dependencies: regenerator-runtime: 0.14.1 - '@babel/template@7.25.0': + '@babel/template@7.25.9': dependencies: - '@babel/code-frame': 7.24.7 - '@babel/parser': 7.25.3 - '@babel/types': 7.25.2 + '@babel/code-frame': 7.26.2 + '@babel/parser': 7.26.7 + '@babel/types': 7.26.7 - '@babel/traverse@7.25.3': + '@babel/traverse@7.26.7': dependencies: - '@babel/code-frame': 7.24.7 - '@babel/generator': 7.25.0 - '@babel/parser': 7.25.3 - '@babel/template': 7.25.0 - '@babel/types': 7.25.2 + '@babel/code-frame': 7.26.2 + '@babel/generator': 7.26.5 + '@babel/parser': 7.26.7 + '@babel/template': 7.25.9 + '@babel/types': 7.26.7 debug: 4.3.6 globals: 11.12.0 transitivePeerDependencies: @@ -5669,8 +6125,15 @@ snapshots: '@babel/helper-validator-identifier': 7.24.7 to-fast-properties: 2.0.0 + '@babel/types@7.26.7': + dependencies: + '@babel/helper-string-parser': 7.25.9 + '@babel/helper-validator-identifier': 7.25.9 + '@bcoe/v8-coverage@0.2.3': {} + 
'@bcoe/v8-coverage@1.0.2': {} + '@cbor-extract/cbor-extract-darwin-arm64@2.2.0': optional: true @@ -5709,75 +6172,150 @@ snapshots: '@esbuild/aix-ppc64@0.21.5': optional: true + '@esbuild/aix-ppc64@0.24.2': + optional: true + '@esbuild/android-arm64@0.21.5': optional: true + '@esbuild/android-arm64@0.24.2': + optional: true + '@esbuild/android-arm@0.21.5': optional: true + '@esbuild/android-arm@0.24.2': + optional: true + '@esbuild/android-x64@0.21.5': optional: true + '@esbuild/android-x64@0.24.2': + optional: true + '@esbuild/darwin-arm64@0.21.5': optional: true + '@esbuild/darwin-arm64@0.24.2': + optional: true + '@esbuild/darwin-x64@0.21.5': optional: true + '@esbuild/darwin-x64@0.24.2': + optional: true + '@esbuild/freebsd-arm64@0.21.5': optional: true + '@esbuild/freebsd-arm64@0.24.2': + optional: true + '@esbuild/freebsd-x64@0.21.5': optional: true + '@esbuild/freebsd-x64@0.24.2': + optional: true + '@esbuild/linux-arm64@0.21.5': optional: true + '@esbuild/linux-arm64@0.24.2': + optional: true + '@esbuild/linux-arm@0.21.5': optional: true - '@esbuild/linux-ia32@0.21.5': + '@esbuild/linux-arm@0.24.2': + optional: true + + '@esbuild/linux-ia32@0.21.5': + optional: true + + '@esbuild/linux-ia32@0.24.2': + optional: true + + '@esbuild/linux-loong64@0.21.5': + optional: true + + '@esbuild/linux-loong64@0.24.2': + optional: true + + '@esbuild/linux-mips64el@0.21.5': + optional: true + + '@esbuild/linux-mips64el@0.24.2': + optional: true + + '@esbuild/linux-ppc64@0.21.5': + optional: true + + '@esbuild/linux-ppc64@0.24.2': + optional: true + + '@esbuild/linux-riscv64@0.21.5': + optional: true + + '@esbuild/linux-riscv64@0.24.2': + optional: true + + '@esbuild/linux-s390x@0.21.5': + optional: true + + '@esbuild/linux-s390x@0.24.2': optional: true - '@esbuild/linux-loong64@0.21.5': + '@esbuild/linux-x64@0.21.5': optional: true - '@esbuild/linux-mips64el@0.21.5': + '@esbuild/linux-x64@0.24.2': optional: true - '@esbuild/linux-ppc64@0.21.5': + 
'@esbuild/netbsd-arm64@0.24.2': optional: true - '@esbuild/linux-riscv64@0.21.5': + '@esbuild/netbsd-x64@0.21.5': optional: true - '@esbuild/linux-s390x@0.21.5': + '@esbuild/netbsd-x64@0.24.2': optional: true - '@esbuild/linux-x64@0.21.5': + '@esbuild/openbsd-arm64@0.24.2': optional: true - '@esbuild/netbsd-x64@0.21.5': + '@esbuild/openbsd-x64@0.21.5': optional: true - '@esbuild/openbsd-x64@0.21.5': + '@esbuild/openbsd-x64@0.24.2': optional: true '@esbuild/sunos-x64@0.21.5': optional: true + '@esbuild/sunos-x64@0.24.2': + optional: true + '@esbuild/win32-arm64@0.21.5': optional: true + '@esbuild/win32-arm64@0.24.2': + optional: true + '@esbuild/win32-ia32@0.21.5': optional: true + '@esbuild/win32-ia32@0.24.2': + optional: true + '@esbuild/win32-x64@0.21.5': optional: true - '@eslint-community/eslint-utils@4.4.0(eslint@9.9.0)': + '@esbuild/win32-x64@0.24.2': + optional: true + + '@eslint-community/eslint-utils@4.4.0(eslint@9.9.0(jiti@1.21.6))': dependencies: - eslint: 9.9.0 + eslint: 9.9.0(jiti@1.21.6) eslint-visitor-keys: 3.4.3 '@eslint-community/regexpp@4.11.0': {} @@ -5814,12 +6352,6 @@ snapshots: '@hutson/parse-repository-url@3.0.2': {} - '@ibm/plex@6.4.1': - dependencies: - '@ibm/telemetry-js': 1.6.1 - - '@ibm/telemetry-js@1.6.1': {} - '@isaacs/cliui@8.0.2': dependencies: string-width: 5.1.2 @@ -5859,14 +6391,14 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.0 - '@lerna/create@8.1.8(typescript@5.5.4)': + '@lerna/create@8.1.8(@swc/core@1.7.11)(encoding@0.1.13)(typescript@5.5.4)': dependencies: '@npmcli/arborist': 7.5.4 '@npmcli/package-json': 5.2.0 '@npmcli/run-script': 8.1.0 - '@nx/devkit': 19.6.0(nx@19.6.0) + '@nx/devkit': 19.6.0(nx@19.6.0(@swc/core@1.7.11)) '@octokit/plugin-enterprise-rest': 6.0.1 - '@octokit/rest': 19.0.11 + '@octokit/rest': 19.0.11(encoding@0.1.13) aproba: 2.0.0 byte-size: 8.1.1 chalk: 4.1.0 @@ -5899,11 +6431,11 @@ snapshots: make-dir: 4.0.0 minimatch: 3.0.5 multimatch: 5.0.0 - node-fetch: 2.6.7 + 
node-fetch: 2.6.7(encoding@0.1.13) npm-package-arg: 11.0.2 npm-packlist: 8.0.2 npm-registry-fetch: 17.1.0 - nx: 19.6.0 + nx: 19.6.0(@swc/core@1.7.11) p-map: 4.0.0 p-map-series: 2.1.0 p-queue: 6.6.2 @@ -6158,29 +6690,29 @@ snapshots: - bluebird - supports-color - '@nrwl/devkit@19.6.0(nx@19.6.0)': + '@nrwl/devkit@19.6.0(nx@19.6.0(@swc/core@1.7.11))': dependencies: - '@nx/devkit': 19.6.0(nx@19.6.0) + '@nx/devkit': 19.6.0(nx@19.6.0(@swc/core@1.7.11)) transitivePeerDependencies: - nx - '@nrwl/tao@19.6.0': + '@nrwl/tao@19.6.0(@swc/core@1.7.11)': dependencies: - nx: 19.6.0 + nx: 19.6.0(@swc/core@1.7.11) tslib: 2.6.3 transitivePeerDependencies: - '@swc-node/register' - '@swc/core' - debug - '@nx/devkit@19.6.0(nx@19.6.0)': + '@nx/devkit@19.6.0(nx@19.6.0(@swc/core@1.7.11))': dependencies: - '@nrwl/devkit': 19.6.0(nx@19.6.0) + '@nrwl/devkit': 19.6.0(nx@19.6.0(@swc/core@1.7.11)) ejs: 3.1.10 enquirer: 2.3.6 ignore: 5.3.2 minimatch: 9.0.3 - nx: 19.6.0 + nx: 19.6.0(@swc/core@1.7.11) semver: 7.6.3 tmp: 0.2.3 tslib: 2.6.3 @@ -6218,11 +6750,11 @@ snapshots: '@octokit/auth-token@3.0.4': {} - '@octokit/core@4.2.4': + '@octokit/core@4.2.4(encoding@0.1.13)': dependencies: '@octokit/auth-token': 3.0.4 - '@octokit/graphql': 5.0.6 - '@octokit/request': 6.2.8 + '@octokit/graphql': 5.0.6(encoding@0.1.13) + '@octokit/request': 6.2.8(encoding@0.1.13) '@octokit/request-error': 3.0.3 '@octokit/types': 9.3.2 before-after-hook: 2.2.3 @@ -6236,9 +6768,9 @@ snapshots: is-plain-object: 5.0.0 universal-user-agent: 6.0.1 - '@octokit/graphql@5.0.6': + '@octokit/graphql@5.0.6(encoding@0.1.13)': dependencies: - '@octokit/request': 6.2.8 + '@octokit/request': 6.2.8(encoding@0.1.13) '@octokit/types': 9.3.2 universal-user-agent: 6.0.1 transitivePeerDependencies: @@ -6248,19 +6780,19 @@ snapshots: '@octokit/plugin-enterprise-rest@6.0.1': {} - '@octokit/plugin-paginate-rest@6.1.2(@octokit/core@4.2.4)': + '@octokit/plugin-paginate-rest@6.1.2(@octokit/core@4.2.4(encoding@0.1.13))': dependencies: - 
'@octokit/core': 4.2.4 + '@octokit/core': 4.2.4(encoding@0.1.13) '@octokit/tsconfig': 1.0.2 '@octokit/types': 9.3.2 - '@octokit/plugin-request-log@1.0.4(@octokit/core@4.2.4)': + '@octokit/plugin-request-log@1.0.4(@octokit/core@4.2.4(encoding@0.1.13))': dependencies: - '@octokit/core': 4.2.4 + '@octokit/core': 4.2.4(encoding@0.1.13) - '@octokit/plugin-rest-endpoint-methods@7.2.3(@octokit/core@4.2.4)': + '@octokit/plugin-rest-endpoint-methods@7.2.3(@octokit/core@4.2.4(encoding@0.1.13))': dependencies: - '@octokit/core': 4.2.4 + '@octokit/core': 4.2.4(encoding@0.1.13) '@octokit/types': 10.0.0 '@octokit/request-error@3.0.3': @@ -6269,23 +6801,23 @@ snapshots: deprecation: 2.3.1 once: 1.4.0 - '@octokit/request@6.2.8': + '@octokit/request@6.2.8(encoding@0.1.13)': dependencies: '@octokit/endpoint': 7.0.6 '@octokit/request-error': 3.0.3 '@octokit/types': 9.3.2 is-plain-object: 5.0.0 - node-fetch: 2.6.7 + node-fetch: 2.6.7(encoding@0.1.13) universal-user-agent: 6.0.1 transitivePeerDependencies: - encoding - '@octokit/rest@19.0.11': + '@octokit/rest@19.0.11(encoding@0.1.13)': dependencies: - '@octokit/core': 4.2.4 - '@octokit/plugin-paginate-rest': 6.1.2(@octokit/core@4.2.4) - '@octokit/plugin-request-log': 1.0.4(@octokit/core@4.2.4) - '@octokit/plugin-rest-endpoint-methods': 7.2.3(@octokit/core@4.2.4) + '@octokit/core': 4.2.4(encoding@0.1.13) + '@octokit/plugin-paginate-rest': 6.1.2(@octokit/core@4.2.4(encoding@0.1.13)) + '@octokit/plugin-request-log': 1.0.4(@octokit/core@4.2.4(encoding@0.1.13)) + '@octokit/plugin-rest-endpoint-methods': 7.2.3(@octokit/core@4.2.4(encoding@0.1.13)) transitivePeerDependencies: - encoding @@ -6299,6 +6831,8 @@ snapshots: dependencies: '@octokit/openapi-types': 18.1.1 + '@opentelemetry/api@1.9.0': {} + '@pkgjs/parseargs@0.11.0': optional: true @@ -6316,69 +6850,129 @@ snapshots: '@polka/url@1.0.0-next.25': {} - '@rollup/pluginutils@5.1.0': + '@rollup/pluginutils@5.1.0(rollup@4.34.0)': dependencies: '@types/estree': 1.0.5 estree-walker: 2.0.2 
picomatch: 2.3.1 + optionalDependencies: + rollup: 4.34.0 '@rollup/rollup-android-arm-eabi@4.20.0': optional: true + '@rollup/rollup-android-arm-eabi@4.34.0': + optional: true + '@rollup/rollup-android-arm64@4.20.0': optional: true + '@rollup/rollup-android-arm64@4.34.0': + optional: true + '@rollup/rollup-darwin-arm64@4.20.0': optional: true + '@rollup/rollup-darwin-arm64@4.34.0': + optional: true + '@rollup/rollup-darwin-x64@4.20.0': optional: true + '@rollup/rollup-darwin-x64@4.34.0': + optional: true + + '@rollup/rollup-freebsd-arm64@4.34.0': + optional: true + + '@rollup/rollup-freebsd-x64@4.34.0': + optional: true + '@rollup/rollup-linux-arm-gnueabihf@4.20.0': optional: true + '@rollup/rollup-linux-arm-gnueabihf@4.34.0': + optional: true + '@rollup/rollup-linux-arm-musleabihf@4.20.0': optional: true + '@rollup/rollup-linux-arm-musleabihf@4.34.0': + optional: true + '@rollup/rollup-linux-arm64-gnu@4.20.0': optional: true + '@rollup/rollup-linux-arm64-gnu@4.34.0': + optional: true + '@rollup/rollup-linux-arm64-musl@4.20.0': optional: true + '@rollup/rollup-linux-arm64-musl@4.34.0': + optional: true + + '@rollup/rollup-linux-loongarch64-gnu@4.34.0': + optional: true + '@rollup/rollup-linux-powerpc64le-gnu@4.20.0': optional: true + '@rollup/rollup-linux-powerpc64le-gnu@4.34.0': + optional: true + '@rollup/rollup-linux-riscv64-gnu@4.20.0': optional: true + '@rollup/rollup-linux-riscv64-gnu@4.34.0': + optional: true + '@rollup/rollup-linux-s390x-gnu@4.20.0': optional: true + '@rollup/rollup-linux-s390x-gnu@4.34.0': + optional: true + '@rollup/rollup-linux-x64-gnu@4.20.0': optional: true + '@rollup/rollup-linux-x64-gnu@4.34.0': + optional: true + '@rollup/rollup-linux-x64-musl@4.20.0': optional: true + '@rollup/rollup-linux-x64-musl@4.34.0': + optional: true + '@rollup/rollup-win32-arm64-msvc@4.20.0': optional: true + '@rollup/rollup-win32-arm64-msvc@4.34.0': + optional: true + '@rollup/rollup-win32-ia32-msvc@4.20.0': optional: true + 
'@rollup/rollup-win32-ia32-msvc@4.34.0': + optional: true + '@rollup/rollup-win32-x64-msvc@4.20.0': optional: true + '@rollup/rollup-win32-x64-msvc@4.34.0': + optional: true + '@rushstack/node-core-library@4.0.2(@types/node@20.14.15)': dependencies: - '@types/node': 20.14.15 fs-extra: 7.0.1 import-lazy: 4.0.0 jju: 1.4.0 resolve: 1.22.8 semver: 7.5.4 z-schema: 5.0.5 + optionalDependencies: + '@types/node': 20.14.15 '@rushstack/rig-package@0.5.2': dependencies: @@ -6388,8 +6982,9 @@ snapshots: '@rushstack/terminal@0.10.0(@types/node@20.14.15)': dependencies: '@rushstack/node-core-library': 4.0.2(@types/node@20.14.15) - '@types/node': 20.14.15 supports-color: 8.1.1 + optionalDependencies: + '@types/node': 20.14.15 '@rushstack/ts-command-line@4.19.1(@types/node@20.14.15)': dependencies: @@ -6436,18 +7031,18 @@ snapshots: '@sindresorhus/is@5.6.0': {} - '@sveltejs/vite-plugin-svelte-inspector@2.1.0(@sveltejs/vite-plugin-svelte@3.1.1)(svelte@4.2.18)(vite@5.4.1)': + '@sveltejs/vite-plugin-svelte-inspector@2.1.0(@sveltejs/vite-plugin-svelte@3.1.1(svelte@4.2.18)(vite@5.4.1(@types/node@20.14.15)))(svelte@4.2.18)(vite@5.4.1(@types/node@20.14.15))': dependencies: - '@sveltejs/vite-plugin-svelte': 3.1.1(svelte@4.2.18)(vite@5.4.1) + '@sveltejs/vite-plugin-svelte': 3.1.1(svelte@4.2.18)(vite@5.4.1(@types/node@20.14.15)) debug: 4.3.6 svelte: 4.2.18 vite: 5.4.1(@types/node@20.14.15) transitivePeerDependencies: - supports-color - '@sveltejs/vite-plugin-svelte@3.1.1(svelte@4.2.18)(vite@5.4.1)': + '@sveltejs/vite-plugin-svelte@3.1.1(svelte@4.2.18)(vite@5.4.1(@types/node@20.14.15))': dependencies: - '@sveltejs/vite-plugin-svelte-inspector': 2.1.0(@sveltejs/vite-plugin-svelte@3.1.1)(svelte@4.2.18)(vite@5.4.1) + '@sveltejs/vite-plugin-svelte-inspector': 2.1.0(@sveltejs/vite-plugin-svelte@3.1.1(svelte@4.2.18)(vite@5.4.1(@types/node@20.14.15)))(svelte@4.2.18)(vite@5.4.1(@types/node@20.14.15)) debug: 4.3.6 deepmerge: 4.3.1 kleur: 4.1.5 @@ -6455,7 +7050,7 @@ snapshots: svelte: 4.2.18 
svelte-hmr: 0.16.0(svelte@4.2.18) vite: 5.4.1(@types/node@20.14.15) - vitefu: 0.2.5(vite@5.4.1) + vitefu: 0.2.5(vite@5.4.1(@types/node@20.14.15)) transitivePeerDependencies: - supports-color @@ -6526,7 +7121,17 @@ snapshots: lz-string: 1.5.0 pretty-format: 27.5.1 - '@testing-library/react@14.3.1(react-dom@18.3.1)(react@18.3.1)': + '@testing-library/jest-dom@6.6.3': + dependencies: + '@adobe/css-tools': 4.4.1 + aria-query: 5.3.0 + chalk: 3.0.0 + css.escape: 1.5.1 + dom-accessibility-api: 0.6.3 + lodash: 4.17.21 + redent: 3.0.0 + + '@testing-library/react@14.3.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: '@babel/runtime': 7.25.0 '@testing-library/dom': 9.3.4 @@ -6597,6 +7202,8 @@ snapshots: '@types/estree@1.0.5': {} + '@types/estree@1.0.6': {} + '@types/express-serve-static-core@4.19.5': dependencies: '@types/node': 20.14.15 @@ -6669,31 +7276,33 @@ snapshots: dependencies: '@types/node': 20.14.15 - '@typescript-eslint/eslint-plugin@7.18.0(@typescript-eslint/parser@7.18.0)(eslint@9.9.0)(typescript@5.5.4)': + '@typescript-eslint/eslint-plugin@7.18.0(@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4))(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4)': dependencies: '@eslint-community/regexpp': 4.11.0 - '@typescript-eslint/parser': 7.18.0(eslint@9.9.0)(typescript@5.5.4) + '@typescript-eslint/parser': 7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4) '@typescript-eslint/scope-manager': 7.18.0 - '@typescript-eslint/type-utils': 7.18.0(eslint@9.9.0)(typescript@5.5.4) - '@typescript-eslint/utils': 7.18.0(eslint@9.9.0)(typescript@5.5.4) + '@typescript-eslint/type-utils': 7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4) + '@typescript-eslint/utils': 7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4) '@typescript-eslint/visitor-keys': 7.18.0 - eslint: 9.9.0 + eslint: 9.9.0(jiti@1.21.6) graphemer: 1.4.0 ignore: 5.3.2 natural-compare: 1.4.0 ts-api-utils: 1.3.0(typescript@5.5.4) + optionalDependencies: typescript: 5.5.4 
transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@7.18.0(eslint@9.9.0)(typescript@5.5.4)': + '@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4)': dependencies: '@typescript-eslint/scope-manager': 7.18.0 '@typescript-eslint/types': 7.18.0 '@typescript-eslint/typescript-estree': 7.18.0(typescript@5.5.4) '@typescript-eslint/visitor-keys': 7.18.0 debug: 4.3.6 - eslint: 9.9.0 + eslint: 9.9.0(jiti@1.21.6) + optionalDependencies: typescript: 5.5.4 transitivePeerDependencies: - supports-color @@ -6703,13 +7312,14 @@ snapshots: '@typescript-eslint/types': 7.18.0 '@typescript-eslint/visitor-keys': 7.18.0 - '@typescript-eslint/type-utils@7.18.0(eslint@9.9.0)(typescript@5.5.4)': + '@typescript-eslint/type-utils@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4)': dependencies: '@typescript-eslint/typescript-estree': 7.18.0(typescript@5.5.4) - '@typescript-eslint/utils': 7.18.0(eslint@9.9.0)(typescript@5.5.4) + '@typescript-eslint/utils': 7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4) debug: 4.3.6 - eslint: 9.9.0 + eslint: 9.9.0(jiti@1.21.6) ts-api-utils: 1.3.0(typescript@5.5.4) + optionalDependencies: typescript: 5.5.4 transitivePeerDependencies: - supports-color @@ -6726,17 +7336,18 @@ snapshots: minimatch: 9.0.5 semver: 7.6.3 ts-api-utils: 1.3.0(typescript@5.5.4) + optionalDependencies: typescript: 5.5.4 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@7.18.0(eslint@9.9.0)(typescript@5.5.4)': + '@typescript-eslint/utils@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4)': dependencies: - '@eslint-community/eslint-utils': 4.4.0(eslint@9.9.0) + '@eslint-community/eslint-utils': 4.4.0(eslint@9.9.0(jiti@1.21.6)) '@typescript-eslint/scope-manager': 7.18.0 '@typescript-eslint/types': 7.18.0 '@typescript-eslint/typescript-estree': 7.18.0(typescript@5.5.4) - eslint: 9.9.0 + eslint: 9.9.0(jiti@1.21.6) transitivePeerDependencies: - supports-color - typescript @@ -6746,82 +7357,92 @@ snapshots: 
'@typescript-eslint/types': 7.18.0 eslint-visitor-keys: 3.4.3 - '@vitejs/plugin-react-swc@3.7.0(vite@5.4.1)': + '@vitejs/plugin-react-swc@3.7.0(vite@6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0))': dependencies: '@swc/core': 1.7.11 - vite: 5.4.1(@types/node@20.14.15) + vite: 6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0) transitivePeerDependencies: - '@swc/helpers' - '@vitejs/plugin-react@4.3.1(vite@5.4.1)': + '@vitejs/plugin-react@4.3.4(vite@6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0))': dependencies: - '@babel/core': 7.25.2 - '@babel/plugin-transform-react-jsx-self': 7.24.7(@babel/core@7.25.2) - '@babel/plugin-transform-react-jsx-source': 7.24.7(@babel/core@7.25.2) + '@babel/core': 7.26.7 + '@babel/plugin-transform-react-jsx-self': 7.25.9(@babel/core@7.26.7) + '@babel/plugin-transform-react-jsx-source': 7.25.9(@babel/core@7.26.7) '@types/babel__core': 7.20.5 react-refresh: 0.14.2 - vite: 5.4.1(@types/node@20.14.15) + vite: 6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0) transitivePeerDependencies: - supports-color - '@vitest/coverage-v8@1.6.0(vitest@1.6.0)': + '@vitest/coverage-v8@3.0.4(vitest@3.0.4)': dependencies: '@ampproject/remapping': 2.3.0 - '@bcoe/v8-coverage': 0.2.3 - debug: 4.3.6 + '@bcoe/v8-coverage': 1.0.2 + debug: 4.4.0 istanbul-lib-coverage: 3.2.2 istanbul-lib-report: 3.0.1 istanbul-lib-source-maps: 5.0.6 istanbul-reports: 3.1.7 - magic-string: 0.30.11 - magicast: 0.3.4 - picocolors: 1.0.1 - std-env: 3.7.0 - strip-literal: 2.1.0 - test-exclude: 6.0.0 - vitest: 1.6.0(@types/node@20.14.15)(@vitest/ui@1.6.0) + magic-string: 0.30.17 + magicast: 0.3.5 + std-env: 3.8.0 + test-exclude: 7.0.1 + tinyrainbow: 2.0.0 + vitest: 3.0.4(@types/debug@4.1.12)(@types/node@20.14.15)(@vitest/ui@3.0.4)(jiti@1.21.6)(jsdom@22.1.0)(yaml@2.5.0) transitivePeerDependencies: - supports-color - '@vitest/expect@1.6.0': + '@vitest/expect@3.0.4': dependencies: - '@vitest/spy': 1.6.0 - '@vitest/utils': 1.6.0 - chai: 4.5.0 + '@vitest/spy': 3.0.4 + 
'@vitest/utils': 3.0.4 + chai: 5.1.2 + tinyrainbow: 2.0.0 - '@vitest/runner@1.6.0': + '@vitest/mocker@3.0.4(vite@6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0))': dependencies: - '@vitest/utils': 1.6.0 - p-limit: 5.0.0 - pathe: 1.1.2 + '@vitest/spy': 3.0.4 + estree-walker: 3.0.3 + magic-string: 0.30.17 + optionalDependencies: + vite: 6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0) - '@vitest/snapshot@1.6.0': + '@vitest/pretty-format@3.0.4': dependencies: - magic-string: 0.30.11 - pathe: 1.1.2 - pretty-format: 29.7.0 + tinyrainbow: 2.0.0 - '@vitest/spy@1.6.0': + '@vitest/runner@3.0.4': dependencies: - tinyspy: 2.2.1 + '@vitest/utils': 3.0.4 + pathe: 2.0.2 - '@vitest/ui@1.6.0(vitest@1.6.0)': + '@vitest/snapshot@3.0.4': dependencies: - '@vitest/utils': 1.6.0 - fast-glob: 3.3.2 + '@vitest/pretty-format': 3.0.4 + magic-string: 0.30.17 + pathe: 2.0.2 + + '@vitest/spy@3.0.4': + dependencies: + tinyspy: 3.0.2 + + '@vitest/ui@3.0.4(vitest@3.0.4)': + dependencies: + '@vitest/utils': 3.0.4 fflate: 0.8.2 - flatted: 3.3.1 - pathe: 1.1.2 - picocolors: 1.0.1 - sirv: 2.0.4 - vitest: 1.6.0(@types/node@20.14.15)(@vitest/ui@1.6.0) + flatted: 3.3.2 + pathe: 2.0.2 + sirv: 3.0.0 + tinyglobby: 0.2.10 + tinyrainbow: 2.0.0 + vitest: 3.0.4(@types/debug@4.1.12)(@types/node@20.14.15)(@vitest/ui@3.0.4)(jiti@1.21.6)(jsdom@22.1.0)(yaml@2.5.0) - '@vitest/utils@1.6.0': + '@vitest/utils@3.0.4': dependencies: - diff-sequences: 29.6.3 - estree-walker: 3.0.3 - loupe: 2.3.7 - pretty-format: 29.7.0 + '@vitest/pretty-format': 3.0.4 + loupe: 3.1.3 + tinyrainbow: 2.0.0 '@volar/language-core@1.11.1': dependencies: @@ -6859,8 +7480,9 @@ snapshots: minimatch: 9.0.5 muggle-string: 0.3.1 path-browserify: 1.0.1 - typescript: 5.5.4 vue-template-compiler: 2.7.16 + optionalDependencies: + typescript: 5.5.4 '@vue/shared@3.4.38': {} @@ -7053,7 +7675,7 @@ snapshots: arrify@2.0.1: {} - assertion-error@1.1.0: {} + assertion-error@2.0.1: {} ast-types-flow@0.0.8: {} @@ -7116,6 +7738,8 @@ snapshots: 
binary-extensions@2.3.0: {} + bintrees@1.0.2: {} + bl@4.1.0: dependencies: buffer: 5.7.1 @@ -7159,6 +7783,13 @@ snapshots: node-releases: 2.0.18 update-browserslist-db: 1.1.0(browserslist@4.23.3) + browserslist@4.24.4: + dependencies: + caniuse-lite: 1.0.30001696 + electron-to-chromium: 1.5.90 + node-releases: 2.0.19 + update-browserslist-db: 1.1.2(browserslist@4.24.4) + bs58@5.0.0: dependencies: base-x: 4.0.0 @@ -7258,6 +7889,8 @@ snapshots: caniuse-lite@1.0.30001651: {} + caniuse-lite@1.0.30001696: {} + cbor-extract@2.2.0: dependencies: node-gyp-build-optional-packages: 5.1.1 @@ -7274,15 +7907,13 @@ snapshots: optionalDependencies: cbor-extract: 2.2.0 - chai@4.5.0: + chai@5.1.2: dependencies: - assertion-error: 1.1.0 - check-error: 1.0.3 - deep-eql: 4.1.4 - get-func-name: 2.0.2 - loupe: 2.3.7 - pathval: 1.1.1 - type-detect: 4.1.0 + assertion-error: 2.0.1 + check-error: 2.1.1 + deep-eql: 5.0.2 + loupe: 3.1.3 + pathval: 2.0.0 chalk@2.4.2: dependencies: @@ -7290,6 +7921,11 @@ snapshots: escape-string-regexp: 1.0.5 supports-color: 5.5.0 + chalk@3.0.0: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + chalk@4.1.0: dependencies: ansi-styles: 4.3.0 @@ -7302,9 +7938,7 @@ snapshots: chardet@0.7.0: {} - check-error@1.0.3: - dependencies: - get-func-name: 2.0.2 + check-error@2.1.1: {} chokidar@3.6.0: dependencies: @@ -7414,8 +8048,6 @@ snapshots: readable-stream: 3.6.2 typedarray: 0.0.6 - confbox@0.1.7: {} - config-chain@1.1.13: dependencies: ini: 1.3.8 @@ -7499,6 +8131,7 @@ snapshots: js-yaml: 4.1.0 parse-json: 5.2.0 path-type: 4.0.0 + optionalDependencies: typescript: 5.5.4 create-require@1.1.1: {} @@ -7532,6 +8165,8 @@ snapshots: mdn-data: 2.0.30 source-map-js: 1.2.0 + css.escape@1.5.1: {} + cssesc@3.0.0: {} cssstyle@3.0.0: @@ -7579,12 +8214,17 @@ snapshots: debug@3.2.7(supports-color@5.5.0): dependencies: ms: 2.1.3 + optionalDependencies: supports-color: 5.5.0 debug@4.3.6: dependencies: ms: 2.1.2 + debug@4.4.0: + dependencies: + ms: 2.1.3 + 
decamelize-keys@1.1.1: dependencies: decamelize: 1.2.0 @@ -7602,9 +8242,7 @@ snapshots: dedent@1.5.3: {} - deep-eql@4.1.4: - dependencies: - type-detect: 4.1.0 + deep-eql@5.0.2: {} deep-equal@2.2.3: dependencies: @@ -7693,6 +8331,8 @@ snapshots: dom-accessibility-api@0.5.16: {} + dom-accessibility-api@0.6.3: {} + domexception@4.0.0: dependencies: webidl-conversions: 7.0.0 @@ -7719,6 +8359,8 @@ snapshots: electron-to-chromium@1.5.9: {} + electron-to-chromium@1.5.90: {} + emoji-regex@8.0.0: {} emoji-regex@9.2.2: {} @@ -7834,6 +8476,8 @@ snapshots: iterator.prototype: 1.1.2 safe-array-concat: 1.1.2 + es-module-lexer@1.6.0: {} + es-object-atoms@1.0.0: dependencies: es-errors: 1.3.0 @@ -7882,31 +8526,61 @@ snapshots: '@esbuild/win32-ia32': 0.21.5 '@esbuild/win32-x64': 0.21.5 + esbuild@0.24.2: + optionalDependencies: + '@esbuild/aix-ppc64': 0.24.2 + '@esbuild/android-arm': 0.24.2 + '@esbuild/android-arm64': 0.24.2 + '@esbuild/android-x64': 0.24.2 + '@esbuild/darwin-arm64': 0.24.2 + '@esbuild/darwin-x64': 0.24.2 + '@esbuild/freebsd-arm64': 0.24.2 + '@esbuild/freebsd-x64': 0.24.2 + '@esbuild/linux-arm': 0.24.2 + '@esbuild/linux-arm64': 0.24.2 + '@esbuild/linux-ia32': 0.24.2 + '@esbuild/linux-loong64': 0.24.2 + '@esbuild/linux-mips64el': 0.24.2 + '@esbuild/linux-ppc64': 0.24.2 + '@esbuild/linux-riscv64': 0.24.2 + '@esbuild/linux-s390x': 0.24.2 + '@esbuild/linux-x64': 0.24.2 + '@esbuild/netbsd-arm64': 0.24.2 + '@esbuild/netbsd-x64': 0.24.2 + '@esbuild/openbsd-arm64': 0.24.2 + '@esbuild/openbsd-x64': 0.24.2 + '@esbuild/sunos-x64': 0.24.2 + '@esbuild/win32-arm64': 0.24.2 + '@esbuild/win32-ia32': 0.24.2 + '@esbuild/win32-x64': 0.24.2 + escalade@3.1.2: {} + escalade@3.2.0: {} + escape-html@1.0.3: {} escape-string-regexp@1.0.5: {} escape-string-regexp@4.0.0: {} - eslint-config-airbnb-base@15.0.0(eslint-plugin-import@2.29.1)(eslint@9.9.0): + 
eslint-config-airbnb-base@15.0.0(eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4))(eslint@9.9.0(jiti@1.21.6)))(eslint@9.9.0(jiti@1.21.6)): dependencies: confusing-browser-globals: 1.0.11 - eslint: 9.9.0 - eslint-plugin-import: 2.29.1(@typescript-eslint/parser@7.18.0)(eslint@9.9.0) + eslint: 9.9.0(jiti@1.21.6) + eslint-plugin-import: 2.29.1(@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4))(eslint@9.9.0(jiti@1.21.6)) object.assign: 4.1.5 object.entries: 1.1.8 semver: 6.3.1 - eslint-config-airbnb@19.0.4(eslint-plugin-import@2.29.1)(eslint-plugin-jsx-a11y@6.9.0)(eslint-plugin-react-hooks@4.6.2)(eslint-plugin-react@7.35.0)(eslint@9.9.0): + eslint-config-airbnb@19.0.4(eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4))(eslint@9.9.0(jiti@1.21.6)))(eslint-plugin-jsx-a11y@6.9.0(eslint@9.9.0(jiti@1.21.6)))(eslint-plugin-react-hooks@4.6.2(eslint@9.9.0(jiti@1.21.6)))(eslint-plugin-react@7.35.0(eslint@9.9.0(jiti@1.21.6)))(eslint@9.9.0(jiti@1.21.6)): dependencies: - eslint: 9.9.0 - eslint-config-airbnb-base: 15.0.0(eslint-plugin-import@2.29.1)(eslint@9.9.0) - eslint-plugin-import: 2.29.1(@typescript-eslint/parser@7.18.0)(eslint@9.9.0) - eslint-plugin-jsx-a11y: 6.9.0(eslint@9.9.0) - eslint-plugin-react: 7.35.0(eslint@9.9.0) - eslint-plugin-react-hooks: 4.6.2(eslint@9.9.0) + eslint: 9.9.0(jiti@1.21.6) + eslint-config-airbnb-base: 15.0.0(eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4))(eslint@9.9.0(jiti@1.21.6)))(eslint@9.9.0(jiti@1.21.6)) + eslint-plugin-import: 2.29.1(@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4))(eslint@9.9.0(jiti@1.21.6)) + eslint-plugin-jsx-a11y: 6.9.0(eslint@9.9.0(jiti@1.21.6)) + eslint-plugin-react: 7.35.0(eslint@9.9.0(jiti@1.21.6)) + eslint-plugin-react-hooks: 4.6.2(eslint@9.9.0(jiti@1.21.6)) object.assign: 4.1.5 object.entries: 
1.1.8 @@ -7918,27 +8592,27 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-module-utils@2.8.1(@typescript-eslint/parser@7.18.0)(eslint-import-resolver-node@0.3.9)(eslint@9.9.0): + eslint-module-utils@2.8.1(@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4))(eslint-import-resolver-node@0.3.9)(eslint@9.9.0(jiti@1.21.6)): dependencies: - '@typescript-eslint/parser': 7.18.0(eslint@9.9.0)(typescript@5.5.4) debug: 3.2.7(supports-color@5.5.0) - eslint: 9.9.0 + optionalDependencies: + '@typescript-eslint/parser': 7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4) + eslint: 9.9.0(jiti@1.21.6) eslint-import-resolver-node: 0.3.9 transitivePeerDependencies: - supports-color - eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.18.0)(eslint@9.9.0): + eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4))(eslint@9.9.0(jiti@1.21.6)): dependencies: - '@typescript-eslint/parser': 7.18.0(eslint@9.9.0)(typescript@5.5.4) array-includes: 3.1.8 array.prototype.findlastindex: 1.2.5 array.prototype.flat: 1.3.2 array.prototype.flatmap: 1.3.2 debug: 3.2.7(supports-color@5.5.0) doctrine: 2.1.0 - eslint: 9.9.0 + eslint: 9.9.0(jiti@1.21.6) eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.8.1(@typescript-eslint/parser@7.18.0)(eslint-import-resolver-node@0.3.9)(eslint@9.9.0) + eslint-module-utils: 2.8.1(@typescript-eslint/parser@7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4))(eslint-import-resolver-node@0.3.9)(eslint@9.9.0(jiti@1.21.6)) hasown: 2.0.2 is-core-module: 2.15.0 is-glob: 4.0.3 @@ -7948,12 +8622,14 @@ snapshots: object.values: 1.2.0 semver: 6.3.1 tsconfig-paths: 3.15.0 + optionalDependencies: + '@typescript-eslint/parser': 7.18.0(eslint@9.9.0(jiti@1.21.6))(typescript@5.5.4) transitivePeerDependencies: - eslint-import-resolver-typescript - eslint-import-resolver-webpack - supports-color - eslint-plugin-jsx-a11y@6.9.0(eslint@9.9.0): + 
eslint-plugin-jsx-a11y@6.9.0(eslint@9.9.0(jiti@1.21.6)): dependencies: aria-query: 5.1.3 array-includes: 3.1.8 @@ -7964,7 +8640,7 @@ snapshots: damerau-levenshtein: 1.0.8 emoji-regex: 9.2.2 es-iterator-helpers: 1.0.19 - eslint: 9.9.0 + eslint: 9.9.0(jiti@1.21.6) hasown: 2.0.2 jsx-ast-utils: 3.3.5 language-tags: 1.0.9 @@ -7973,11 +8649,15 @@ snapshots: safe-regex-test: 1.0.3 string.prototype.includes: 2.0.0 - eslint-plugin-react-hooks@4.6.2(eslint@9.9.0): + eslint-plugin-react-hooks@4.6.2(eslint@9.9.0(jiti@1.21.6)): + dependencies: + eslint: 9.9.0(jiti@1.21.6) + + eslint-plugin-react-hooks@5.1.0(eslint@9.9.0(jiti@1.21.6)): dependencies: - eslint: 9.9.0 + eslint: 9.9.0(jiti@1.21.6) - eslint-plugin-react@7.35.0(eslint@9.9.0): + eslint-plugin-react@7.35.0(eslint@9.9.0(jiti@1.21.6)): dependencies: array-includes: 3.1.8 array.prototype.findlast: 1.2.5 @@ -7985,7 +8665,7 @@ snapshots: array.prototype.tosorted: 1.1.4 doctrine: 2.1.0 es-iterator-helpers: 1.0.19 - eslint: 9.9.0 + eslint: 9.9.0(jiti@1.21.6) estraverse: 5.3.0 hasown: 2.0.2 jsx-ast-utils: 3.3.5 @@ -8008,9 +8688,9 @@ snapshots: eslint-visitor-keys@4.0.0: {} - eslint@9.9.0: + eslint@9.9.0(jiti@1.21.6): dependencies: - '@eslint-community/eslint-utils': 4.4.0(eslint@9.9.0) + '@eslint-community/eslint-utils': 4.4.0(eslint@9.9.0(jiti@1.21.6)) '@eslint-community/regexpp': 4.11.0 '@eslint/config-array': 0.17.1 '@eslint/eslintrc': 3.1.0 @@ -8044,6 +8724,8 @@ snapshots: optionator: 0.9.4 strip-ansi: 6.0.1 text-table: 0.2.0 + optionalDependencies: + jiti: 1.21.6 transitivePeerDependencies: - supports-color @@ -8082,7 +8764,7 @@ snapshots: execa@5.0.0: dependencies: cross-spawn: 7.0.3 - get-stream: 6.0.0 + get-stream: 6.0.1 human-signals: 2.1.0 is-stream: 2.0.0 merge-stream: 2.0.0 @@ -8103,6 +8785,8 @@ snapshots: signal-exit: 4.1.0 strip-final-newline: 3.0.0 + expect-type@1.1.0: {} + exponential-backoff@3.1.1: {} express@4.19.2: @@ -8167,6 +8851,10 @@ snapshots: dependencies: reusify: 1.0.4 + fdir@6.4.3(picomatch@4.0.2): + 
optionalDependencies: + picomatch: 4.0.2 + fflate@0.8.2: {} figures@3.2.0: @@ -8220,8 +8908,10 @@ snapshots: flatted@3.3.1: {} + flatted@3.3.2: {} + follow-redirects@1.15.6(debug@4.3.6): - dependencies: + optionalDependencies: debug: 4.3.6 for-each@0.3.3: @@ -8304,8 +8994,6 @@ snapshots: get-caller-file@2.0.5: {} - get-func-name@2.0.2: {} - get-intrinsic@1.2.4: dependencies: es-errors: 1.3.0 @@ -8833,7 +9521,7 @@ snapshots: istanbul-lib-source-maps@5.0.6: dependencies: '@jridgewell/trace-mapping': 0.3.25 - debug: 4.3.6 + debug: 4.4.0 istanbul-lib-coverage: 3.2.2 transitivePeerDependencies: - supports-color @@ -8860,13 +9548,13 @@ snapshots: jake@10.9.2: dependencies: async: 3.2.5 - chalk: 4.1.0 + chalk: 4.1.2 filelist: 1.0.4 minimatch: 3.1.2 jest-diff@29.7.0: dependencies: - chalk: 4.1.0 + chalk: 4.1.2 diff-sequences: 29.6.3 jest-get-type: 29.6.3 pretty-format: 29.7.0 @@ -8879,8 +9567,6 @@ snapshots: js-tokens@4.0.0: {} - js-tokens@9.0.0: {} - js-yaml@3.14.1: dependencies: argparse: 1.0.10 @@ -8922,7 +9608,7 @@ snapshots: - supports-color - utf-8-validate - jsesc@2.5.2: {} + jsesc@3.1.0: {} json-buffer@3.0.1: {} @@ -8989,15 +9675,15 @@ snapshots: dependencies: language-subtag-registry: 0.3.23 - lerna@8.1.8: + lerna@8.1.8(@swc/core@1.7.11)(encoding@0.1.13): dependencies: - '@lerna/create': 8.1.8(typescript@5.5.4) + '@lerna/create': 8.1.8(@swc/core@1.7.11)(encoding@0.1.13)(typescript@5.5.4) '@npmcli/arborist': 7.5.4 '@npmcli/package-json': 5.2.0 '@npmcli/run-script': 8.1.0 - '@nx/devkit': 19.6.0(nx@19.6.0) + '@nx/devkit': 19.6.0(nx@19.6.0(@swc/core@1.7.11)) '@octokit/plugin-enterprise-rest': 6.0.1 - '@octokit/rest': 19.0.11 + '@octokit/rest': 19.0.11(encoding@0.1.13) aproba: 2.0.0 byte-size: 8.1.1 chalk: 4.1.0 @@ -9036,11 +9722,11 @@ snapshots: make-dir: 4.0.0 minimatch: 3.0.5 multimatch: 5.0.0 - node-fetch: 2.6.7 + node-fetch: 2.6.7(encoding@0.1.13) npm-package-arg: 11.0.2 npm-packlist: 8.0.2 npm-registry-fetch: 17.1.0 - nx: 19.6.0 + nx: 19.6.0(@swc/core@1.7.11) 
p-map: 4.0.0 p-map-series: 2.1.0 p-pipe: 3.1.0 @@ -9128,11 +9814,6 @@ snapshots: strip-bom: 4.0.0 type-fest: 0.6.0 - local-pkg@0.5.0: - dependencies: - mlly: 1.7.1 - pkg-types: 1.1.3 - locate-character@3.0.0: {} locate-path@2.0.0: @@ -9160,16 +9841,14 @@ snapshots: log-symbols@4.1.0: dependencies: - chalk: 4.1.0 + chalk: 4.1.2 is-unicode-supported: 0.1.0 loose-envify@1.4.0: dependencies: js-tokens: 4.0.0 - loupe@2.3.7: - dependencies: - get-func-name: 2.0.2 + loupe@3.1.3: {} lowercase-keys@3.0.0: {} @@ -9196,10 +9875,14 @@ snapshots: dependencies: '@jridgewell/sourcemap-codec': 1.5.0 - magicast@0.3.4: + magic-string@0.30.17: dependencies: - '@babel/parser': 7.25.3 - '@babel/types': 7.25.2 + '@jridgewell/sourcemap-codec': 1.5.0 + + magicast@0.3.5: + dependencies: + '@babel/parser': 7.26.7 + '@babel/types': 7.26.7 source-map-js: 1.2.0 make-dir@2.1.0: @@ -9383,13 +10066,6 @@ snapshots: mkdirp@1.0.4: {} - mlly@1.7.1: - dependencies: - acorn: 8.12.1 - pathe: 1.1.2 - pkg-types: 1.1.3 - ufo: 1.5.4 - modify-values@1.0.1: {} mri@1.2.0: {} @@ -9410,7 +10086,7 @@ snapshots: array-differ: 3.0.0 array-union: 2.1.0 arrify: 2.0.1 - minimatch: 3.0.5 + minimatch: 3.1.2 mute-stream@0.0.8: {} @@ -9424,6 +10100,8 @@ snapshots: nanoid@3.3.7: {} + nanoid@3.3.8: {} + natural-compare@1.4.0: {} negotiator@0.6.3: {} @@ -9432,9 +10110,11 @@ snapshots: nice-try@1.0.5: {} - node-fetch@2.6.7: + node-fetch@2.6.7(encoding@0.1.13): dependencies: whatwg-url: 5.0.0 + optionalDependencies: + encoding: 0.1.13 node-gyp-build-optional-packages@5.1.1: dependencies: @@ -9460,6 +10140,8 @@ snapshots: node-releases@2.0.18: {} + node-releases@2.0.19: {} + nodemon@2.0.22: dependencies: chokidar: 3.6.0 @@ -9573,15 +10255,15 @@ snapshots: nwsapi@2.2.12: {} - nx@19.6.0: + nx@19.6.0(@swc/core@1.7.11): dependencies: '@napi-rs/wasm-runtime': 0.2.4 - '@nrwl/tao': 19.6.0 + '@nrwl/tao': 19.6.0(@swc/core@1.7.11) '@yarnpkg/lockfile': 1.1.0 '@yarnpkg/parsers': 3.0.0-rc.46 '@zkochan/js-yaml': 0.0.7 axios: 1.7.4 - chalk: 
4.1.0 + chalk: 4.1.2 cli-cursor: 3.1.0 cli-spinners: 2.6.1 cliui: 8.0.1 @@ -9621,6 +10303,7 @@ snapshots: '@nx/nx-linux-x64-musl': 19.6.0 '@nx/nx-win32-arm64-msvc': 19.6.0 '@nx/nx-win32-x64-msvc': 19.6.0 + '@swc/core': 1.7.11 transitivePeerDependencies: - debug @@ -9705,9 +10388,9 @@ snapshots: ora@5.3.0: dependencies: bl: 4.1.0 - chalk: 4.1.0 + chalk: 4.1.2 cli-cursor: 3.1.0 - cli-spinners: 2.6.1 + cli-spinners: 2.9.2 is-interactive: 1.0.0 log-symbols: 4.1.0 strip-ansi: 6.0.1 @@ -9743,10 +10426,6 @@ snapshots: dependencies: yocto-queue: 0.1.0 - p-limit@5.0.0: - dependencies: - yocto-queue: 1.1.1 - p-locate@2.0.0: dependencies: p-limit: 1.3.0 @@ -9885,9 +10564,9 @@ snapshots: path-type@4.0.0: {} - pathe@1.1.2: {} + pathe@2.0.2: {} - pathval@1.1.1: {} + pathval@2.0.0: {} periscopic@3.1.0: dependencies: @@ -9897,8 +10576,12 @@ snapshots: picocolors@1.0.1: {} + picocolors@1.1.1: {} + picomatch@2.3.1: {} + picomatch@4.0.2: {} + pidtree@0.3.1: {} pify@2.3.0: {} @@ -9915,12 +10598,6 @@ snapshots: dependencies: find-up: 4.1.0 - pkg-types@1.1.3: - dependencies: - confbox: 0.1.7 - mlly: 1.7.1 - pathe: 1.1.2 - portfinder@1.0.32: dependencies: async: 2.6.4 @@ -9943,12 +10620,22 @@ snapshots: camelcase-css: 2.0.1 postcss: 8.4.41 - postcss-load-config@4.0.2(postcss@8.4.41)(ts-node@10.9.2): + postcss-load-config@4.0.2(postcss@8.4.41)(ts-node@10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4)): dependencies: lilconfig: 3.1.2 + yaml: 2.5.0 + optionalDependencies: postcss: 8.4.41 - ts-node: 10.9.2(@types/node@20.14.15)(typescript@5.5.4) + ts-node: 10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4) + + postcss-load-config@4.0.2(postcss@8.5.1)(ts-node@10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4)): + dependencies: + lilconfig: 3.1.2 yaml: 2.5.0 + optionalDependencies: + postcss: 8.5.1 + ts-node: 10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4) + optional: true postcss-nested@6.2.0(postcss@8.4.41): dependencies: @@ -9968,6 
+10655,12 @@ snapshots: picocolors: 1.0.1 source-map-js: 1.2.0 + postcss@8.5.1: + dependencies: + nanoid: 3.3.8 + picocolors: 1.1.1 + source-map-js: 1.2.1 + prelude-ls@1.2.1: {} prettier@2.8.8: {} @@ -9990,6 +10683,11 @@ snapshots: proggy@2.0.0: {} + prom-client@15.1.3: + dependencies: + '@opentelemetry/api': 1.9.0 + tdigest: 0.1.2 + promise-all-reject-late@1.0.1: {} promise-call-limit@3.0.1: {} @@ -10068,6 +10766,11 @@ snapshots: react: 18.3.1 scheduler: 0.23.2 + react-error-boundary@5.0.0(react@18.3.1): + dependencies: + '@babel/runtime': 7.25.0 + react: 18.3.1 + react-is@16.13.1: {} react-is@17.0.2: {} @@ -10253,12 +10956,14 @@ snapshots: dependencies: glob: 10.4.5 - rollup-plugin-visualizer@5.12.0: + rollup-plugin-visualizer@5.12.0(rollup@4.34.0): dependencies: open: 8.4.2 picomatch: 2.3.1 source-map: 0.7.4 yargs: 17.7.2 + optionalDependencies: + rollup: 4.34.0 rollup@4.20.0: dependencies: @@ -10282,6 +10987,31 @@ snapshots: '@rollup/rollup-win32-x64-msvc': 4.20.0 fsevents: 2.3.3 + rollup@4.34.0: + dependencies: + '@types/estree': 1.0.6 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.34.0 + '@rollup/rollup-android-arm64': 4.34.0 + '@rollup/rollup-darwin-arm64': 4.34.0 + '@rollup/rollup-darwin-x64': 4.34.0 + '@rollup/rollup-freebsd-arm64': 4.34.0 + '@rollup/rollup-freebsd-x64': 4.34.0 + '@rollup/rollup-linux-arm-gnueabihf': 4.34.0 + '@rollup/rollup-linux-arm-musleabihf': 4.34.0 + '@rollup/rollup-linux-arm64-gnu': 4.34.0 + '@rollup/rollup-linux-arm64-musl': 4.34.0 + '@rollup/rollup-linux-loongarch64-gnu': 4.34.0 + '@rollup/rollup-linux-powerpc64le-gnu': 4.34.0 + '@rollup/rollup-linux-riscv64-gnu': 4.34.0 + '@rollup/rollup-linux-s390x-gnu': 4.34.0 + '@rollup/rollup-linux-x64-gnu': 4.34.0 + '@rollup/rollup-linux-x64-musl': 4.34.0 + '@rollup/rollup-win32-arm64-msvc': 4.34.0 + '@rollup/rollup-win32-ia32-msvc': 4.34.0 + '@rollup/rollup-win32-x64-msvc': 4.34.0 + fsevents: 2.3.3 + rrweb-cssom@0.6.0: {} run-async@2.4.1: {} @@ -10451,7 +11181,7 @@ 
snapshots: dependencies: semver: 7.0.0 - sirv@2.0.4: + sirv@3.0.0: dependencies: '@polka/url': 1.0.0-next.25 mrmime: 2.0.0 @@ -10487,6 +11217,8 @@ snapshots: source-map-js@1.2.0: {} + source-map-js@1.2.1: {} + source-map@0.6.1: {} source-map@0.7.4: {} @@ -10530,7 +11262,7 @@ snapshots: statuses@2.0.1: {} - std-env@3.7.0: {} + std-env@3.8.0: {} stop-iteration-iterator@1.0.0: dependencies: @@ -10637,10 +11369,6 @@ snapshots: strip-json-comments@3.1.1: {} - strip-literal@2.1.0: - dependencies: - js-tokens: 9.0.0 - strong-log-transformer@2.1.0: dependencies: duplexer: 0.1.2 @@ -10671,14 +11399,14 @@ snapshots: supports-preserve-symlinks-flag@1.0.0: {} - svelte-check@3.8.5(@babel/core@7.25.2)(svelte@4.2.18): + svelte-check@3.8.5(@babel/core@7.26.7)(postcss-load-config@4.0.2(postcss@8.5.1)(ts-node@10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4)))(postcss@8.5.1)(svelte@4.2.18): dependencies: '@jridgewell/trace-mapping': 0.3.25 chokidar: 3.6.0 picocolors: 1.0.1 sade: 1.8.1 svelte: 4.2.18 - svelte-preprocess: 5.1.4(@babel/core@7.25.2)(svelte@4.2.18)(typescript@5.5.4) + svelte-preprocess: 5.1.4(@babel/core@7.26.7)(postcss-load-config@4.0.2(postcss@8.5.1)(ts-node@10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4)))(postcss@8.5.1)(svelte@4.2.18)(typescript@5.5.4) typescript: 5.5.4 transitivePeerDependencies: - '@babel/core' @@ -10695,15 +11423,18 @@ snapshots: dependencies: svelte: 4.2.18 - svelte-preprocess@5.1.4(@babel/core@7.25.2)(svelte@4.2.18)(typescript@5.5.4): + svelte-preprocess@5.1.4(@babel/core@7.26.7)(postcss-load-config@4.0.2(postcss@8.5.1)(ts-node@10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4)))(postcss@8.5.1)(svelte@4.2.18)(typescript@5.5.4): dependencies: - '@babel/core': 7.25.2 '@types/pug': 2.0.10 detect-indent: 6.1.0 magic-string: 0.30.11 sorcery: 0.11.1 strip-indent: 3.0.0 svelte: 4.2.18 + optionalDependencies: + '@babel/core': 7.26.7 + postcss: 8.5.1 + postcss-load-config: 
4.0.2(postcss@8.5.1)(ts-node@10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4)) typescript: 5.5.4 svelte@4.2.18: @@ -10725,7 +11456,7 @@ snapshots: symbol-tree@3.2.4: {} - tailwindcss@3.4.10(ts-node@10.9.2): + tailwindcss@3.4.10(ts-node@10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4)): dependencies: '@alloc/quick-lru': 5.2.0 arg: 5.0.2 @@ -10744,7 +11475,7 @@ snapshots: postcss: 8.4.41 postcss-import: 15.1.0(postcss@8.4.41) postcss-js: 4.0.1(postcss@8.4.41) - postcss-load-config: 4.0.2(postcss@8.4.41)(ts-node@10.9.2) + postcss-load-config: 4.0.2(postcss@8.4.41)(ts-node@10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4)) postcss-nested: 6.2.0(postcss@8.4.41) postcss-selector-parser: 6.1.2 resolve: 1.22.8 @@ -10769,6 +11500,10 @@ snapshots: mkdirp: 1.0.4 yallist: 4.0.0 + tdigest@0.1.2: + dependencies: + bintrees: 1.0.2 + temp-dir@1.0.0: {} test-exclude@6.0.0: @@ -10777,6 +11512,12 @@ snapshots: glob: 7.2.3 minimatch: 3.1.2 + test-exclude@7.0.1: + dependencies: + '@istanbuljs/schema': 0.1.3 + glob: 10.4.5 + minimatch: 9.0.5 + text-extensions@1.9.0: {} text-table@0.2.0: {} @@ -10804,9 +11545,18 @@ snapshots: tinybench@2.9.0: {} - tinypool@0.8.4: {} + tinyexec@0.3.2: {} + + tinyglobby@0.2.10: + dependencies: + fdir: 6.4.3(picomatch@4.0.2) + picomatch: 4.0.2 + + tinypool@1.0.2: {} - tinyspy@2.2.1: {} + tinyrainbow@2.0.0: {} + + tinyspy@3.0.2: {} tmp@0.0.33: dependencies: @@ -10851,7 +11601,7 @@ snapshots: ts-interface-checker@0.1.13: {} - ts-node@10.9.2(@types/node@20.14.15)(typescript@5.5.4): + ts-node@10.9.2(@swc/core@1.7.11)(@types/node@20.14.15)(typescript@5.5.4): dependencies: '@cspotcode/source-map-support': 0.8.1 '@tsconfig/node10': 1.0.11 @@ -10868,6 +11618,8 @@ snapshots: typescript: 5.5.4 v8-compile-cache-lib: 3.0.1 yn: 3.1.1 + optionalDependencies: + '@swc/core': 1.7.11 tsconfig-paths@3.15.0: dependencies: @@ -10896,8 +11648,6 @@ snapshots: dependencies: prelude-ls: 1.2.1 - type-detect@4.1.0: {} - type-fest@0.18.1: 
{} type-fest@0.21.3: {} @@ -10961,8 +11711,6 @@ snapshots: typescript@5.5.4: {} - ufo@1.5.4: {} - uglify-js@3.19.2: optional: true @@ -11007,6 +11755,12 @@ snapshots: escalade: 3.1.2 picocolors: 1.0.1 + update-browserslist-db@1.1.2(browserslist@4.24.4): + dependencies: + browserslist: 4.24.4 + escalade: 3.2.0 + picocolors: 1.1.1 + uri-js@4.4.1: dependencies: punycode: 2.3.1 @@ -11049,15 +11803,16 @@ snapshots: vary@1.1.2: {} - vite-node@1.6.0(@types/node@20.14.15): + vite-node@3.0.4(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0): dependencies: cac: 6.7.14 - debug: 4.3.6 - pathe: 1.1.2 - picocolors: 1.0.1 - vite: 5.4.1(@types/node@20.14.15) + debug: 4.4.0 + es-module-lexer: 1.6.0 + pathe: 2.0.2 + vite: 6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0) transitivePeerDependencies: - '@types/node' + - jiti - less - lightningcss - sass @@ -11066,73 +11821,94 @@ snapshots: - sugarss - supports-color - terser + - tsx + - yaml - vite-plugin-dts@3.9.1(@types/node@20.14.15)(typescript@5.5.4)(vite@5.4.1): + vite-plugin-dts@3.9.1(@types/node@20.14.15)(rollup@4.34.0)(typescript@5.5.4)(vite@6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0)): dependencies: '@microsoft/api-extractor': 7.43.0(@types/node@20.14.15) - '@rollup/pluginutils': 5.1.0 + '@rollup/pluginutils': 5.1.0(rollup@4.34.0) '@vue/language-core': 1.8.27(typescript@5.5.4) debug: 4.3.6 kolorist: 1.8.0 magic-string: 0.30.11 typescript: 5.5.4 - vite: 5.4.1(@types/node@20.14.15) vue-tsc: 1.8.27(typescript@5.5.4) + optionalDependencies: + vite: 6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0) transitivePeerDependencies: - '@types/node' - rollup - supports-color - vite-plugin-wasm@3.3.0(vite@5.4.1): + vite-plugin-wasm@3.4.1(vite@6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0)): dependencies: - vite: 5.4.1(@types/node@20.14.15) + vite: 6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0) vite@5.4.1(@types/node@20.14.15): dependencies: - '@types/node': 20.14.15 esbuild: 0.21.5 postcss: 8.4.41 rollup: 
4.20.0 optionalDependencies: + '@types/node': 20.14.15 fsevents: 2.3.3 - vitefu@0.2.5(vite@5.4.1): + vite@6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0): dependencies: + esbuild: 0.24.2 + postcss: 8.5.1 + rollup: 4.34.0 + optionalDependencies: + '@types/node': 20.14.15 + fsevents: 2.3.3 + jiti: 1.21.6 + yaml: 2.5.0 + + vitefu@0.2.5(vite@5.4.1(@types/node@20.14.15)): + optionalDependencies: vite: 5.4.1(@types/node@20.14.15) - vitest@1.6.0(@types/node@20.14.15)(@vitest/ui@1.6.0): - dependencies: - '@types/node': 20.14.15 - '@vitest/expect': 1.6.0 - '@vitest/runner': 1.6.0 - '@vitest/snapshot': 1.6.0 - '@vitest/spy': 1.6.0 - '@vitest/ui': 1.6.0(vitest@1.6.0) - '@vitest/utils': 1.6.0 - acorn-walk: 8.3.3 - chai: 4.5.0 - debug: 4.3.6 - execa: 8.0.1 - local-pkg: 0.5.0 - magic-string: 0.30.11 - pathe: 1.1.2 - picocolors: 1.0.1 - std-env: 3.7.0 - strip-literal: 2.1.0 + vitest@3.0.4(@types/debug@4.1.12)(@types/node@20.14.15)(@vitest/ui@3.0.4)(jiti@1.21.6)(jsdom@22.1.0)(yaml@2.5.0): + dependencies: + '@vitest/expect': 3.0.4 + '@vitest/mocker': 3.0.4(vite@6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0)) + '@vitest/pretty-format': 3.0.4 + '@vitest/runner': 3.0.4 + '@vitest/snapshot': 3.0.4 + '@vitest/spy': 3.0.4 + '@vitest/utils': 3.0.4 + chai: 5.1.2 + debug: 4.4.0 + expect-type: 1.1.0 + magic-string: 0.30.17 + pathe: 2.0.2 + std-env: 3.8.0 tinybench: 2.9.0 - tinypool: 0.8.4 - vite: 5.4.1(@types/node@20.14.15) - vite-node: 1.6.0(@types/node@20.14.15) + tinyexec: 0.3.2 + tinypool: 1.0.2 + tinyrainbow: 2.0.0 + vite: 6.0.11(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0) + vite-node: 3.0.4(@types/node@20.14.15)(jiti@1.21.6)(yaml@2.5.0) why-is-node-running: 2.3.0 + optionalDependencies: + '@types/debug': 4.1.12 + '@types/node': 20.14.15 + '@vitest/ui': 3.0.4(vitest@3.0.4) + jsdom: 22.1.0 transitivePeerDependencies: + - jiti - less - lightningcss + - msw - sass - sass-embedded - stylus - sugarss - supports-color - terser + - tsx + - yaml vscode-oniguruma@1.7.0: {} @@ 
-11337,8 +12113,6 @@ snapshots: yocto-queue@0.1.0: {} - yocto-queue@1.1.1: {} - z-schema@5.0.5: dependencies: lodash.get: 4.4.2