From c0aa96f59ae71779275d3efca635224adc9a97a7 Mon Sep 17 00:00:00 2001 From: Paul Date: Tue, 29 Oct 2024 11:19:38 -0600 Subject: [PATCH 01/19] fix(ui): missing localization label on text area fields (#8927) --- packages/ui/src/fields/Textarea/index.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/ui/src/fields/Textarea/index.tsx b/packages/ui/src/fields/Textarea/index.tsx index 2ecc52a3073..ba50d688e7c 100644 --- a/packages/ui/src/fields/Textarea/index.tsx +++ b/packages/ui/src/fields/Textarea/index.tsx @@ -91,6 +91,7 @@ const TextareaFieldComponent: TextareaFieldClientComponent = (props) => { descriptionProps={descriptionProps} Error={field?.admin?.components?.Error} errorProps={errorProps} + field={field} label={label} Label={field?.admin?.components?.Label} labelProps={labelProps} From 7ba19e03d60fa482b3f50efa9de63f83f08cb7b1 Mon Sep 17 00:00:00 2001 From: Elliot DeNolf Date: Tue, 29 Oct 2024 13:47:37 -0400 Subject: [PATCH 02/19] ci: add payload-cloud as valid pr scope --- .github/workflows/pr-title.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pr-title.yml b/.github/workflows/pr-title.yml index 584ea8011f5..463b1c2ec12 100644 --- a/.github/workflows/pr-title.yml +++ b/.github/workflows/pr-title.yml @@ -46,6 +46,7 @@ jobs: live-preview live-preview-react next + payload-cloud plugin-cloud plugin-cloud-storage plugin-form-builder From 9c530e47bb8228cba8dac1b2a51507d3c796c3bb Mon Sep 17 00:00:00 2001 From: Kendell Joseph <1900724+kendelljoseph@users.noreply.github.com> Date: Tue, 29 Oct 2024 14:56:29 -0400 Subject: [PATCH 03/19] chore: changes admin API key field visuals based on read and update permissions (#8923) Issue: https://github.com/payloadcms/payload/issues/8785 --- .../src/views/Edit/Default/Auth/APIKey.tsx | 10 ++------ .../src/views/Edit/Default/Auth/index.tsx | 24 ++++++++++++------- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/packages/next/src/views/Edit/Default/Auth/APIKey.tsx b/packages/next/src/views/Edit/Default/Auth/APIKey.tsx index e4b5c1fd509..241e097954b 100644 --- a/packages/next/src/views/Edit/Default/Auth/APIKey.tsx +++ b/packages/next/src/views/Edit/Default/Auth/APIKey.tsx @@ -28,7 +28,7 @@ export const APIKey: React.FC<{ readonly enabled: boolean; readonly readOnly?: b const [highlightedField, setHighlightedField] = useState(false) const { i18n, t } = useTranslation() const { config } = useConfig() - const { collectionSlug, docPermissions } = useDocumentInfo() + const { collectionSlug } = useDocumentInfo() const apiKey = useFormFields(([fields]) => (fields && fields[path]) || null) @@ -77,12 +77,6 @@ export const APIKey: React.FC<{ readonly enabled: boolean; readonly readOnly?: b [apiKeyLabel, apiKeyValue], ) - const canUpdateAPIKey = useMemo(() => { - if (docPermissions && docPermissions?.fields?.apiKey) { - return docPermissions.fields.apiKey.update.permission - } - }, [docPermissions]) - const fieldType = useField({ path: 'apiKey', validate, @@ -142,7 +136,7 @@ export const APIKey: React.FC<{ readonly enabled: boolean; readonly readOnly?: b value={(value as string) || ''} /> - {!readOnly && canUpdateAPIKey && ( + {!readOnly && ( setValue(uuidv4())} /> )} diff --git a/packages/next/src/views/Edit/Default/Auth/index.tsx b/packages/next/src/views/Edit/Default/Auth/index.tsx index d15b37afb05..ed5b3376c19 100644 --- a/packages/next/src/views/Edit/Default/Auth/index.tsx +++ b/packages/next/src/views/Edit/Default/Auth/index.tsx @@ -73,6 +73,12 @@ export const Auth: React.FC = (props) => { return false }, 
[permissions, collectionSlug]) + const apiKeyReadOnly = readOnly || !docPermissions?.fields?.apiKey?.update?.permission + const enableAPIKeyReadOnly = readOnly || !docPermissions?.fields?.enableAPIKey?.update?.permission + + const canReadApiKey = docPermissions?.fields?.apiKey?.read?.permission + const canReadEnableAPIKey = docPermissions?.fields?.enableAPIKey?.read?.permission + const handleChangePassword = useCallback( (showPasswordFields: boolean) => { if (showPasswordFields) { @@ -200,14 +206,16 @@ export const Auth: React.FC = (props) => { )} {useAPIKey && (
- - + {canReadEnableAPIKey && ( + + )} + {canReadApiKey && }
)} {verify && ( From 6c341b56617298725cfafb795db505ddcaf24891 Mon Sep 17 00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Tue, 29 Oct 2024 22:03:31 +0200 Subject: [PATCH 04/19] fix(ui): sanitize limit for preferences (#8913) ### What? Fixes the issue with passing a string `limit` value from user preferences to the mongodb `.aggregate` function. To reproduce: - click the list view for a collection that has a join field - set "show per page" to 100 - reload, see this: image ### Why? When using `.aggregate`, MongoDB doesn't cast a value for the `$limit` stage to a number automatically as it's not handled by Mongoose. It's also more convenient to store this value as a number. ### How? Stores `limit` inside of preferences in number. --- packages/ui/src/providers/ListQuery/index.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/ui/src/providers/ListQuery/index.tsx b/packages/ui/src/providers/ListQuery/index.tsx index 86ab04cce91..b7c6babe9ab 100644 --- a/packages/ui/src/providers/ListQuery/index.tsx +++ b/packages/ui/src/providers/ListQuery/index.tsx @@ -98,7 +98,7 @@ export const ListQueryProvider: React.FC = ({ let updatePreferences = false if ('limit' in query) { - updatedPreferences.limit = query.limit + updatedPreferences.limit = Number(query.limit) updatePreferences = true } From 29704428bd4ac96b22d85f6fd23492ad3e9d9232 Mon Sep 17 00:00:00 2001 From: Kendell Joseph <1900724+kendelljoseph@users.noreply.github.com> Date: Tue, 29 Oct 2024 16:12:45 -0400 Subject: [PATCH 05/19] chore: corrects package import paths for live preview test (#8925) Corrects package import paths for live preview test. - This would cause a import glitch when trying to run the live-preview test due to incorrect file paths. --- .../src/features/blocks/client/plugin/index.tsx | 2 +- .../experimental_table/client/plugins/TablePlugin/index.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/richtext-lexical/src/features/blocks/client/plugin/index.tsx b/packages/richtext-lexical/src/features/blocks/client/plugin/index.tsx index a3b62216b02..556da31d8f9 100644 --- a/packages/richtext-lexical/src/features/blocks/client/plugin/index.tsx +++ b/packages/richtext-lexical/src/features/blocks/client/plugin/index.tsx @@ -24,7 +24,6 @@ import { COMMAND_PRIORITY_EDITOR, type RangeSelection, } from 'lexical' -import { useLexicalDrawer } from 'packages/richtext-lexical/src/utilities/fieldsDrawer/useLexicalDrawer.js' import React, { useEffect, useState } from 'react' import type { PluginComponent } from '../../../typesClient.js' @@ -33,6 +32,7 @@ import type { BlocksFeatureClientProps } from '../index.js' import { useEditorConfigContext } from '../../../../lexical/config/client/EditorConfigProvider.js' import { FieldsDrawer } from '../../../../utilities/fieldsDrawer/Drawer.js' +import { useLexicalDrawer } from '../../../../utilities/fieldsDrawer/useLexicalDrawer.js' import { $createBlockNode, BlockNode } from '../nodes/BlocksNode.js' import { $createInlineBlockNode, $isInlineBlockNode } from '../nodes/InlineBlocksNode.js' import { diff --git a/packages/richtext-lexical/src/features/experimental_table/client/plugins/TablePlugin/index.tsx b/packages/richtext-lexical/src/features/experimental_table/client/plugins/TablePlugin/index.tsx index d61d2b9dbe4..eaf032117d2 100644 --- a/packages/richtext-lexical/src/features/experimental_table/client/plugins/TablePlugin/index.tsx +++ 
b/packages/richtext-lexical/src/features/experimental_table/client/plugins/TablePlugin/index.tsx @@ -16,7 +16,6 @@ import { INSERT_TABLE_COMMAND, TableNode } from '@lexical/table' import { mergeRegister } from '@lexical/utils' import { formatDrawerSlug, useEditDepth, useModal } from '@payloadcms/ui' import { $getSelection, $isRangeSelection, COMMAND_PRIORITY_EDITOR, createCommand } from 'lexical' -import { useLexicalDrawer } from 'packages/richtext-lexical/src/utilities/fieldsDrawer/useLexicalDrawer.js' import { createContext, useContext, useEffect, useMemo, useState } from 'react' import * as React from 'react' @@ -24,6 +23,7 @@ import type { PluginComponent } from '../../../../typesClient.js' import { useEditorConfigContext } from '../../../../../lexical/config/client/EditorConfigProvider.js' import { FieldsDrawer } from '../../../../../utilities/fieldsDrawer/Drawer.js' +import { useLexicalDrawer } from '../../../../../utilities/fieldsDrawer/useLexicalDrawer.js' import './index.scss' export type CellContextShape = { From 6cdf14138034827a65a7edf029d2c972c6599bfd Mon Sep 17 00:00:00 2001 From: Dan Ribbens Date: Tue, 29 Oct 2024 16:49:27 -0400 Subject: [PATCH 06/19] feat: prevent create new for joins (#8929) ### What? Adds a way to prevent creating new documents from the admin UI in a join field. ### Why? There are two reasons: 1. You want to disable this any time as a feature of your admin user experience 2. When creating a new document it is not yet possible to create the relationship, preventing create is necessary for the workflow to make sense. ### How? join field has a new admin property called `allowCreate`, can be set to false. By default the UI will never allow create when the current document being edited does not yet have an `id`. Fixes # #8892 ### Before Even though the document doesn't have an ID yet, the create buttons are shown which doesn't actually work. ![image](https://github.com/user-attachments/assets/152abed4-a174-498b-835c-aa4779c46834) ### After Initial document creation: ![Screenshot 2024-10-29 125132](https://github.com/user-attachments/assets/f33b1532-5b72-4c94-967d-bda618dadd34) Prevented using `allowCreate: false` ![Screenshot 2024-10-29 130409](https://github.com/user-attachments/assets/69c3f601-fab3-4f5a-9df5-93fd133682ca) --- docs/fields/join.mdx | 38 ++++++++++++------- packages/payload/src/fields/config/types.ts | 2 + .../src/elements/RelationshipTable/index.tsx | 11 +++--- packages/ui/src/fields/Join/index.tsx | 2 + 4 files changed, 34 insertions(+), 19 deletions(-) diff --git a/docs/fields/join.mdx b/docs/fields/join.mdx index 151567e9b9b..7ad4d7107cc 100644 --- a/docs/fields/join.mdx +++ b/docs/fields/join.mdx @@ -121,23 +121,33 @@ powerful Admin UI. ## Config Options -| Option | Description | -|------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| **`name`** \* | To be used as the property name when retrieved from the database. [More](/docs/fields/overview#field-names) | -| **`collection`** \* | The `slug`s having the relationship field. | -| **`on`** \* | The name of the relationship or upload field that relates to the collection document. Use dot notation for nested paths, like 'myGroup.relationName'. | -| **`maxDepth`** | Default is 1, Sets a maximum population depth for this field, regardless of the remaining depth when this field is reached. 
[Max Depth](/docs/getting-started/concepts#field-level-max-depth) | -| **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | -| **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | -| **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | -| **`defaultLimit`** | The number of documents to return. Set to 0 to return all related documents. | -| **`defaultSort`** | The field name used to specify the order the joined documents are returned. | -| **`admin`** | Admin-specific configuration. | -| **`custom`** | Extension point for adding custom data (e.g. for plugins). | -| **`typescriptSchema`** | Override field type generation with providing a JSON schema. | +| Option | Description | +|------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **`name`** \* | To be used as the property name when retrieved from the database. [More](/docs/fields/overview#field-names) | +| **`collection`** \* | The `slug`s having the relationship field. | +| **`on`** \* | The name of the relationship or upload field that relates to the collection document. Use dot notation for nested paths, like 'myGroup.relationName'. | +| **`maxDepth`** | Default is 1, Sets a maximum population depth for this field, regardless of the remaining depth when this field is reached. [Max Depth](/docs/getting-started/concepts#field-level-max-depth). | +| **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | +| **`hooks`** | Provide Field Hooks to control logic for this field. [More details](../hooks/fields). | +| **`access`** | Provide Field Access Control to denote what users can see and do with this field's data. [More details](../access-control/fields). | +| **`defaultLimit`** | The number of documents to return. Set to 0 to return all related documents. | +| **`defaultSort`** | The field name used to specify the order the joined documents are returned. | +| **`admin`** | Admin-specific configuration. [More details](#admin-config-options). | +| **`custom`** | Extension point for adding custom data (e.g. for plugins). | +| **`typescriptSchema`** | Override field type generation with providing a JSON schema. | _\* An asterisk denotes that a property is required._ + +## Admin Config Options + +You can control the user experience of the join field using the `admin` config properties. The following options are supported: + +| Option | Description | +|------------------------|----------------------------------------------------------------------------------------| +| **`allowCreate`** | Set to `false` to remove the controls for making new related documents from this field. | +| **`components.Label`** | Override the default Label of the Field Component. [More details](#the-label-component). | + ## Join Field Data When a document is returned that for a Join field is populated with related documents. 
The structure returned is an diff --git a/packages/payload/src/fields/config/types.ts b/packages/payload/src/fields/config/types.ts index 4e7dd6c5758..f2fdee8a1fa 100644 --- a/packages/payload/src/fields/config/types.ts +++ b/packages/payload/src/fields/config/types.ts @@ -1442,6 +1442,7 @@ export type JoinField = { update?: never } admin?: { + allowCreate?: boolean components?: { Error?: CustomComponent Label?: CustomComponent @@ -1477,6 +1478,7 @@ export type JoinField = { export type JoinFieldClient = { admin?: { + allowCreate?: boolean components?: { Label?: MappedComponent } & AdminClient['components'] diff --git a/packages/ui/src/elements/RelationshipTable/index.tsx b/packages/ui/src/elements/RelationshipTable/index.tsx index 249db848e0d..f1031cccc79 100644 --- a/packages/ui/src/elements/RelationshipTable/index.tsx +++ b/packages/ui/src/elements/RelationshipTable/index.tsx @@ -37,6 +37,7 @@ import { RelationshipTableWrapper } from './TableWrapper.js' const baseClass = 'relationship-table' type RelationshipTableComponentProps = { + readonly allowCreate?: boolean readonly field: JoinFieldClient readonly filterOptions?: boolean | Where readonly initialData?: PaginatedDocs @@ -47,6 +48,7 @@ type RelationshipTableComponentProps = { export const RelationshipTable: React.FC = (props) => { const { + allowCreate = true, field, filterOptions, initialData: initialDataFromProps, @@ -202,16 +204,15 @@ export const RelationshipTable: React.FC = (pro const preferenceKey = `${relationTo}-list` - const hasCreatePermission = permissions?.collections?.[relationTo]?.create?.permission + const canCreate = + allowCreate !== false && permissions?.collections?.[relationTo]?.create?.permission return (
{Label}
- {hasCreatePermission && ( - {i18n.t('fields:addNew')} - )} + {canCreate && {i18n.t('fields:addNew')}} = (pro label: getTranslation(collectionConfig?.labels?.plural, i18n), })}

- {hasCreatePermission && ( + {canCreate && ( +       + +
+
+ ) +} diff --git a/test/admin/config.ts b/test/admin/config.ts index 26d1fdd6911..3532c449219 100644 --- a/test/admin/config.ts +++ b/test/admin/config.ts @@ -26,6 +26,7 @@ import { GlobalGroup1A } from './globals/Group1A.js' import { GlobalGroup1B } from './globals/Group1B.js' import { GlobalHidden } from './globals/Hidden.js' import { GlobalNoApiView } from './globals/NoApiView.js' +import { Settings } from './globals/Settings.js' import { seed } from './seed.js' import { customAdminRoutes, @@ -33,7 +34,11 @@ import { customParamViewPath, customRootViewMetaTitle, customViewPath, + protectedCustomNestedViewPath, + publicCustomViewPath, } from './shared.js' +import { settingsGlobalSlug } from './slugs.js' + export default buildConfigWithDefaults({ admin: { importMap: { @@ -80,6 +85,17 @@ export default buildConfigWithDefaults({ path: customViewPath, strict: true, }, + ProtectedCustomNestedView: { + Component: '/components/views/CustomProtectedView/index.js#CustomProtectedView', + exact: true, + path: protectedCustomNestedViewPath, + }, + PublicCustomView: { + Component: '/components/views/CustomView/index.js#CustomView', + exact: true, + path: publicCustomViewPath, + strict: true, + }, CustomViewWithParam: { Component: '/components/views/CustomViewWithParam/index.js#CustomViewWithParam', path: customParamViewPath, @@ -144,6 +160,7 @@ export default buildConfigWithDefaults({ CustomGlobalViews2, GlobalGroup1A, GlobalGroup1B, + Settings, ], i18n: { translations: { diff --git a/test/admin/e2e/1/e2e.spec.ts b/test/admin/e2e/1/e2e.spec.ts index cbb90522a6b..f4c59ae54ff 100644 --- a/test/admin/e2e/1/e2e.spec.ts +++ b/test/admin/e2e/1/e2e.spec.ts @@ -35,6 +35,8 @@ import { customViewMetaTitle, customViewPath, customViewTitle, + protectedCustomNestedViewPath, + publicCustomViewPath, slugPluralLabel, } from '../../shared.js' import { @@ -50,6 +52,7 @@ import { noApiViewCollectionSlug, noApiViewGlobalSlug, postsCollectionSlug, + settingsGlobalSlug, } from '../../slugs.js' const { beforeAll, beforeEach, describe } = test @@ -494,6 +497,30 @@ describe('admin1', () => { await expect(page.locator('h1#custom-view-title')).toContainText(customNestedViewTitle) }) + test('root — should render public custom view', async () => { + await page.goto(`${serverURL}${adminRoutes.routes.admin}${publicCustomViewPath}`) + await page.waitForURL(`**${adminRoutes.routes.admin}${publicCustomViewPath}`) + await expect(page.locator('h1#custom-view-title')).toContainText(customViewTitle) + }) + + test('root — should render protected nested custom view', async () => { + await page.goto(`${serverURL}${adminRoutes.routes.admin}${protectedCustomNestedViewPath}`) + await page.waitForURL(`**${adminRoutes.routes.admin}/unauthorized`) + await expect(page.locator('.unauthorized')).toBeVisible() + + await page.goto(globalURL.global(settingsGlobalSlug)) + + const checkbox = page.locator('#field-canAccessProtected') + + await checkbox.check() + + await saveDocAndAssert(page) + + await page.goto(`${serverURL}${adminRoutes.routes.admin}${protectedCustomNestedViewPath}`) + await page.waitForURL(`**${adminRoutes.routes.admin}${protectedCustomNestedViewPath}`) + await expect(page.locator('h1#custom-view-title')).toContainText(customNestedViewTitle) + }) + test('collection - should render custom tab view', async () => { await page.goto(customViewsURL.create) await page.locator('#field-title').fill('Test') diff --git a/test/admin/globals/Settings.ts b/test/admin/globals/Settings.ts new file mode 100644 index 00000000000..669137091a6 --- 
/dev/null +++ b/test/admin/globals/Settings.ts @@ -0,0 +1,13 @@ +import type { GlobalConfig } from 'payload' + +import { settingsGlobalSlug } from '../slugs.js' + +export const Settings: GlobalConfig = { + slug: settingsGlobalSlug, + fields: [ + { + type: 'checkbox', + name: 'canAccessProtected', + }, + ], +} diff --git a/test/admin/payload-types.ts b/test/admin/payload-types.ts index 528c56170e2..74b684358c7 100644 --- a/test/admin/payload-types.ts +++ b/test/admin/payload-types.ts @@ -42,6 +42,7 @@ export interface Config { 'custom-global-views-two': CustomGlobalViewsTwo; 'group-globals-one': GroupGlobalsOne; 'group-globals-two': GroupGlobalsTwo; + settings: Setting; }; locale: 'es' | 'en'; user: User & { @@ -341,7 +342,6 @@ export interface PayloadLockedDocument { relationTo: 'disable-duplicate'; value: string | DisableDuplicate; } | null); - editedAt?: string | null; globalSlug?: string | null; user: { relationTo: 'users'; @@ -455,6 +455,16 @@ export interface GroupGlobalsTwo { updatedAt?: string | null; createdAt?: string | null; } +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "settings". + */ +export interface Setting { + id: string; + canAccessProtected?: boolean | null; + updatedAt?: string | null; + createdAt?: string | null; +} /** * This interface was referenced by `Config`'s JSON-Schema * via the `definition` "auth". diff --git a/test/admin/shared.ts b/test/admin/shared.ts index 41e687bc730..7c24349c1d4 100644 --- a/test/admin/shared.ts +++ b/test/admin/shared.ts @@ -6,6 +6,12 @@ export const slugPluralLabel = 'Posts' export const customViewPath = '/custom-view' +export const customNestedViewPath = `${customViewPath}/nested-view` + +export const publicCustomViewPath = '/public-custom-view' + +export const protectedCustomNestedViewPath = `${publicCustomViewPath}/protected-nested-view` + export const customParamViewPathBase = '/custom-param' export const customParamViewPath = `${customParamViewPathBase}/:id` @@ -14,8 +20,6 @@ export const customViewTitle = 'Custom View' export const customParamViewTitle = 'Custom Param View' -export const customNestedViewPath = `${customViewPath}/nested-view` - export const customNestedViewTitle = 'Custom Nested View' export const customEditLabel = 'Custom Edit Label' diff --git a/test/admin/slugs.ts b/test/admin/slugs.ts index 38899b97c55..9be4911a2e6 100644 --- a/test/admin/slugs.ts +++ b/test/admin/slugs.ts @@ -34,6 +34,8 @@ export const globalSlug = 'global' export const group1GlobalSlug = 'group-globals-one' export const group2GlobalSlug = 'group-globals-two' export const hiddenGlobalSlug = 'hidden-global' + +export const settingsGlobalSlug = 'settings' export const noApiViewGlobalSlug = 'global-no-api-view' export const globalSlugs = [ customGlobalViews1GlobalSlug, From d38d7b8932c0c1bc41495c96898e3e9e5d68d5bf Mon Sep 17 00:00:00 2001 From: Paul Date: Wed, 30 Oct 2024 11:29:58 -0600 Subject: [PATCH 13/19] fix(ui): broken buttons in the bulk upload drawer (#8926) Fixes the mobile bottom interface and the arrow buttons in the actions at the top. 
Before: ![image](https://github.com/user-attachments/assets/26902eb0-5d1a-480d-b6f5-c36a800a6bff) After: ![image](https://github.com/user-attachments/assets/7837684c-37a7-4b2e-a875-47972cf1671f) --- .../elements/BulkUpload/ActionsBar/index.scss | 4 ++ .../BulkUpload/FileSidebar/index.scss | 42 ++++++++++++++++++- .../elements/BulkUpload/FileSidebar/index.tsx | 8 ++++ 3 files changed, 52 insertions(+), 2 deletions(-) diff --git a/packages/ui/src/elements/BulkUpload/ActionsBar/index.scss b/packages/ui/src/elements/BulkUpload/ActionsBar/index.scss index 5f09ceb7c25..a1e95f4b7f6 100644 --- a/packages/ui/src/elements/BulkUpload/ActionsBar/index.scss +++ b/packages/ui/src/elements/BulkUpload/ActionsBar/index.scss @@ -33,6 +33,10 @@ width: calc(var(--base) * 1.2); height: calc(var(--base) * 1.2); + svg { + max-width: 1rem; + } + &:hover { background-color: var(--theme-elevation-200); } diff --git a/packages/ui/src/elements/BulkUpload/FileSidebar/index.scss b/packages/ui/src/elements/BulkUpload/FileSidebar/index.scss index f4147d1825f..6d200bc6a33 100644 --- a/packages/ui/src/elements/BulkUpload/FileSidebar/index.scss +++ b/packages/ui/src/elements/BulkUpload/FileSidebar/index.scss @@ -192,6 +192,10 @@ display: none; margin: 0; padding-block: 0; + + &__text { + display: none; + } } &__header__mobileDocActions { @@ -245,7 +249,17 @@ &__headerTopRow { border-top: 1px solid var(--theme-border-color); - padding-block: calc(var(--base) * 0.8); + padding-block: 0 calc(var(--base) * 0.8) 0; + } + + &__header__text { + display: none; + } + + &__header__actions { + flex-grow: 2; + display: flex; + justify-content: flex-end; } &__header__mobileDocActions { @@ -268,7 +282,31 @@ &__toggler { padding-right: 0; - display: block; + padding-left: 0; + padding-top: calc(var(--base) * 0.8); + padding-bottom: calc(var(--base) * 0.8); + display: flex; + justify-content: flex-end; + flex-grow: 1; + + .btn__label { + width: 100%; + display: flex; + align-items: center; + justify-content: space-between; + } + + &__text { + display: flex; + } + + svg { + max-width: 1.5rem; + } + + .btn__content { + width: 100%; + } } .btn { diff --git a/packages/ui/src/elements/BulkUpload/FileSidebar/index.tsx b/packages/ui/src/elements/BulkUpload/FileSidebar/index.tsx index b5d9f99da00..c01072a4f83 100644 --- a/packages/ui/src/elements/BulkUpload/FileSidebar/index.tsx +++ b/packages/ui/src/elements/BulkUpload/FileSidebar/index.tsx @@ -98,6 +98,14 @@ export function FileSidebar() { className={`${baseClass}__toggler`} onClick={() => setShowFiles((prev) => !prev)} > + + 1 ? 'upload:filesToUpload' : 'upload:fileToUpload')}`} + > + {totalFileCount}{' '} + {t(totalFileCount > 1 ? 'upload:filesToUpload' : 'upload:fileToUpload')} + + From c41ef65a2bc9cf38fbc465110f112baf77e72baa Mon Sep 17 00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Wed, 30 Oct 2024 19:41:34 +0200 Subject: [PATCH 14/19] feat: add `defaultPopulate` property to collection config (#8934) ### What? Adds `defaultPopulate` property to collection config that allows to specify which fields to select when the collection is populated from another document. ```ts import type { CollectionConfig } from 'payload' // The TSlug generic can be passed to have type safety for `defaultPopulate`. // If avoided, the `defaultPopulate` type resolves to `SelectType`. export const Pages: CollectionConfig<'pages'> = { slug: 'pages', // I need only slug, NOT the WHOLE CONTENT! 
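  // With this select shape, a depth-1 populate of any relationship to `pages`
  // returns only `id` and `slug` instead of the full document.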
defaultPopulate: { slug: true, }, fields: [ { name: 'slug', type: 'text', required: true, }, ], } ``` ### Why? This is essential for example in case of links. You don't need the whole document, which can contain large data but only the `slug`. ### How? Implements `defaultPopulate` when populating relationships, including inside of lexical / slate rich text fields. --- docs/configuration/collections.mdx | 3 +- docs/queries/select.mdx | 31 ++++ .../payload/src/collections/config/types.ts | 5 +- .../payload/src/collections/dataloader.ts | 10 +- .../relationshipPopulationPromise.ts | 1 + .../src/features/relationship/server/index.ts | 1 + .../features/upload/server/feature.server.ts | 1 + .../src/populateGraphQL/populate.ts | 5 +- packages/richtext-slate/src/data/populate.ts | 5 +- .../src/data/recurseNestedFields.ts | 4 + .../src/data/richTextRelationshipPromise.ts | 2 + test/select/collections/Pages/index.ts | 84 +++++++++++ test/select/config.ts | 2 + test/select/int.spec.ts | 136 ++++++++++++++++++ test/select/payload-types.ts | 90 ++++++++++++ 15 files changed, 375 insertions(+), 5 deletions(-) create mode 100644 test/select/collections/Pages/index.ts diff --git a/docs/configuration/collections.mdx b/docs/configuration/collections.mdx index f20ea6df8d5..59f07480604 100644 --- a/docs/configuration/collections.mdx +++ b/docs/configuration/collections.mdx @@ -58,7 +58,7 @@ export const Posts: CollectionConfig = { The following options are available: | Option | Description | -|------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| ---------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | **`admin`** | The configuration options for the Admin Panel. [More details](../admin/collections). | | **`access`** | Provide Access Control functions to define exactly who should be able to do what with Documents in this Collection. [More details](../access-control/collections). | | **`auth`** | Specify options if you would like this Collection to feature authentication. [More details](../authentication/overview). | @@ -77,6 +77,7 @@ The following options are available: | **`typescript`** | An object with property `interface` as the text used in schema generation. Auto-generated from slug if not defined. | | **`upload`** | Specify options if you would like this Collection to support file uploads. For more, consult the [Uploads](../upload/overview) documentation. | | **`versions`** | Set to true to enable default options, or configure with object properties. [More details](../versions/overview#collection-config). | +| **`defaultPopulate`** | Specify which fields to select when this Collection is populated from another document. [More Details](../queries/select#defaultpopulate-collection-config-property). | _\* An asterisk denotes that a property is required._ diff --git a/docs/queries/select.mdx b/docs/queries/select.mdx index 3bd4fe694ae..6ff7f1fb169 100644 --- a/docs/queries/select.mdx +++ b/docs/queries/select.mdx @@ -97,3 +97,34 @@ const getPosts = async () => { Reminder: This is the same for [Globals](../configuration/globals) using the `/api/globals` endpoint. 
+``` + + +## `defaultPopulate` collection config property + +The `defaultPopulate` property allows you specify which fields to select when populating the collection from another document. +This is especially useful for links where only the `slug` is needed instead of the entire document. + +```ts +import type { CollectionConfig } from 'payload' + +import { lexicalEditor, LinkFeature } from '@payloadcms/richtext-lexical' +import { slateEditor } from '@payloadcms/richtext-slate' + +// The TSlug generic can be passed to have type safety for `defaultPopulate`. +// If avoided, the `defaultPopulate` type resolves to `SelectType`. +export const Pages: CollectionConfig<'pages'> = { + slug: 'pages', + // Specify `select`. + defaultPopulate: { + slug: true, + }, + fields: [ + { + name: 'slug', + type: 'text', + required: true, + }, + ], +} +``` diff --git a/packages/payload/src/collections/config/types.ts b/packages/payload/src/collections/config/types.ts index 4a83e987492..4243b540803 100644 --- a/packages/payload/src/collections/config/types.ts +++ b/packages/payload/src/collections/config/types.ts @@ -1,5 +1,5 @@ import type { GraphQLInputObjectType, GraphQLNonNull, GraphQLObjectType } from 'graphql' -import type { DeepRequired, MarkOptional } from 'ts-essentials' +import type { DeepRequired, IsAny, MarkOptional } from 'ts-essentials' import type { CustomPreviewButton, @@ -382,6 +382,9 @@ export type CollectionConfig = { * @WARNING: If you change this property with existing data, you will need to handle the renaming of the table in your database or by using migrations */ dbName?: DBIdentifierName + defaultPopulate?: IsAny> extends true + ? SelectType + : SelectFromCollectionSlug /** * Default field to sort by in collection list view */ diff --git a/packages/payload/src/collections/dataloader.ts b/packages/payload/src/collections/dataloader.ts index 3fc55acba81..b846b1add0e 100644 --- a/packages/payload/src/collections/dataloader.ts +++ b/packages/payload/src/collections/dataloader.ts @@ -2,7 +2,7 @@ import type { BatchLoadFn } from 'dataloader' import DataLoader from 'dataloader' -import type { PayloadRequest } from '../types/index.js' +import type { PayloadRequest, SelectType } from '../types/index.js' import type { TypeWithID } from './config/types.js' import { isValidID } from '../utilities/isValidID.js' @@ -55,6 +55,7 @@ const batchAndLoadDocs = overrideAccess, showHiddenFields, draft, + select, ] = JSON.parse(key) const batchKeyArray = [ @@ -67,6 +68,7 @@ const batchAndLoadDocs = overrideAccess, showHiddenFields, draft, + select, ] const batchKey = JSON.stringify(batchKeyArray) @@ -103,6 +105,7 @@ const batchAndLoadDocs = overrideAccess, showHiddenFields, draft, + select, ] = JSON.parse(batchKey) req.transactionID = transactionID @@ -118,6 +121,7 @@ const batchAndLoadDocs = overrideAccess: Boolean(overrideAccess), pagination: false, req, + select, showHiddenFields: Boolean(showHiddenFields), where: { id: { @@ -139,6 +143,7 @@ const batchAndLoadDocs = fallbackLocale, locale, overrideAccess, + select, showHiddenFields, transactionID: req.transactionID, }) @@ -167,6 +172,7 @@ type CreateCacheKeyArgs = { fallbackLocale: string locale: string overrideAccess: boolean + select?: SelectType showHiddenFields: boolean transactionID: number | Promise | string } @@ -179,6 +185,7 @@ export const createDataloaderCacheKey = ({ fallbackLocale, locale, overrideAccess, + select, showHiddenFields, transactionID, }: CreateCacheKeyArgs): string => @@ -193,4 +200,5 @@ export const createDataloaderCacheKey = ({ 
overrideAccess, showHiddenFields, draft, + select, ]) diff --git a/packages/payload/src/fields/hooks/afterRead/relationshipPopulationPromise.ts b/packages/payload/src/fields/hooks/afterRead/relationshipPopulationPromise.ts index 8d2599094da..1c11a2f3d5b 100644 --- a/packages/payload/src/fields/hooks/afterRead/relationshipPopulationPromise.ts +++ b/packages/payload/src/fields/hooks/afterRead/relationshipPopulationPromise.ts @@ -69,6 +69,7 @@ const populate = async ({ fallbackLocale, locale, overrideAccess, + select: relatedCollection.config.defaultPopulate, showHiddenFields, transactionID: req.transactionID, }), diff --git a/packages/richtext-lexical/src/features/relationship/server/index.ts b/packages/richtext-lexical/src/features/relationship/server/index.ts index bd8edc66e85..cde6b0e0edc 100644 --- a/packages/richtext-lexical/src/features/relationship/server/index.ts +++ b/packages/richtext-lexical/src/features/relationship/server/index.ts @@ -90,6 +90,7 @@ export const RelationshipFeature = createServerFeature< key: 'value', overrideAccess, req, + select: collection.config.defaultPopulate, showHiddenFields, }), ) diff --git a/packages/richtext-lexical/src/features/upload/server/feature.server.ts b/packages/richtext-lexical/src/features/upload/server/feature.server.ts index 5fee0e94cbf..d6a00bbad73 100644 --- a/packages/richtext-lexical/src/features/upload/server/feature.server.ts +++ b/packages/richtext-lexical/src/features/upload/server/feature.server.ts @@ -261,6 +261,7 @@ export const UploadFeature = createServerFeature< key: 'value', overrideAccess, req, + select: collection.config.defaultPopulate, showHiddenFields, }), ) diff --git a/packages/richtext-lexical/src/populateGraphQL/populate.ts b/packages/richtext-lexical/src/populateGraphQL/populate.ts index bf208287fc8..555fa5672fb 100644 --- a/packages/richtext-lexical/src/populateGraphQL/populate.ts +++ b/packages/richtext-lexical/src/populateGraphQL/populate.ts @@ -1,4 +1,4 @@ -import type { PayloadRequest } from 'payload' +import type { PayloadRequest, SelectType } from 'payload' import { createDataloaderCacheKey } from 'payload' @@ -10,6 +10,7 @@ type Arguments = { key: number | string overrideAccess: boolean req: PayloadRequest + select?: SelectType showHiddenFields: boolean } @@ -23,6 +24,7 @@ export const populate = async ({ key, overrideAccess, req, + select, showHiddenFields, }: { collectionSlug: string @@ -46,6 +48,7 @@ export const populate = async ({ fallbackLocale: req.fallbackLocale!, locale: req.locale!, overrideAccess, + select, showHiddenFields, transactionID: req.transactionID!, }), diff --git a/packages/richtext-slate/src/data/populate.ts b/packages/richtext-slate/src/data/populate.ts index 593da378dbd..ec73fbdba1f 100644 --- a/packages/richtext-slate/src/data/populate.ts +++ b/packages/richtext-slate/src/data/populate.ts @@ -1,4 +1,4 @@ -import type { Collection, Field, PayloadRequest, RichTextField } from 'payload' +import type { Collection, Field, PayloadRequest, RichTextField, SelectType } from 'payload' import { createDataloaderCacheKey } from 'payload' @@ -13,6 +13,7 @@ type Arguments = { key: number | string overrideAccess?: boolean req: PayloadRequest + select?: SelectType showHiddenFields: boolean } @@ -26,6 +27,7 @@ export const populate = async ({ key, overrideAccess, req, + select, showHiddenFields, }: { collection: Collection @@ -44,6 +46,7 @@ export const populate = async ({ fallbackLocale: req.locale, locale: req.fallbackLocale, overrideAccess: typeof overrideAccess === 'undefined' ? 
false : overrideAccess, + select, showHiddenFields, transactionID: req.transactionID, }), diff --git a/packages/richtext-slate/src/data/recurseNestedFields.ts b/packages/richtext-slate/src/data/recurseNestedFields.ts index 0de737cf395..349fd507eb4 100644 --- a/packages/richtext-slate/src/data/recurseNestedFields.ts +++ b/packages/richtext-slate/src/data/recurseNestedFields.ts @@ -48,6 +48,7 @@ export const recurseNestedFields = ({ key: i, overrideAccess, req, + select: collection.config.defaultPopulate, showHiddenFields, }), ) @@ -69,6 +70,7 @@ export const recurseNestedFields = ({ key: i, overrideAccess, req, + select: collection.config.defaultPopulate, showHiddenFields, }), ) @@ -94,6 +96,7 @@ export const recurseNestedFields = ({ key: 'value', overrideAccess, req, + select: collection.config.defaultPopulate, showHiddenFields, }), ) @@ -114,6 +117,7 @@ export const recurseNestedFields = ({ key: field.name, overrideAccess, req, + select: collection.config.defaultPopulate, showHiddenFields, }), ) diff --git a/packages/richtext-slate/src/data/richTextRelationshipPromise.ts b/packages/richtext-slate/src/data/richTextRelationshipPromise.ts index bbcecf78661..e1bd066b850 100644 --- a/packages/richtext-slate/src/data/richTextRelationshipPromise.ts +++ b/packages/richtext-slate/src/data/richTextRelationshipPromise.ts @@ -54,6 +54,7 @@ export const recurseRichText = ({ key: 'value', overrideAccess, req, + select: collection.config.defaultPopulate, showHiddenFields, }), ) @@ -93,6 +94,7 @@ export const recurseRichText = ({ key: 'value', overrideAccess, req, + select: collection.config.defaultPopulate, showHiddenFields, }), ) diff --git a/test/select/collections/Pages/index.ts b/test/select/collections/Pages/index.ts new file mode 100644 index 00000000000..0f4248f7081 --- /dev/null +++ b/test/select/collections/Pages/index.ts @@ -0,0 +1,84 @@ +import type { CollectionConfig } from 'payload' + +import { lexicalEditor, LinkFeature } from '@payloadcms/richtext-lexical' +import { slateEditor } from '@payloadcms/richtext-slate' + +// The TSlug generic can be passed to have type safety for `defaultPopulate`. +// If avoided, the `defaultPopulate` type resolves to `SelectType`. +export const Pages: CollectionConfig<'pages'> = { + slug: 'pages', + // I need only slug, NOT the WHOLE CONTENT! 
+ defaultPopulate: { + slug: true, + }, + fields: [ + { + name: 'content', + type: 'blocks', + blocks: [ + { + slug: 'cta', + fields: [ + { + name: 'title', + type: 'text', + required: true, + }, + { + name: 'link', + type: 'group', + fields: [ + { + name: 'docPoly', + type: 'relationship', + relationTo: ['pages'], + }, + { + name: 'doc', + type: 'relationship', + relationTo: 'pages', + }, + { + name: 'docMany', + hasMany: true, + type: 'relationship', + relationTo: 'pages', + }, + { + name: 'docHasManyPoly', + type: 'relationship', + relationTo: ['pages'], + hasMany: true, + }, + { + name: 'label', + type: 'text', + required: true, + }, + ], + }, + { + name: 'richTextLexical', + type: 'richText', + editor: lexicalEditor({ + features({ defaultFeatures }) { + return [...defaultFeatures, LinkFeature({ enabledCollections: ['pages'] })] + }, + }), + }, + { + name: 'richTextSlate', + type: 'richText', + editor: slateEditor({}), + }, + ], + }, + ], + }, + { + name: 'slug', + type: 'text', + required: true, + }, + ], +} diff --git a/test/select/config.ts b/test/select/config.ts index 00e5710183e..8330e462f75 100644 --- a/test/select/config.ts +++ b/test/select/config.ts @@ -6,6 +6,7 @@ import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' import { devUser } from '../credentials.js' import { DeepPostsCollection } from './collections/DeepPosts/index.js' import { LocalizedPostsCollection } from './collections/LocalizedPosts/index.js' +import { Pages } from './collections/Pages/index.js' import { PostsCollection } from './collections/Posts/index.js' import { VersionedPostsCollection } from './collections/VersionedPosts/index.js' @@ -19,6 +20,7 @@ export default buildConfigWithDefaults({ LocalizedPostsCollection, VersionedPostsCollection, DeepPostsCollection, + Pages, ], globals: [ { diff --git a/test/select/int.spec.ts b/test/select/int.spec.ts index 86b1fe02f7e..d9ef79e50c4 100644 --- a/test/select/int.spec.ts +++ b/test/select/int.spec.ts @@ -8,6 +8,7 @@ import type { DeepPost, GlobalPost, LocalizedPost, + Page, Post, VersionedPost, } from './payload-types.js' @@ -1561,6 +1562,141 @@ describe('Select', () => { }) }) }) + + describe('defaultPopulate', () => { + let homePage: Page + let aboutPage: Page + let expectedHomePage: { id: number | string; slug: string } + beforeAll(async () => { + homePage = await payload.create({ + depth: 0, + collection: 'pages', + data: { content: [], slug: 'home' }, + }) + expectedHomePage = { id: homePage.id, slug: homePage.slug } + aboutPage = await payload.create({ + depth: 0, + collection: 'pages', + data: { + content: [ + { + blockType: 'cta', + richTextSlate: [ + { + type: 'relationship', + relationTo: 'pages', + value: { id: homePage.id }, + }, + ], + richTextLexical: { + root: { + children: [ + { + format: '', + type: 'relationship', + version: 2, + relationTo: 'pages', + value: homePage.id, + }, + ], + direction: 'ltr', + format: '', + indent: 0, + type: 'root', + version: 1, + }, + }, + link: { + doc: homePage.id, + docHasManyPoly: [ + { + relationTo: 'pages', + value: homePage.id, + }, + ], + docMany: [homePage.id], + docPoly: { + relationTo: 'pages', + value: homePage.id, + }, + label: 'Visit our Home Page!', + }, + title: 'Contact Us', + }, + ], + slug: 'about', + }, + }) + }) + + it('local API - should populate with the defaultPopulate select shape', async () => { + const result = await payload.findByID({ collection: 'pages', depth: 1, id: aboutPage.id }) + + const { + content: [ + { + link: { doc, docHasManyPoly, docMany, docPoly }, + 
richTextSlate: [richTextSlateRel], + richTextLexical: { + root: { + children: [richTextLexicalRel], + }, + }, + }, + ], + } = result + + expect(doc).toStrictEqual(expectedHomePage) + expect(docMany).toStrictEqual([expectedHomePage]) + expect(docPoly).toStrictEqual({ + relationTo: 'pages', + value: expectedHomePage, + }) + expect(docHasManyPoly).toStrictEqual([ + { + relationTo: 'pages', + value: expectedHomePage, + }, + ]) + expect(richTextLexicalRel.value).toStrictEqual(expectedHomePage) + expect(richTextSlateRel.value).toStrictEqual(expectedHomePage) + }) + + it('REST API - should populate with the defaultPopulate select shape', async () => { + const restResult = await ( + await restClient.GET(`/pages/${aboutPage.id}`, { query: { depth: 1 } }) + ).json() + + const { + content: [ + { + link: { doc, docHasManyPoly, docMany, docPoly }, + richTextSlate: [richTextSlateRel], + richTextLexical: { + root: { + children: [richTextLexicalRel], + }, + }, + }, + ], + } = restResult + + expect(doc).toMatchObject(expectedHomePage) + expect(docMany).toMatchObject([expectedHomePage]) + expect(docPoly).toMatchObject({ + relationTo: 'pages', + value: expectedHomePage, + }) + expect(docHasManyPoly).toMatchObject([ + { + relationTo: 'pages', + value: expectedHomePage, + }, + ]) + expect(richTextLexicalRel.value).toMatchObject(expectedHomePage) + expect(richTextSlateRel.value).toMatchObject(expectedHomePage) + }) + }) }) function createPost() { diff --git a/test/select/payload-types.ts b/test/select/payload-types.ts index 664e8d4a2ed..9a3bee9d14d 100644 --- a/test/select/payload-types.ts +++ b/test/select/payload-types.ts @@ -15,6 +15,7 @@ export interface Config { 'localized-posts': LocalizedPost; 'versioned-posts': VersionedPost; 'deep-posts': DeepPost; + pages: Page; users: User; 'payload-locked-documents': PayloadLockedDocument; 'payload-preferences': PayloadPreference; @@ -25,6 +26,7 @@ export interface Config { 'localized-posts': LocalizedPostsSelect | LocalizedPostsSelect; 'versioned-posts': VersionedPostsSelect | VersionedPostsSelect; 'deep-posts': DeepPostsSelect | DeepPostsSelect; + pages: PagesSelect | PagesSelect; users: UsersSelect | UsersSelect; 'payload-locked-documents': PayloadLockedDocumentsSelect | PayloadLockedDocumentsSelect; 'payload-preferences': PayloadPreferencesSelect | PayloadPreferencesSelect; @@ -239,6 +241,59 @@ export interface DeepPost { updatedAt: string; createdAt: string; } +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "pages". + */ +export interface Page { + id: string; + content?: + | { + title: string; + link: { + docPoly?: { + relationTo: 'pages'; + value: string | Page; + } | null; + doc?: (string | null) | Page; + docMany?: (string | Page)[] | null; + docHasManyPoly?: + | { + relationTo: 'pages'; + value: string | Page; + }[] + | null; + label: string; + }; + richTextLexical?: { + root: { + type: string; + children: { + type: string; + version: number; + [k: string]: unknown; + }[]; + direction: ('ltr' | 'rtl') | null; + format: 'left' | 'start' | 'center' | 'right' | 'end' | 'justify' | ''; + indent: number; + version: number; + }; + [k: string]: unknown; + } | null; + richTextSlate?: + | { + [k: string]: unknown; + }[] + | null; + id?: string | null; + blockName?: string | null; + blockType: 'cta'; + }[] + | null; + slug: string; + updatedAt: string; + createdAt: string; +} /** * This interface was referenced by `Config`'s JSON-Schema * via the `definition` "users". 
@@ -279,6 +334,10 @@ export interface PayloadLockedDocument { relationTo: 'deep-posts'; value: string | DeepPost; } | null) + | ({ + relationTo: 'pages'; + value: string | Page; + } | null) | ({ relationTo: 'users'; value: string | User; @@ -520,6 +579,37 @@ export interface DeepPostsSelect { updatedAt?: T; createdAt?: T; } +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "pages_select". + */ +export interface PagesSelect { + content?: + | T + | { + cta?: + | T + | { + title?: T; + link?: + | T + | { + docPoly?: T; + doc?: T; + docMany?: T; + docHasManyPoly?: T; + label?: T; + }; + richTextLexical?: T; + richTextSlate?: T; + id?: T; + blockName?: T; + }; + }; + slug?: T; + updatedAt?: T; + createdAt?: T; +} /** * This interface was referenced by `Config`'s JSON-Schema * via the `definition` "users_select". From d64946c2e20acb9dee69405217816c759f3f4726 Mon Sep 17 00:00:00 2001 From: Sasha <64744993+r1tsuu@users.noreply.github.com> Date: Wed, 30 Oct 2024 19:42:07 +0200 Subject: [PATCH 15/19] fix(db-mongodb): ensure relationships are stored in `ObjectID` (#8932) ### What? Since the join field, we do store relationship fields values in `ObjectID`. This wasn't true if the field is nested to an array / blocks. ### Why? All relationship fields values should be stored in `ObjectID`. ### How? Fixes arrays / blocks handling in the `traverseFields.ts` function. Before it didn't run for them. --- .../payload/src/utilities/traverseFields.ts | 60 +++++++++++++++++-- test/joins/int.spec.ts | 1 - 2 files changed, 56 insertions(+), 5 deletions(-) diff --git a/packages/payload/src/utilities/traverseFields.ts b/packages/payload/src/utilities/traverseFields.ts index 189aaa504f2..0c0745756fe 100644 --- a/packages/payload/src/utilities/traverseFields.ts +++ b/packages/payload/src/utilities/traverseFields.ts @@ -1,7 +1,33 @@ -import type { Field, TabAsField } from '../fields/config/types.js' +import type { ArrayField, BlocksField, Field, TabAsField } from '../fields/config/types.js' import { fieldHasSubFields } from '../fields/config/types.js' +const traverseArrayOrBlocksField = ({ + callback, + data, + field, + parentRef, +}: { + callback: TraverseFieldsCallback + data: Record[] + field: ArrayField | BlocksField + parentRef?: unknown +}) => { + for (const ref of data) { + let fields: Field[] + if (field.type === 'blocks' && typeof ref?.blockType === 'string') { + const block = field.blocks.find((block) => block.slug === ref.blockType) + fields = block?.fields + } else if (field.type === 'array') { + fields = field.fields + } + + if (fields) { + traverseFields({ callback, fields, parentRef, ref }) + } + } +} + export type TraverseFieldsCallback = (args: { /** * The current field @@ -68,11 +94,11 @@ export const traverseFields = ({ }) return } - if (field.type !== 'tab' && fieldHasSubFields(field)) { + if (field.type !== 'tab' && (fieldHasSubFields(field) || field.type === 'blocks')) { const parentRef = ref if ('name' in field && field.name) { if (typeof ref[field.name] === 'undefined') { - if (field.type === 'array') { + if (field.type === 'array' || field.type === 'blocks') { if (field.localized) { ref[field.name] = {} } else { @@ -85,7 +111,33 @@ export const traverseFields = ({ } ref = ref[field.name] } - traverseFields({ callback, fields: field.fields, parentRef, ref }) + + if (field.type === 'blocks' || field.type === 'array') { + if (field.localized) { + for (const key in (ref ?? 
{}) as Record) { + const localeData = ref[key] + if (!Array.isArray(localeData)) { + continue + } + + traverseArrayOrBlocksField({ + callback, + data: localeData, + field, + parentRef, + }) + } + } else if (Array.isArray(ref)) { + traverseArrayOrBlocksField({ + callback, + data: ref, + field, + parentRef, + }) + } + } else { + traverseFields({ callback, fields: field.fields, parentRef, ref }) + } } }) } diff --git a/test/joins/int.spec.ts b/test/joins/int.spec.ts index 1f39896a143..a89dac2c493 100644 --- a/test/joins/int.spec.ts +++ b/test/joins/int.spec.ts @@ -643,7 +643,6 @@ describe('Joins Field', () => { } }` - expect(true).toBeTruthy() const response = await restClient .GRAPHQL_POST({ body: JSON.stringify({ query }) }) .then((res) => res.json()) From 03331de2aca23ec981fd90eceb1fef660e89c488 Mon Sep 17 00:00:00 2001 From: Paul Date: Wed, 30 Oct 2024 11:44:09 -0600 Subject: [PATCH 16/19] fix(ui): perf improvements in bulk upload (#8944) --- .../elements/BulkUpload/FileSidebar/index.tsx | 5 +- .../BulkUpload/FormsManager/index.tsx | 38 ++++++++++++++ .../src/elements/Thumbnail/createThumbnail.ts | 52 +++++++++++++++++++ packages/ui/src/elements/Thumbnail/index.tsx | 3 ++ 4 files changed, 95 insertions(+), 3 deletions(-) create mode 100644 packages/ui/src/elements/Thumbnail/createThumbnail.ts diff --git a/packages/ui/src/elements/BulkUpload/FileSidebar/index.tsx b/packages/ui/src/elements/BulkUpload/FileSidebar/index.tsx index c01072a4f83..f698f6564b1 100644 --- a/packages/ui/src/elements/BulkUpload/FileSidebar/index.tsx +++ b/packages/ui/src/elements/BulkUpload/FileSidebar/index.tsx @@ -36,6 +36,7 @@ export function FileSidebar() { isInitializing, removeFile, setActiveIndex, + thumbnailUrls, totalErrorCount, } = useFormsManager() const { initialFiles, maxFiles } = useBulkUpload() @@ -156,9 +157,7 @@ export function FileSidebar() { >

diff --git a/packages/ui/src/elements/BulkUpload/FormsManager/index.tsx b/packages/ui/src/elements/BulkUpload/FormsManager/index.tsx index 362ac23c877..2f6aeae50c8 100644 --- a/packages/ui/src/elements/BulkUpload/FormsManager/index.tsx +++ b/packages/ui/src/elements/BulkUpload/FormsManager/index.tsx @@ -16,6 +16,7 @@ import { useTranslation } from '../../../providers/Translation/index.js' import { getFormState } from '../../../utilities/getFormState.js' import { hasSavePermission as getHasSavePermission } from '../../../utilities/hasSavePermission.js' import { useLoadingOverlay } from '../../LoadingOverlay/index.js' +import { createThumbnail } from '../../Thumbnail/createThumbnail.js' import { useBulkUpload } from '../index.js' import { createFormData } from './createFormData.js' import { formsManagementReducer } from './reducer.js' @@ -41,6 +42,7 @@ type FormsManagerContext = { errorCount: number index: number }) => void + readonly thumbnailUrls: string[] readonly totalErrorCount?: number } @@ -59,6 +61,7 @@ const Context = React.createContext({ saveAllDocs: () => Promise.resolve(), setActiveIndex: () => 0, setFormTotalErrorCount: () => {}, + thumbnailUrls: [], totalErrorCount: 0, }) @@ -90,6 +93,40 @@ export function FormsManagerProvider({ children }: FormsManagerProps) { const [state, dispatch] = React.useReducer(formsManagementReducer, initialState) const { activeIndex, forms, totalErrorCount } = state + const formsRef = React.useRef(forms) + formsRef.current = forms + const formsCount = forms.length + + const thumbnailUrlsRef = React.useRef([]) + const processedFiles = React.useRef(new Set()) // Track already-processed files + const [renderedThumbnails, setRenderedThumbnails] = React.useState([]) + + React.useEffect(() => { + // eslint-disable-next-line @typescript-eslint/no-floating-promises + ;(async () => { + const newThumbnails = [...thumbnailUrlsRef.current] + + for (let i = 0; i < formsCount; i++) { + const file = formsRef.current[i].formState.file.value as File + + // Skip if already processed + if (processedFiles.current.has(file) || !file) { + continue + } + processedFiles.current.add(file) + + // Generate thumbnail and update ref + const thumbnailUrl = await createThumbnail(file) + newThumbnails[i] = thumbnailUrl + thumbnailUrlsRef.current = newThumbnails + + // Trigger re-render in batches + setRenderedThumbnails([...newThumbnails]) + await new Promise((resolve) => setTimeout(resolve, 100)) + } + })() + }, [formsCount, createThumbnail]) + const { toggleLoadingOverlay } = useLoadingOverlay() const { closeModal } = useModal() const { collectionSlug, drawerSlug, initialFiles, onSuccess } = useBulkUpload() @@ -378,6 +415,7 @@ export function FormsManagerProvider({ children }: FormsManagerProps) { saveAllDocs, setActiveIndex, setFormTotalErrorCount, + thumbnailUrls: renderedThumbnails, totalErrorCount, }} > diff --git a/packages/ui/src/elements/Thumbnail/createThumbnail.ts b/packages/ui/src/elements/Thumbnail/createThumbnail.ts new file mode 100644 index 00000000000..b5dfc859716 --- /dev/null +++ b/packages/ui/src/elements/Thumbnail/createThumbnail.ts @@ -0,0 +1,52 @@ +/** + * Create a thumbnail from a File object by drawing it onto an OffscreenCanvas + */ +export const createThumbnail = (file: File): Promise => { + return new Promise((resolve, reject) => { + const img = new Image() + img.src = URL.createObjectURL(file) // Use Object URL directly + + img.onload = () => { + const maxDimension = 280 + let drawHeight: number, drawWidth: number + + // Calculate aspect ratio + const 
aspectRatio = img.width / img.height + + // Determine dimensions to fit within maxDimension while maintaining aspect ratio + if (aspectRatio > 1) { + // Image is wider than tall + drawWidth = maxDimension + drawHeight = maxDimension / aspectRatio + } else { + // Image is taller than wide, or square + drawWidth = maxDimension * aspectRatio + drawHeight = maxDimension + } + + const canvas = new OffscreenCanvas(drawWidth, drawHeight) // Create an OffscreenCanvas + const ctx = canvas.getContext('2d') + + // Draw the image onto the OffscreenCanvas with calculated dimensions + ctx.drawImage(img, 0, 0, drawWidth, drawHeight) + + // Convert the OffscreenCanvas to a Blob and free up memory + canvas + .convertToBlob({ type: 'image/jpeg', quality: 0.25 }) + .then((blob) => { + URL.revokeObjectURL(img.src) // Release the Object URL + const reader = new FileReader() + reader.onload = () => resolve(reader.result as string) // Resolve as data URL + reader.onerror = reject + reader.readAsDataURL(blob) + }) + .catch(reject) + } + + img.onerror = (error) => { + URL.revokeObjectURL(img.src) // Release Object URL on error + // eslint-disable-next-line @typescript-eslint/prefer-promise-reject-errors + reject(error) + } + }) +} diff --git a/packages/ui/src/elements/Thumbnail/index.tsx b/packages/ui/src/elements/Thumbnail/index.tsx index 7e7cb651be9..2a6ba88089b 100644 --- a/packages/ui/src/elements/Thumbnail/index.tsx +++ b/packages/ui/src/elements/Thumbnail/index.tsx @@ -8,6 +8,7 @@ const baseClass = 'thumbnail' import type { SanitizedCollectionConfig } from 'payload' import { File } from '../../graphics/File/index.js' +import { useIntersect } from '../../hooks/useIntersect.js' import { ShimmerEffect } from '../ShimmerEffect/index.js' export type ThumbnailProps = { @@ -28,6 +29,7 @@ export const Thumbnail: React.FC = (props) => { React.useEffect(() => { if (!fileSrc) { + // eslint-disable-next-line @eslint-react/hooks-extra/no-direct-set-state-in-use-effect setFileExists(false) return } @@ -72,6 +74,7 @@ export function ThumbnailComponent(props: ThumbnailComponentProps) { React.useEffect(() => { if (!fileSrc) { + // eslint-disable-next-line @eslint-react/hooks-extra/no-direct-set-state-in-use-effect setFileExists(false) return } From 0574155e5997ee0b4198433237d6782bd993d138 Mon Sep 17 00:00:00 2001 From: Said Akhrarov <36972061+akhrarovsaid@users.noreply.github.com> Date: Wed, 30 Oct 2024 13:54:44 -0400 Subject: [PATCH 17/19] docs: fix docs-wide spelling errors and formatting issues (#8942) ### What? I noticed a spelling error in the banner of the beta docs and decided I could save everyone some time by *running the entirety of the beta docs* through a spellchecker. ### Why? To fix many spelling and formatting mistakes at once. ### How? By enabling `edit mode` in my browser and letting the built-in spellchecker perform its magic (and changing _only_ where it made sense). ~~Ironically, the original spelling mistake that inspired me to do this remains unchanged as that is a part of the website repo. 
[PR for that is here](https://github.com/payloadcms/website/pull/388).~~ --- docs/access-control/overview.mdx | 2 +- docs/admin/hooks.mdx | 2 +- docs/authentication/overview.mdx | 2 +- docs/authentication/token-data.mdx | 2 +- docs/configuration/overview.mdx | 2 +- docs/fields/radio.mdx | 2 +- docs/fields/rich-text.mdx | 6 +----- docs/fields/upload.mdx | 2 +- docs/hooks/context.mdx | 2 +- docs/lexical/migration.mdx | 2 +- docs/plugins/form-builder.mdx | 2 +- docs/queries/select.mdx | 1 + docs/queries/sort.mdx | 2 +- docs/rest-api/overview.mdx | 2 +- docs/upload/overview.mdx | 4 ++-- docs/upload/storage-adapters.mdx | 2 +- 16 files changed, 17 insertions(+), 20 deletions(-) diff --git a/docs/access-control/overview.mdx b/docs/access-control/overview.mdx index 546a886be2d..8aeb847a318 100644 --- a/docs/access-control/overview.mdx +++ b/docs/access-control/overview.mdx @@ -18,7 +18,7 @@ There are many use cases for Access Control, including: - Only allowing public access to posts where a `status` field is equal to `published` - Giving only users with a `role` field equal to `admin` the ability to delete posts - Allowing anyone to submit contact forms, but only logged in users to `read`, `update` or `delete` them -- Restricting a user to only be able to see their own orders, but noone else's +- Restricting a user to only be able to see their own orders, but no-one else's - Allowing users that belong to a certain organization to access only that organization's resources There are three main types of Access Control in Payload: diff --git a/docs/admin/hooks.mdx b/docs/admin/hooks.mdx index 6648399d961..2c494261158 100644 --- a/docs/admin/hooks.mdx +++ b/docs/admin/hooks.mdx @@ -760,7 +760,7 @@ const LinkFromCategoryToPosts: React.FC = () => { ## useLocale -In any Custom Component you can get the selected locale object with the `useLocale` hook. `useLocale`gives you the full locale object, consisting of a `label`, `rtl`(right-to-left) property, and then `code`. Here is a simple example: +In any Custom Component you can get the selected locale object with the `useLocale` hook. `useLocale` gives you the full locale object, consisting of a `label`, `rtl`(right-to-left) property, and then `code`. Here is a simple example: ```tsx 'use client' diff --git a/docs/authentication/overview.mdx b/docs/authentication/overview.mdx index f29eb16de52..fcda566aedb 100644 --- a/docs/authentication/overview.mdx +++ b/docs/authentication/overview.mdx @@ -86,7 +86,7 @@ The following options are available: | **`loginWithUsername`** | Ability to allow users to login with username/password. [More](/docs/authentication/overview#login-with-username) | | **`maxLoginAttempts`** | Only allow a user to attempt logging in X amount of times. Automatically locks out a user from authenticating if this limit is passed. Set to `0` to disable. | | **`removeTokenFromResponses`** | Set to true if you want to remove the token from the returned authentication API responses such as login or refresh. | -| **`strategies`** | Advanced - an array of custom authentification strategies to extend this collection's authentication with. [More details](./custom-strategies). | +| **`strategies`** | Advanced - an array of custom authentication strategies to extend this collection's authentication with. [More details](./custom-strategies). | | **`tokenExpiration`** | How long (in seconds) to keep the user logged in. JWTs and HTTP-only cookies will both expire at the same time. 
| | **`useAPIKey`** | Payload Authentication provides for API keys to be set on each user within an Authentication-enabled Collection. [More details](./api-keys). | | **`verify`** | Set to `true` or pass an object with verification options to require users to verify by email before they are allowed to log into your app. [More details](./email#email-verification). | diff --git a/docs/authentication/token-data.mdx b/docs/authentication/token-data.mdx index d90c075e022..504ce9f2405 100644 --- a/docs/authentication/token-data.mdx +++ b/docs/authentication/token-data.mdx @@ -6,7 +6,7 @@ desc: Storing data for read on the request object. keywords: authentication, config, configuration, documentation, Content Management System, cms, headless, javascript, node, react, nextjs --- -During the lifecycle of a request you will be able to access the data you have configured to be stored in the JWT by accessing `req.user`. The user object is automatically appeneded to the request for you. +During the lifecycle of a request you will be able to access the data you have configured to be stored in the JWT by accessing `req.user`. The user object is automatically appended to the request for you. ### Definining Token Data diff --git a/docs/configuration/overview.mdx b/docs/configuration/overview.mdx index 1dff3e7116a..a5194d88b63 100644 --- a/docs/configuration/overview.mdx +++ b/docs/configuration/overview.mdx @@ -163,7 +163,7 @@ In development mode, if the configuration file is not found at the root, Payload **Production Mode** -In production mode, Payload will first attempt to find the config file in the `outDir` of your `tsconfig.json`, and if not found, will fallback to the `rootDor` directory: +In production mode, Payload will first attempt to find the config file in the `outDir` of your `tsconfig.json`, and if not found, will fallback to the `rootDir` directory: ```json { diff --git a/docs/fields/radio.mdx b/docs/fields/radio.mdx index ddfc1e90cac..5080e54f485 100644 --- a/docs/fields/radio.mdx +++ b/docs/fields/radio.mdx @@ -83,7 +83,7 @@ The Radio Field inherits all of the default options from the base [Field Admin C | Property | Description | | ------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------- | -| **`layout`** | Allows for the radio group to be styled as a horizonally or vertically distributed list. The default value is `horizontal`. | +| **`layout`** | Allows for the radio group to be styled as a horizontally or vertically distributed list. The default value is `horizontal`. | ## Example diff --git a/docs/fields/rich-text.mdx b/docs/fields/rich-text.mdx index 1c36ccadde3..d4982b48239 100644 --- a/docs/fields/rich-text.mdx +++ b/docs/fields/rich-text.mdx @@ -25,11 +25,7 @@ Right now, Payload is officially supporting two rich text editors: Consistent with Payload's goal of making you learn as little of Payload as possible, customizing - and using the Rich Text Editor does not involve learning how to develop for a - {' '} - Payload - {' '} - rich text editor. + and using the Rich Text Editor does not involve learning how to develop for a{' '}Payload{' '}rich text editor. 
Instead, you can invest your time and effort into learning the underlying open-source tools that diff --git a/docs/fields/upload.mdx b/docs/fields/upload.mdx index d7db7ca27da..055509723c5 100644 --- a/docs/fields/upload.mdx +++ b/docs/fields/upload.mdx @@ -46,7 +46,7 @@ export const MyUploadField: Field = { | Option | Description | |------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | **`name`** \* | To be used as the property name when stored and retrieved from the database. [More](/docs/fields/overview#field-names) | -| **`*relationTo`** \* | Provide a single collection `slug` to allow this field to accept a relation to. Note: the related collection must be configured to support Uploads. | +| **`relationTo`** \* | Provide a single collection `slug` to allow this field to accept a relation to. Note: the related collection must be configured to support Uploads. | | **`filterOptions`** | A query to filter which options appear in the UI and validate against. [More](#filtering-upload-options). | | **`maxDepth`** | Sets a number limit on iterations of related documents to populate when queried. [Depth](../queries/depth) | | **`label`** | Text used as a field label in the Admin Panel or an object with keys for each language. | diff --git a/docs/hooks/context.mdx b/docs/hooks/context.mdx index 1b0006d8f0c..7004c92b178 100644 --- a/docs/hooks/context.mdx +++ b/docs/hooks/context.mdx @@ -139,4 +139,4 @@ declare module 'payload' { } ``` -This will add a the property `myObject` with a type of string to every context object. Make sure to follow this example correctly, as type augmentation can mess up your types if you do it wrong. +This will add the property `myObject` with a type of string to every context object. Make sure to follow this example correctly, as type augmentation can mess up your types if you do it wrong. diff --git a/docs/lexical/migration.mdx b/docs/lexical/migration.mdx index 8e0b5841c86..dbb8dbe966e 100644 --- a/docs/lexical/migration.mdx +++ b/docs/lexical/migration.mdx @@ -52,7 +52,7 @@ const Pages: CollectionConfig = { } ``` -and done! Now, everytime this lexical editor is initialized, it converts the slate date to lexical on-the-fly. If the data is already in lexical format, it will just pass it through. +and done! Now, every time this lexical editor is initialized, it converts the slate date to lexical on-the-fly. If the data is already in lexical format, it will just pass it through. This is by far the easiest way to migrate from Slate to Lexical, although it does come with a few caveats: diff --git a/docs/plugins/form-builder.mdx b/docs/plugins/form-builder.mdx index 381b5a1f411..6031ad61740 100644 --- a/docs/plugins/form-builder.mdx +++ b/docs/plugins/form-builder.mdx @@ -416,7 +416,7 @@ This plugin relies on the [email configuration](../email/overview) defined in yo ### Email formatting -The email contents supports rich text which will be serialised to HTML on the server before being sent. By default it reads the global configuration of your rich text editor. +The email contents supports rich text which will be serialized to HTML on the server before being sent. By default it reads the global configuration of your rich text editor. The email subject and body supports inserting dynamic fields from the form submission data using the `{{field_name}}` syntax. 
For example, if you have a field called `name` in your form, you can include this in the email body like so: diff --git a/docs/queries/select.mdx b/docs/queries/select.mdx index 6ff7f1fb169..9477c482dc6 100644 --- a/docs/queries/select.mdx +++ b/docs/queries/select.mdx @@ -92,6 +92,7 @@ const getPosts = async () => { const response = await fetch(`http://localhost:3000/api/posts${stringifiedQuery}`) // Continue to handle the response below... } +``` Reminder: diff --git a/docs/queries/sort.mdx b/docs/queries/sort.mdx index 60b46c3cd2b..293a4d23d39 100644 --- a/docs/queries/sort.mdx +++ b/docs/queries/sort.mdx @@ -6,7 +6,7 @@ desc: Payload sort allows you to order your documents by a field in ascending or keywords: query, documents, pagination, documentation, Content Management System, cms, headless, javascript, node, react, nextjs --- -Documents in Payload can be easily sorted by a specific [Field](../fields/overview). When querying Documents, you can pass the name of any top-level field, and the response will sort the Documents by that field in _ascending_ order. If prefixed with a minus symbol ("-"), they will be sorted in _descending_ order. In Local API multiple fields can be specificed by using an array of strings. In REST API multiple fields can be specified by separating fields with comma. The minus symbol can be in front of individual fields. +Documents in Payload can be easily sorted by a specific [Field](../fields/overview). When querying Documents, you can pass the name of any top-level field, and the response will sort the Documents by that field in _ascending_ order. If prefixed with a minus symbol ("-"), they will be sorted in _descending_ order. In Local API multiple fields can be specified by using an array of strings. In REST API multiple fields can be specified by separating fields with comma. The minus symbol can be in front of individual fields. Because sorting is handled by the database, the field cannot be a [Virtual Field](https://payloadcms.com/blog/learn-how-virtual-fields-can-help-solve-common-cms-challenges). It must be stored in the database to be searchable. diff --git a/docs/rest-api/overview.mdx b/docs/rest-api/overview.mdx index 2f3003f0e63..69f68a7fd7d 100644 --- a/docs/rest-api/overview.mdx +++ b/docs/rest-api/overview.mdx @@ -18,7 +18,7 @@ All Payload API routes are mounted and prefixed to your config's `routes.api` UR - [depth](../queries/depth) - automatically populates relationships and uploads - [locale](/docs/configuration/localization#retrieving-localized-docs) - retrieves document(s) in a specific locale - [fallback-locale](/docs/configuration/localization#retrieving-localized-docs) - specifies a fallback locale if no locale value exists -- [select](../queries/select) - speicifes which fields to include to the result +- [select](../queries/select) - specifies which fields to include to the result ## Collections diff --git a/docs/upload/overview.mdx b/docs/upload/overview.mdx index f4110266521..026203889ae 100644 --- a/docs/upload/overview.mdx +++ b/docs/upload/overview.mdx @@ -98,7 +98,7 @@ _An asterisk denotes that an option is required._ | **`displayPreview`** | Enable displaying preview of the uploaded file in Upload fields related to this Collection. Can be locally overridden by `displayPreview` option in Upload field. [More](/docs/fields/upload#config-options). | | **`externalFileHeaderFilter`** | Accepts existing headers and returns the headers after filtering or modifying. 
| | **`filesRequiredOnCreate`** | Mandate file data on creation, default is true. | -| **`filenameCompoundIndex`** | Field slugs to use for a compount index instead of the default filename index. +| **`filenameCompoundIndex`** | Field slugs to use for a compound index instead of the default filename index. | **`focalPoint`** | Set to `false` to disable the focal point selection tool in the [Admin Panel](../admin/overview). The focal point selector is only available when `imageSizes` or `resizeOptions` are defined. [More](#crop-and-focal-point-selector) | | **`formatOptions`** | An object with `format` and `options` that are used with the Sharp image library to format the upload file. [More](https://sharp.pixelplumbing.com/api-output#toformat) | | **`handlers`** | Array of Request handlers to execute when fetching a file, if a handler returns a Response it will be sent to the client. Otherwise Payload will retrieve and send back the file. | @@ -144,7 +144,7 @@ export default buildConfig({ If you specify an array of `imageSizes` to your `upload` config, Payload will automatically crop and resize your uploads to fit each of the sizes specified by your config. -The [Admin Panel](../admin/overview) will also automatically display all available files, including width, height, and filesize, for each of your uploaded files. +The [Admin Panel](../admin/overview) will also automatically display all available files, including width, height, and file size, for each of your uploaded files. Behind the scenes, Payload relies on [`sharp`](https://sharp.pixelplumbing.com/api-resize#resize) to perform its image resizing. You can specify additional options for `sharp` to use while resizing your images. diff --git a/docs/upload/storage-adapters.mdx b/docs/upload/storage-adapters.mdx index d02dfdd5b76..644922a22bc 100644 --- a/docs/upload/storage-adapters.mdx +++ b/docs/upload/storage-adapters.mdx @@ -310,7 +310,7 @@ This plugin is configurable to work across many different Payload collections. A | Option | Type | Description | | ---------------- | ----------------------------------- | --------------------------------------------------------------------------------------------------------------------------------- | | `collections` \* | `Record` | Object with keys set to the slug of collections you want to enable the plugin for, and values set to collection-specific options. | -| `enabled` | | `boolean` to conditionally enable/disable plugin. Default: true. | +| `enabled` | `boolean` | To conditionally enable/disable plugin. Default: `true`. | ## Collection-specific options From 8970c6b3a6c75d2fd1968a19feee8697777c5bda Mon Sep 17 00:00:00 2001 From: James Mikrut Date: Wed, 30 Oct 2024 13:56:50 -0400 Subject: [PATCH 18/19] feat: adds jobs queue (#8228) Adds a jobs queue to Payload. 
- [x] Docs, w/ examples for Vercel Cron, additional services - [x] Type the `job` using GeneratedTypes in `JobRunnerArgs` (@AlessioGr) - [x] Write the `runJobs` function - [x] Allow for some type of `payload.runTask` - [x] Open up a new bin script for running jobs - [x] Determine strategy for runner endpoint to either await jobs successfully or return early and stay open until job work completes (serverless ramifications here) - [x] Allow for job runner to accept how many jobs to run in one invocation - [x] Make a Payload local API method for creating a new job easily (payload.createJob) or similar which is strongly typed (@AlessioGr) - [x] Make `payload.runJobs` or similar (@AlessioGr) - [x] Write tests for retrying up to max retries for a given step - [x] Write tests for dynamic import of a runner The shape of the config should permit the definition of steps separate from the job workflows themselves. ```js const config = { // Not sure if we need this property anymore queues: { }, // A job is an instance of a workflow, stored in DB // and triggered by something at some point jobs: { // Be able to override the jobs collection collectionOverrides: () => {}, // Workflows are groups of tasks that handle // the flow from task to task. // When defined on the config, they are considered as predefined workflows // BUT - in the future, we'll allow for UI-based workflow definition as well. workflows: [ { slug: 'job-name', // Temporary name for this // should be able to pass function // or path to it for Node to dynamically import controlFlowInJS: '/my-runner.js', // Temporary name as well // should be able to eventually define workflows // in UI (meaning they need to be serialized in JSON) // Should not be able to define both control flows controlFlowInJSON: [ { task: 'myTask', next: { // etc } } ], // Workflows take input // which are a group of fields input: [ { name: 'post', type: 'relationship', relationTo: 'posts', maxDepth: 0, required: true, }, { name: 'message', type: 'text', required: true, }, ], }, ], // Tasks are defined separately as isolated functions // that can be retried on fail tasks: [ { slug: 'myTask', retries: 2, // Each task takes input // Used to auto-type the task func args input: [ { name: 'post', type: 'relationship', relationTo: 'posts', maxDepth: 0, required: true, }, { name: 'message', type: 'text', required: true, }, ], // Each task takes output // Used to auto-type the function signature output: [ { name: 'success', type: 'checkbox', } ], onSuccess: () => {}, onFail: () => {}, run: myRunner, }, ] } } ``` ### `payload.createJob` This function should allow for the creation of jobs based on either a workflow (group of tasks) or an individual task. 
To create a job using a workflow: ```js const job = await payload.createJob({ // Accept the `name` of a workflow so we can match to either a // code-based workflow OR a workflow defined in the DB // Should auto-type the input workflowName: 'myWorkflow', input: { // typed to the args of the workflow by name } }) ``` To create a job using a task: ```js const job = await payload.createJob({ // Accept the `name` of a task task: 'myTask', input: { // typed to the args of the task by name } }) ``` --------- Co-authored-by: Alessio Gravili Co-authored-by: Dan Ribbens --- docs/database/transactions.mdx | 2 +- docs/jobs-queue/overview.mdx | 382 ++++ packages/payload/package.json | 1 + packages/payload/src/bin/index.ts | 26 + packages/payload/src/config/client.ts | 2 + packages/payload/src/config/defaults.ts | 10 + packages/payload/src/config/sanitize.ts | 24 + packages/payload/src/config/types.ts | 7 +- packages/payload/src/index.ts | 43 +- .../queues/config/generateJobsJSONSchemas.ts | 173 ++ .../src/queues/config/jobsCollection.ts | 206 ++ .../payload/src/queues/config/types/index.ts | 45 + .../src/queues/config/types/taskTypes.ts | 171 ++ .../queues/config/types/workflowJSONTypes.ts | 36 + .../src/queues/config/types/workflowTypes.ts | 127 ++ packages/payload/src/queues/localAPI.ts | 66 + .../src/queues/operations/runJobs/index.ts | 228 ++ .../operations/runJobs/runJSONJob/index.ts | 159 ++ .../runJob/calculateBackoffWaitUntil.ts | 36 + .../runJobs/runJob/getRunTaskFunction.ts | 309 +++ .../runJobs/runJob/getUpdateJobFunction.ts | 23 + .../runJobs/runJob/handleWorkflowError.ts | 66 + .../runJobs/runJob/importHandlerPath.ts | 28 + .../queues/operations/runJobs/runJob/index.ts | 83 + .../payload/src/queues/restEndpointRun.ts | 81 + .../src/queues/utilities/getJobTaskStatus.ts | 38 + .../src/utilities/configToJSONSchema.ts | 44 +- .../toolbars/fixed/client/Toolbar/index.tsx | 2 +- packages/storage-vercel-blob/src/index.ts | 2 +- .../storage-vercel-blob/src/staticHandler.ts | 6 +- pnpm-lock.yaml | 9 + .../src/app/(payload)/admin/importMap.js | 43 +- test/_community/payload-types.ts | 4 + test/fields/payload-types.ts | 6 + test/helpers/e2e/reorderColumns.ts | 4 +- test/import-test/import-all-2-exports.ts | 178 +- test/queues/.gitignore | 2 + test/queues/config.ts | 580 +++++ test/queues/e2e.spec.ts | 65 + test/queues/eslint.config.js | 20 + test/queues/int.spec.ts | 668 ++++++ test/queues/payload-types.ts | 446 ++++ test/queues/runners/externalTask.ts | 16 + test/queues/runners/externalWorkflow.ts | 12 + test/queues/runners/updatePost.ts | 55 + test/queues/schema.graphql | 1902 +++++++++++++++++ test/queues/seed.ts | 30 + test/queues/tsconfig.eslint.json | 13 + test/queues/tsconfig.json | 3 + 49 files changed, 6357 insertions(+), 125 deletions(-) create mode 100644 docs/jobs-queue/overview.mdx create mode 100644 packages/payload/src/queues/config/generateJobsJSONSchemas.ts create mode 100644 packages/payload/src/queues/config/jobsCollection.ts create mode 100644 packages/payload/src/queues/config/types/index.ts create mode 100644 packages/payload/src/queues/config/types/taskTypes.ts create mode 100644 packages/payload/src/queues/config/types/workflowJSONTypes.ts create mode 100644 packages/payload/src/queues/config/types/workflowTypes.ts create mode 100644 packages/payload/src/queues/localAPI.ts create mode 100644 packages/payload/src/queues/operations/runJobs/index.ts create mode 100644 packages/payload/src/queues/operations/runJobs/runJSONJob/index.ts create mode 100644 
packages/payload/src/queues/operations/runJobs/runJob/calculateBackoffWaitUntil.ts create mode 100644 packages/payload/src/queues/operations/runJobs/runJob/getRunTaskFunction.ts create mode 100644 packages/payload/src/queues/operations/runJobs/runJob/getUpdateJobFunction.ts create mode 100644 packages/payload/src/queues/operations/runJobs/runJob/handleWorkflowError.ts create mode 100644 packages/payload/src/queues/operations/runJobs/runJob/importHandlerPath.ts create mode 100644 packages/payload/src/queues/operations/runJobs/runJob/index.ts create mode 100644 packages/payload/src/queues/restEndpointRun.ts create mode 100644 packages/payload/src/queues/utilities/getJobTaskStatus.ts create mode 100644 test/queues/.gitignore create mode 100644 test/queues/config.ts create mode 100644 test/queues/e2e.spec.ts create mode 100644 test/queues/eslint.config.js create mode 100644 test/queues/int.spec.ts create mode 100644 test/queues/payload-types.ts create mode 100644 test/queues/runners/externalTask.ts create mode 100644 test/queues/runners/externalWorkflow.ts create mode 100644 test/queues/runners/updatePost.ts create mode 100644 test/queues/schema.graphql create mode 100644 test/queues/seed.ts create mode 100644 test/queues/tsconfig.eslint.json create mode 100644 test/queues/tsconfig.json diff --git a/docs/database/transactions.mdx b/docs/database/transactions.mdx index 70bb0268387..55c24cfb031 100644 --- a/docs/database/transactions.mdx +++ b/docs/database/transactions.mdx @@ -126,6 +126,6 @@ await payload.update({ where: { slug: { equals: 'my-slug' } }, - req: { disableTransaction: true }, + disableTransaction: true, }) ``` diff --git a/docs/jobs-queue/overview.mdx b/docs/jobs-queue/overview.mdx new file mode 100644 index 00000000000..4e386be70f4 --- /dev/null +++ b/docs/jobs-queue/overview.mdx @@ -0,0 +1,382 @@ +--- +title: Jobs Queue +label: Jobs Queue +order: 10 +desc: Payload provides all you need to run job queues, which are helpful to offload long-running processes into separate workers. +keywords: jobs queue, application framework, typescript, node, react, nextjs +--- + +## Defining tasks + +A task is a simple function that can be executed directly or within a workflow. The difference between tasks and functions is that tasks can be run in the background, and can be retried if they fail. + +Tasks can either be defined within the `jobs.tasks` array in your payload config, or they can be run inline within a workflow. + +### Defining tasks in the config + +Simply add a task to the `jobs.tasks` array in your Payload config. A task consists of the following fields: + +| Option | Description | +| --------------------------- | -------------------------------------------------------------------------------- | +| `slug` | Define a slug-based name for this job. This slug needs to be unique among both tasks and workflows.| +| `handler` | The function that should be responsible for running the job. You can either pass a string-based path to the job function file, or the job function itself. If you are using large dependencies within your job, you might prefer to pass the string path because that will avoid bundling large dependencies in your Next.js app. | +| `inputSchema` | Define the input field schema - payload will generate a type for this schema. | +| `interfaceName` | You can use interfaceName to change the name of the interface that is generated for this task. By default, this is "Task" + the capitalized task slug. 
| +| `outputSchema` | Define the output field schema - payload will generate a type for this schema. | +| `label` | Define a human-friendly label for this task. | +| `onFail` | Function to be executed if the task fails. | +| `onSuccess` | Function to be executed if the task fails. | +| `retries` | Specify the number of times that this step should be retried if it fails. | + +The handler is the function, or a path to the function, that will run once the job picks up this task. The handler function should return an object with an `output` key, which should contain the output of the task. + +Example: + +```ts +export default buildConfig({ + // ... + jobs: { + tasks: [ + { + retries: 2, + slug: 'createPost', + inputSchema: [ + { + name: 'title', + type: 'text', + required: true, + }, + ], + outputSchema: [ + { + name: 'postID', + type: 'text', + required: true, + }, + ], + handler: async ({ input, job, req }) => { + const newPost = await req.payload.create({ + collection: 'post', + req, + data: { + title: input.title, + }, + }) + return { + output: { + postID: newPost.id, + }, + } + }, + } as TaskConfig<'createPost'>, + ] + } +}) +``` + +### Example: defining external tasks + +payload.config.ts: + +```ts +import { fileURLToPath } from 'node:url' +import path from 'path' + +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +export default buildConfig({ + // ... + jobs: { + tasks: [ + { + retries: 2, + slug: 'createPost', + inputSchema: [ + { + name: 'title', + type: 'text', + required: true, + }, + ], + outputSchema: [ + { + name: 'postID', + type: 'text', + required: true, + }, + ], + handler: path.resolve(dirname, 'src/tasks/createPost.ts') + '#createPostHandler', + } + ] + } +}) +``` + +src/tasks/createPost.ts: + +```ts +import type { TaskHandler } from 'payload' + +export const createPostHandler: TaskHandler<'createPost'> = async ({ input, job, req }) => { + const newPost = await req.payload.create({ + collection: 'post', + req, + data: { + title: input.title, + }, + }) + return { + output: { + postID: newPost.id, + }, + } +} +``` + + +## Defining workflows + +There are two types of workflows - JS-based workflows and JSON-based workflows. + +### Defining JS-based workflows + +A JS-based function is a function in which you decide yourself when the tasks should run, by simply calling the `runTask` function. If the job, or any task within the job, fails, the entire function will re-run. + +Tasks that have successfully been completed will simply re-return the cached output without running again, and failed tasks will be re-run. + +Simply add a workflow to the `jobs.wokflows` array in your Payload config. A wokflow consists of the following fields: + +| Option | Description | +| --------------------------- | -------------------------------------------------------------------------------- | +| `slug` | Define a slug-based name for this workflow. This slug needs to be unique among both tasks and workflows.| +| `handler` | The function that should be responsible for running the workflow. You can either pass a string-based path to the workflow function file, or workflow job function itself. If you are using large dependencies within your workflow, you might prefer to pass the string path because that will avoid bundling large dependencies in your Next.js app. | +| `inputSchema` | Define the input field schema - payload will generate a type for this schema. 
| +| `interfaceName` | You can use interfaceName to change the name of the interface that is generated for this workflow. By default, this is "Workflow" + the capitalized workflow slug. | +| `label` | Define a human-friendly label for this workflow. | +| `queue` | Optionally, define the queue name that this workflow should be tied to. Defaults to "default". | + +Example: + +```ts +export default buildConfig({ + // ... + jobs: { + tasks: [ + // ... + ] + workflows: [ + { + slug: 'createPostAndUpdate', + inputSchema: [ + { + name: 'title', + type: 'text', + required: true, + }, + ], + handler: async ({ job, runTask }) => { + const output = await runTask({ + task: 'createPost', + id: '1', + input: { + title: job.input.title, + }, + }) + + await runTask({ + task: 'updatePost', + id: '2', + input: { + post: job.taskStatus.createPost['1'].output.postID, // or output.postID + title: job.input.title + '2', + }, + }) + }, + } as WorkflowConfig<'updatePost'> + ] + } +}) +``` + +#### Running tasks inline + +In order to run tasks inline without predefining them, you can use the `runTaskInline` function. + +The drawbacks of this approach are that tasks cannot be re-used as easily, and the **task data stored in the job** will not be typed. In the following example, the inline task data will be stored on the job under `job.taskStatus.inline['2']` but completely untyped, as types for dynamic tasks like these cannot be generated beforehand. + +Example: + +```ts +export default buildConfig({ + // ... + jobs: { + tasks: [ + // ... + ] + workflows: [ + { + slug: 'createPostAndUpdate', + inputSchema: [ + { + name: 'title', + type: 'text', + required: true, + }, + ], + handler: async ({ job, runTask }) => { + const output = await runTask({ + task: 'createPost', + id: '1', + input: { + title: job.input.title, + }, + }) + + const { newPost } = await runTaskInline({ + task: async ({ req }) => { + const newPost = await req.payload.update({ + collection: 'post', + id: output.postID, + req, + retries: 3, + data: { + title: 'updated!', + }, + }) + return { + output: { + newPost + }, + } + }, + id: '2', + }) + }, + } as WorkflowConfig<'updatePost'> + ] + } +}) +``` + +### Defining JSON-based workflows + +JSON-based workflows are a way to define the tasks the workflow should run in an array. The relationships between the tasks, their run order and their conditions are defined in the JSON object, which allows payload to statically analyze the workflow and will generate more helpful graphs. + +This functionality is not available yet, but it will be available in the future. + +## Queueing workflows and tasks + +In order to queue a workflow or a task (= create them and add them to the queue), you can use the `payload.jobs.queue` function. + +Example: queueing workflows: + +```ts +const createdJob = await payload.jobs.queue({ + workflows: 'createPostAndUpdate', + input: { + title: 'my title', + }, +}) +``` + +Example: queueing tasks: + +```ts +const createdJob = await payload.jobs.queue({ + task: 'createPost', + input: { + title: 'my title', + }, +}) +``` + +## Running workflows and tasks + +Workflows and tasks added to the queue will not run unless a worker picks it up and runs it. 
This can be done in two ways: + +### Endpoint + +Make a fetch request to the `api/payload-jobs/run` endpoint: + +```ts +await fetch('/api/payload-jobs/run', { + method: 'GET', + headers: { + 'Authorization': `JWT ${token}`, + }, +}); +``` + +### Local API + +Run the payload.jobs.run function: + +```ts +const results = await payload.jobs.run() + +// You can customize the queue name by passing it as an argument +await payload.jobs.run({ queue: 'posts' }) +``` + +### Script + +You can run the jobs:run script from the command line: + +```sh +npx payload jobs:run --queue default --limit 10 +``` + +#### Triggering jobs as cronjob + +You can pass the --cron flag to the jobs:run script to run the jobs in a cronjob: + +```sh +npx payload jobs:run --cron "*/5 * * * *" +``` + +### Vercel Cron + +Vercel Cron allows scheduled tasks to be executed automatically by triggering specific endpoints. Below is a step-by-step guide to configuring Vercel Cron for running queued jobs on apps hosted on Vercel: + +1. Add Vercel Cron Configuration: Place a vercel.json file at the root of your project with the following content: + +```json +{ + "crons": [ + { + "path": "/api/payload-jobs/run", + "schedule": "*/5 * * * *" + } + ] +} +``` + +This configuration schedules the endpoint `/api/payload-jobs/run` to be triggered every 5 minutes. This endpoint is added automatically by payload and is responsible for running the queued jobs. + +2. Environment Variable Setup: By default, the endpoint may require a JWT token for authorization. However, Vercel Cron jobs cannot pass JWT tokens. Instead, you can use an environment variable to secure the endpoint: + +Add a new environment variable named `CRON_SECRET` to your Vercel project settings. This should be a random string, ideally 16 characters or longer. + +3. Modify Authentication for Job Running: Adjust the job running authorization logic in your project to accept the `CRON_SECRET` as a valid token. Modify your `payload.config.ts` file as follows: + +```ts +export default buildConfig({ + // Other configurations... + jobs: { + access: { + run: ({ req }: { req: PayloadRequest }): boolean => { + const authHeader = req.headers.get('authorization'); + return authHeader === `Bearer ${process.env.CRON_SECRET}`; + }, + }, + // Other job configurations... + } +}) +``` + +This code snippet ensures that the jobs can only be triggered if the correct `CRON_SECRET` is provided in the authorization header. + +Vercel will automatically make the `CRON_SECRET` environment variable available to the endpoint when triggered by the Vercel Cron, ensuring that the jobs can be run securely. + +After the project is deployed to Vercel, the Vercel Cron job will automatically trigger the `/api/payload-jobs/run` endpoint in the specified schedule, running the queued jobs in the background. 
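
As an alternative to Vercel Cron, a self-hosted deployment could drain the queue with a small worker process that calls `payload.jobs.run` on an interval. Below is a minimal sketch, assuming `@payload-config` resolves to your `payload.config.ts` as in a standard Payload + Next.js setup; the file name, queue name, limit, and 30-second interval are illustrative choices, not defaults.

```ts
// worker.ts (illustrative): a long-running process that repeatedly runs queued jobs.
import config from '@payload-config'
import { getPayload } from 'payload'

const start = async () => {
  const payload = await getPayload({ config })

  // Poll the default queue every 30 seconds and run up to 10 jobs per pass.
  setInterval(() => {
    void payload.jobs
      .run({ limit: 10, queue: 'default' })
      .catch((err) => payload.logger.error({ err }, 'Failed to run queued jobs'))
  }, 30_000)
}

void start()
```

Whether this is preferable to the `jobs:run --cron` bin script depends on the deployment; both keep a process alive and repeatedly invoke the same `payload.jobs.run` entry point.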
diff --git a/packages/payload/package.json b/packages/payload/package.json index bc2d7d265e7..3731fa1bdf9 100644 --- a/packages/payload/package.json +++ b/packages/payload/package.json @@ -92,6 +92,7 @@ "bson-objectid": "2.0.4", "ci-info": "^4.0.0", "console-table-printer": "2.11.2", + "croner": "8.1.2", "dataloader": "2.2.2", "deepmerge": "4.3.1", "file-type": "19.3.0", diff --git a/packages/payload/src/bin/index.ts b/packages/payload/src/bin/index.ts index 97633b514b1..ca3f16bea00 100755 --- a/packages/payload/src/bin/index.ts +++ b/packages/payload/src/bin/index.ts @@ -1,3 +1,4 @@ +import { Cron } from 'croner' import minimist from 'minimist' import { pathToFileURL } from 'node:url' import path from 'path' @@ -5,6 +6,7 @@ import path from 'path' import type { BinScript } from '../config/types.js' import { findConfig } from '../config/find.js' +import { getPayload } from '../index.js' import { generateImportMap } from './generateImportMap/index.js' import { generateTypes } from './generateTypes.js' import { info } from './info.js' @@ -83,6 +85,30 @@ export const bin = async () => { return generateImportMap(config) } + if (script === 'jobs:run') { + const payload = await getPayload({ config }) + const limit = args.limit ? parseInt(args.limit, 10) : undefined + const queue = args.queue ? args.queue : undefined + + if (args.cron) { + Cron(args.cron, async () => { + await payload.jobs.run({ + limit, + queue, + }) + }) + + process.stdin.resume() // Keep the process alive + + return + } else { + return await payload.jobs.run({ + limit, + queue, + }) + } + } + console.error(`Unknown script: "${script}".`) process.exit(1) } diff --git a/packages/payload/src/config/client.ts b/packages/payload/src/config/client.ts index f0d3ea9cf19..4befe1e44d9 100644 --- a/packages/payload/src/config/client.ts +++ b/packages/payload/src/config/client.ts @@ -19,6 +19,7 @@ export type ServerOnlyRootProperties = keyof Pick< | 'endpoints' | 'graphQL' | 'hooks' + | 'jobs' | 'logger' | 'onInit' | 'plugins' @@ -64,6 +65,7 @@ export const serverOnlyConfigProperties: readonly Partial = { admin: { avatar: 'gravatar', @@ -44,6 +47,13 @@ export const defaults: Omit = { }, hooks: {}, i18n: {}, + jobs: { + access: { + run: defaultAccess, + }, + deleteJobOnComplete: true, + depth: 0, + } as JobsConfig, localization: false, maxDepth: 10, routes: { diff --git a/packages/payload/src/config/sanitize.ts b/packages/payload/src/config/sanitize.ts index 010437196c2..a16544f0d64 100644 --- a/packages/payload/src/config/sanitize.ts +++ b/packages/payload/src/config/sanitize.ts @@ -17,6 +17,7 @@ import { InvalidConfiguration } from '../errors/index.js' import { sanitizeGlobals } from '../globals/config/sanitize.js' import { getLockedDocumentsCollection } from '../lockedDocuments/lockedDocumentsCollection.js' import getPreferencesCollection from '../preferences/preferencesCollection.js' +import { getDefaultJobsCollection } from '../queues/config/jobsCollection.js' import checkDuplicateCollections from '../utilities/checkDuplicateCollections.js' import { defaults } from './defaults.js' @@ -66,6 +67,16 @@ export const sanitizeConfig = async (incomingConfig: Config): Promise 0) { + let defaultJobsCollection = getDefaultJobsCollection(config as unknown as Config) + + if (typeof configWithDefaults.jobs.jobsCollectionOverrides === 'function') { + defaultJobsCollection = configWithDefaults.jobs.jobsCollectionOverrides({ + defaultJobsCollection, + }) + } + + configWithDefaults.collections.push(defaultJobsCollection) + } + 
configWithDefaults.collections.push(getLockedDocumentsCollection(config as unknown as Config)) configWithDefaults.collections.push(getPreferencesCollection(config as unknown as Config)) configWithDefaults.collections.push(migrationsCollection) diff --git a/packages/payload/src/config/types.ts b/packages/payload/src/config/types.ts index a1e5af09cae..6189427a78b 100644 --- a/packages/payload/src/config/types.ts +++ b/packages/payload/src/config/types.ts @@ -31,7 +31,7 @@ import type { import type { DatabaseAdapterResult } from '../database/types.js' import type { EmailAdapter, SendEmailOptions } from '../email/types.js' import type { GlobalConfig, Globals, SanitizedGlobalConfig } from '../globals/config/types.js' -import type { Payload, RequestContext, TypedUser } from '../index.js' +import type { JobsConfig, Payload, RequestContext, TypedUser } from '../index.js' import type { PayloadRequest, Where } from '../types/index.js' import type { PayloadLogger } from '../utilities/logger.js' @@ -935,6 +935,10 @@ export type Config = { i18n?: I18nOptions<{} | DefaultTranslationsObject> // loosen the type here to allow for custom translations /** Automatically index all sortable top-level fields in the database to improve sort performance and add database compatibility for Azure Cosmos and similar. */ indexSortableFields?: boolean + /** + * @experimental There may be frequent breaking changes to this API + */ + jobs?: JobsConfig /** * Translate your content to different languages/locales. * @@ -1058,6 +1062,7 @@ export type SanitizedConfig = { endpoints: Endpoint[] globals: SanitizedGlobalConfig[] i18n: Required + jobs: JobsConfig // Redefine here, as the DeepRequired can break its type localization: false | SanitizedLocalizationConfig paths: { config: string diff --git a/packages/payload/src/index.ts b/packages/payload/src/index.ts index 4e3281404d2..d8c8ac7ff14 100644 --- a/packages/payload/src/index.ts +++ b/packages/payload/src/index.ts @@ -73,6 +73,7 @@ import localOperations from './collections/operations/local/index.js' import { consoleEmailAdapter } from './email/consoleEmailAdapter.js' import { fieldAffectsData } from './fields/config/types.js' import localGlobalOperations from './globals/operations/local/index.js' +import { getJobsLocalAPI } from './queues/localAPI.js' import { getLogger } from './utilities/logger.js' import { serverInit as serverInitTelemetry } from './utilities/telemetry/events/serverInit.js' import { traverseFields } from './utilities/traverseFields.js' @@ -113,6 +114,19 @@ export interface GeneratedTypes { globalsUntyped: { [slug: string]: JsonObject } + jobsUntyped: { + tasks: { + [slug: string]: { + input?: JsonObject + output?: JsonObject + } + } + workflows: { + [slug: string]: { + input: JsonObject + } + } + } localeUntyped: null | string userUntyped: User } @@ -146,7 +160,7 @@ export type TypedGlobal = ResolveGlobalType export type TypedGlobalSelect = ResolveGlobalSelectType // Extract string keys from the type -type StringKeyOf = Extract +export type StringKeyOf = Extract // Define the types for slugs using the appropriate collections and globals export type CollectionSlug = StringKeyOf @@ -173,6 +187,10 @@ export type TypedUser = ResolveUserType type ResolveAuthOperationsType = 'auth' extends keyof T ? T['auth'] : T['authUntyped'] export type TypedAuthOperations = ResolveAuthOperationsType +// @ts-expect-error +type ResolveJobOperationsType = 'jobs' extends keyof T ? 
T['jobs'] : T['jobsUntyped'] +export type TypedJobs = ResolveJobOperationsType + const filename = fileURLToPath(import.meta.url) const dirname = path.dirname(filename) @@ -344,6 +362,8 @@ export class BasePayload { importMap: ImportMap + jobs = getJobsLocalAPI(this) + logger: Logger login = async ( @@ -1052,6 +1072,27 @@ export type { PreferenceUpdateRequest, TabsPreferences, } from './preferences/types.js' +export type { JobsConfig, RunJobAccess, RunJobAccessArgs } from './queues/config/types/index.js' +export type { + RunTaskFunction, + TaskConfig, + TaskHandler, + TaskHandlerArgs, + TaskHandlerResult, + TaskHandlerResults, + TaskInput, + TaskOutput, + TaskType, +} from './queues/config/types/taskTypes.js' +export type { + BaseJob, + JobTaskStatus, + RunningJob, + SingleTaskStatus, + WorkflowConfig, + WorkflowHandler, + WorkflowTypes, +} from './queues/config/types/workflowTypes.js' export { getLocalI18n } from './translations/getLocalI18n.js' export * from './types/index.js' export { getFileByPath } from './uploads/getFileByPath.js' diff --git a/packages/payload/src/queues/config/generateJobsJSONSchemas.ts b/packages/payload/src/queues/config/generateJobsJSONSchemas.ts new file mode 100644 index 00000000000..b9744c1a782 --- /dev/null +++ b/packages/payload/src/queues/config/generateJobsJSONSchemas.ts @@ -0,0 +1,173 @@ +import type { JSONSchema4 } from 'json-schema' + +import type { SanitizedConfig } from '../../config/types.js' +import type { JobsConfig } from './types/index.js' + +import { fieldsToJSONSchema } from '../../utilities/configToJSONSchema.js' + +export function generateJobsJSONSchemas( + config: SanitizedConfig, + jobsConfig: JobsConfig, + interfaceNameDefinitions: Map, + /** + * Used for relationship fields, to determine whether to use a string or number type for the ID. + * While there is a default ID field type set by the db adapter, they can differ on a collection-level + * if they have custom ID fields. 
+ */ + collectionIDFieldTypes: { [key: string]: 'number' | 'string' }, +): { + definitions?: Map + properties?: { tasks: JSONSchema4 } +} { + const properties: { tasks: JSONSchema4; workflows: JSONSchema4 } = { + tasks: {}, + workflows: {}, + } + const definitions: Map = new Map() + + if (jobsConfig?.tasks?.length) { + for (const task of jobsConfig.tasks) { + const fullTaskJsonSchema: JSONSchema4 = { + type: 'object', + additionalProperties: false, + properties: { + input: {}, + output: {}, + }, + required: [], + } + if (task?.inputSchema?.length) { + const inputJsonSchema = fieldsToJSONSchema( + collectionIDFieldTypes, + task.inputSchema, + interfaceNameDefinitions, + config, + ) + + const fullInputJsonSchema: JSONSchema4 = { + type: 'object', + additionalProperties: false, + properties: inputJsonSchema.properties, + required: inputJsonSchema.required, + } + + fullTaskJsonSchema.properties.input = fullInputJsonSchema + ;(fullTaskJsonSchema.required as string[]).push('input') + } + if (task?.outputSchema?.length) { + const outputJsonSchema = fieldsToJSONSchema( + collectionIDFieldTypes, + task.outputSchema, + interfaceNameDefinitions, + config, + ) + + const fullOutputJsonSchema: JSONSchema4 = { + type: 'object', + additionalProperties: false, + properties: outputJsonSchema.properties, + required: outputJsonSchema.required, + } + + fullTaskJsonSchema.properties.output = fullOutputJsonSchema + ;(fullTaskJsonSchema.required as string[]).push('output') + } + + const normalizedTaskSlug = task.slug[0].toUpperCase() + task.slug.slice(1) + + definitions.set(task.interfaceName ?? `Task${normalizedTaskSlug}`, fullTaskJsonSchema) + } + // Now add properties.tasks definition that references the types in definitions keyed by task slug: + properties.tasks = { + type: 'object', + additionalProperties: false, + properties: { + ...Object.fromEntries( + jobsConfig.tasks.map((task) => { + const normalizedTaskSlug = task.slug[0].toUpperCase() + task.slug.slice(1) + + const toReturn: JSONSchema4 = { + $ref: task.interfaceName + ? `#/definitions/${task.interfaceName}` + : `#/definitions/Task${normalizedTaskSlug}`, + } + + return [task.slug, toReturn] + }), + ), + inline: { + type: 'object', + additionalProperties: false, + properties: { + input: {}, + output: {}, + }, + required: ['input', 'output'], + }, + }, + required: jobsConfig.tasks.map((task) => task.slug), + } + } + + if (jobsConfig?.workflows?.length) { + for (const workflow of jobsConfig.workflows) { + const fullWorkflowJsonSchema: JSONSchema4 = { + type: 'object', + additionalProperties: false, + properties: { + input: {}, + }, + required: [], + } + + if (workflow?.inputSchema?.length) { + const inputJsonSchema = fieldsToJSONSchema( + collectionIDFieldTypes, + workflow.inputSchema, + interfaceNameDefinitions, + config, + ) + + const fullInputJsonSchema: JSONSchema4 = { + type: 'object', + additionalProperties: false, + properties: inputJsonSchema.properties, + required: inputJsonSchema.required, + } + + fullWorkflowJsonSchema.properties.input = fullInputJsonSchema + ;(fullWorkflowJsonSchema.required as string[]).push('input') + } + const normalizedWorkflowSlug = workflow.slug[0].toUpperCase() + workflow.slug.slice(1) + + definitions.set( + workflow.interfaceName ?? 
`Workflow${normalizedWorkflowSlug}`, + fullWorkflowJsonSchema, + ) + + properties.workflows = { + type: 'object', + additionalProperties: false, + properties: Object.fromEntries( + jobsConfig.workflows.map((workflow) => { + const normalizedWorkflowSlug = workflow.slug[0].toUpperCase() + workflow.slug.slice(1) + + const toReturn: JSONSchema4 = { + $ref: workflow.interfaceName + ? `#/definitions/${workflow.interfaceName}` + : `#/definitions/Workflow${normalizedWorkflowSlug}`, + } + + return [workflow.slug, toReturn] + }), + ), + required: jobsConfig.tasks.map((task) => task.slug), + } + } + } + + return { + definitions, + properties, + } +} diff --git a/packages/payload/src/queues/config/jobsCollection.ts b/packages/payload/src/queues/config/jobsCollection.ts new file mode 100644 index 00000000000..b8bc6ab5354 --- /dev/null +++ b/packages/payload/src/queues/config/jobsCollection.ts @@ -0,0 +1,206 @@ +import type { CollectionConfig } from '../../collections/config/types.js' +import type { Config } from '../../config/types.js' + +import { runJobsEndpoint } from '../restEndpointRun.js' +import { getJobTaskStatus } from '../utilities/getJobTaskStatus.js' + +export const getDefaultJobsCollection: (config: Config) => CollectionConfig | null = (config) => { + if (!Array.isArray(config?.jobs?.workflows)) { + return null + } + + const workflowSlugs: Set = new Set() + const taskSlugs: Set = new Set(['inline']) + + const queueNames: Set = new Set(['default']) + + config.jobs.workflows.forEach((workflow) => { + workflowSlugs.add(workflow.slug) + + if (workflow.queue) { + queueNames.add(workflow.queue) + } + }) + + config.jobs.tasks.forEach((task) => { + if (workflowSlugs.has(task.slug)) { + throw new Error( + `Task slug "${task.slug}" is already used by a workflow. 
No tasks are allowed to have the same slug as a workflow.`, + ) + } + taskSlugs.add(task.slug) + }) + + const jobsCollection: CollectionConfig = { + slug: 'payload-jobs', + admin: { + group: 'System', + hidden: true, + }, + endpoints: [runJobsEndpoint], + fields: [ + { + name: 'input', + type: 'json', + admin: { + description: 'Input data provided to the job', + }, + }, + { + name: 'taskStatus', + type: 'json', + virtual: true, + }, + { + type: 'tabs', + tabs: [ + { + fields: [ + { + name: 'completedAt', + type: 'date', + index: true, + }, + { + name: 'totalTried', + type: 'number', + defaultValue: 0, + index: true, + }, + { + name: 'hasError', + type: 'checkbox', + admin: { + description: 'If hasError is true this job will not be retried', + }, + defaultValue: false, + index: true, + }, + { + name: 'error', + type: 'json', + admin: { + condition: (data) => data.hasError, + description: 'If hasError is true, this is the error that caused it', + }, + }, + { + name: 'log', + type: 'array', + admin: { + description: 'Task execution log', + }, + fields: [ + { + name: 'executedAt', + type: 'date', + required: true, + }, + { + name: 'completedAt', + type: 'date', + required: true, + }, + { + name: 'taskSlug', + type: 'select', + options: [...taskSlugs], + required: true, + }, + { + name: 'taskID', + type: 'text', + required: true, + }, + { + name: 'input', + type: 'json', + }, + { + name: 'output', + type: 'json', + }, + { + name: 'state', + type: 'radio', + options: ['failed', 'succeeded'], + required: true, + }, + { + name: 'error', + type: 'json', + admin: { + condition: (_, data) => data.state === 'failed', + }, + required: true, + }, + ], + }, + ], + label: 'Status', + }, + ], + }, + { + name: 'workflowSlug', + type: 'select', + admin: { + position: 'sidebar', + }, + index: true, + options: [...workflowSlugs], + required: false, + }, + { + name: 'taskSlug', + type: 'select', + admin: { + position: 'sidebar', + }, + index: true, + options: [...taskSlugs], + required: false, + }, + { + name: 'queue', + type: 'select', + admin: { + position: 'sidebar', + }, + defaultValue: 'default', + index: true, + options: [...queueNames], + }, + { + name: 'waitUntil', + type: 'date', + index: true, + }, + { + name: 'processing', + type: 'checkbox', + admin: { + position: 'sidebar', + }, + defaultValue: false, + index: true, + }, + ], + hooks: { + afterRead: [ + ({ doc, req }) => { + // This hook is used to add the virtual `tasks` field to the document, that is computed from the `log` field + + doc.taskStatus = getJobTaskStatus({ + jobLog: doc.log, + tasksConfig: req.payload.config.jobs.tasks, + }) + + return doc + }, + ], + }, + lockDocuments: false, + } + return jobsCollection +} diff --git a/packages/payload/src/queues/config/types/index.ts b/packages/payload/src/queues/config/types/index.ts new file mode 100644 index 00000000000..33704a62813 --- /dev/null +++ b/packages/payload/src/queues/config/types/index.ts @@ -0,0 +1,45 @@ +import type { CollectionConfig } from '../../../index.js' +import type { PayloadRequest } from '../../../types/index.js' +import type { TaskConfig } from './taskTypes.js' +import type { WorkflowConfig } from './workflowTypes.js' + +export type RunJobAccessArgs = { + req: PayloadRequest +} + +export type RunJobAccess = (args: RunJobAccessArgs) => boolean | Promise + +export type JobsConfig = { + /** + * Specify access control to determine who can interact with jobs. + */ + access?: { + /** + * By default, all logged-in users can trigger jobs. 
+ */ + run?: RunJobAccess + } + /** + * Determine whether or not to delete a job after it has successfully completed. + */ + deleteJobOnComplete?: boolean + /** + * Specify depth for retrieving jobs from the queue. + * This should be as low as possible in order for job retrieval + * to be as efficient as possible. Defaults to 0. + */ + depth?: number + /** + * Override any settings on the default Jobs collection. Accepts the default collection and allows you to return + * a new collection. + */ + jobsCollectionOverrides?: (args: { defaultJobsCollection: CollectionConfig }) => CollectionConfig + /** + * Define all possible tasks here + */ + tasks: TaskConfig[] + /** + * Define all the workflows here. Workflows orchestrate the flow of multiple tasks. + */ + workflows: WorkflowConfig[] +} diff --git a/packages/payload/src/queues/config/types/taskTypes.ts b/packages/payload/src/queues/config/types/taskTypes.ts new file mode 100644 index 00000000000..dc5753c2902 --- /dev/null +++ b/packages/payload/src/queues/config/types/taskTypes.ts @@ -0,0 +1,171 @@ +import type { Field, PayloadRequest, StringKeyOf, TypedJobs } from '../../../index.js' +import type { RunningJob, RunningJobSimple } from './workflowTypes.js' + +export type TaskInputOutput = { + input: object + output: object +} +export type TaskHandlerResult< + TTaskSlugOrInputOutput extends keyof TypedJobs['tasks'] | TaskInputOutput, +> = { + output: TTaskSlugOrInputOutput extends keyof TypedJobs['tasks'] + ? TypedJobs['tasks'][TTaskSlugOrInputOutput]['output'] + : TTaskSlugOrInputOutput extends TaskInputOutput // Check if it's actually TaskInputOutput type + ? TTaskSlugOrInputOutput['output'] + : never + state?: 'failed' | 'succeeded' +} + +export type TaskHandlerArgs< + TTaskSlugOrInputOutput extends keyof TypedJobs['tasks'] | TaskInputOutput, + TWorkflowSlug extends keyof TypedJobs['workflows'] = string, +> = { + input: TTaskSlugOrInputOutput extends keyof TypedJobs['tasks'] + ? TypedJobs['tasks'][TTaskSlugOrInputOutput]['input'] + : TTaskSlugOrInputOutput extends TaskInputOutput // Check if it's actually TaskInputOutput type + ? TTaskSlugOrInputOutput['input'] + : never + job: RunningJob + req: PayloadRequest +} + +/** + * Inline tasks in JSON workflows have no input, as they can just get the input from job.taskStatus + */ +export type TaskHandlerArgsNoInput = { + job: RunningJobSimple + req: PayloadRequest +} + +export type TaskHandler< + TTaskSlugOrInputOutput extends keyof TypedJobs['tasks'] | TaskInputOutput, + TWorkflowSlug extends keyof TypedJobs['workflows'] = string, +> = ( + args: TaskHandlerArgs, +) => Promise> | TaskHandlerResult + +export type TaskType = StringKeyOf + +// Extracts the type of `input` corresponding to each task +export type TaskInput = TypedJobs['tasks'][T]['input'] + +export type TaskOutput = TypedJobs['tasks'][T]['output'] + +export type TaskHandlerResults = { + [TTaskSlug in keyof TypedJobs['tasks']]: { + [id: string]: TaskHandlerResult + } +} + +// Helper type to create correct argument type for the function corresponding to each task. 
+export type RunTaskFunctionArgs = { + input?: TaskInput + retries?: number | RetryConfig +} + +export type RunTaskFunction = ( + taskID: string, + taskArgs?: RunTaskFunctionArgs, +) => Promise> + +export type RunTaskFunctions = { + [TTaskSlug in keyof TypedJobs['tasks']]: RunTaskFunction +} + +export type RunInlineTaskFunction = ( + taskID: string, + taskArgs: { + input?: TTaskInput + retries?: number | RetryConfig + // This is the same as TaskHandler, but typed out explicitly in order to improve type inference + task: (args: { input: TTaskInput; job: RunningJob; req: PayloadRequest }) => + | { + output: TTaskOutput + state?: 'failed' | 'succeeded' + } + | Promise<{ output: TTaskOutput; state?: 'failed' | 'succeeded' }> + }, +) => Promise + +export type RetryConfig = { + attempts: number + /** + * The backoff strategy to use when retrying the task. This determines how long to wait before retrying the task. + * + * If this is set on a single task, the longest backoff time of a task will determine the time until the entire workflow is retried. + */ + backoff?: { + /** + * Base delay between running jobs in ms + */ + delay?: number + /** + * @default fixed + * + * The backoff strategy to use when retrying the task. This determines how long to wait before retrying the task. + * If fixed (default) is used, the delay will be the same between each retry. + * + * If exponential is used, the delay will increase exponentially with each retry. + * + * @example + * delay = 1000 + * attempts = 3 + * type = 'fixed' + * + * The task will be retried 3 times with a delay of 1000ms between each retry. + * + * @example + * delay = 1000 + * attempts = 3 + * type = 'exponential' + * + * The task will be retried 3 times with a delay of 1000ms, 2000ms, and 4000ms between each retry. + */ + type: 'exponential' | 'fixed' + } +} + +export type TaskConfig< + TTaskSlugOrInputOutput extends keyof TypedJobs['tasks'] | TaskInputOutput = TaskType, +> = { + /** + * The function that should be responsible for running the job. + * You can either pass a string-based path to the job function file, or the job function itself. + * + * If you are using large dependencies within your job, you might prefer to pass the string path + * because that will avoid bundling large dependencies in your Next.js app. + */ + handler: string | TaskHandler + /** + * Define the input field schema - payload will generate a type for this schema. + */ + inputSchema?: Field[] + /** + * You can use interfaceName to change the name of the interface that is generated for this task. By default, this is "Task" + the capitalized task slug. + */ + interfaceName?: string + /** + * Define a human-friendly label for this task. + */ + label?: string + /** + * Function to be executed if the task fails. + */ + onFail?: () => Promise | void + /** + * Function to be executed if the task succeeds. + */ + onSuccess?: () => Promise | void + /** + * Define the output field schema - payload will generate a type for this schema. + */ + outputSchema?: Field[] + /** + * Specify the number of times that this step should be retried if it fails. + */ + retries?: number | RetryConfig + /** + * Define a slug-based name for this job. This slug needs to be unique among both tasks and workflows. + */ + slug: TTaskSlugOrInputOutput extends keyof TypedJobs['tasks'] ? 
TTaskSlugOrInputOutput : string +} diff --git a/packages/payload/src/queues/config/types/workflowJSONTypes.ts b/packages/payload/src/queues/config/types/workflowJSONTypes.ts new file mode 100644 index 00000000000..00174685155 --- /dev/null +++ b/packages/payload/src/queues/config/types/workflowJSONTypes.ts @@ -0,0 +1,36 @@ +import type { RunningJob, TaskHandlerResult, TypedJobs } from '../../../index.js' +import type { RetryConfig, TaskHandlerArgsNoInput } from './taskTypes.js' + +export type WorkflowStep< + TTaskSlug extends keyof TypedJobs['tasks'], + TWorkflowSlug extends keyof TypedJobs['workflows'], +> = { + /** + * If this step is completed, the workflow will be marked as completed + */ + completesJob?: boolean + condition?: (args: { job: RunningJob }) => boolean + /** + * Each task needs to have a unique ID to track its status + */ + id: string + retries?: number | RetryConfig +} & ( + | { + inlineTask?: ( + args: TaskHandlerArgsNoInput, + ) => Promise> | TaskHandlerResult + } + | { + input: (args: { job: RunningJob }) => TypedJobs['tasks'][TTaskSlug]['input'] + task: TTaskSlug + } +) + +type AllWorkflowSteps = { + [TTaskSlug in keyof TypedJobs['tasks']]: WorkflowStep +}[keyof TypedJobs['tasks']] + +export type WorkflowJSON = Array< + AllWorkflowSteps +> diff --git a/packages/payload/src/queues/config/types/workflowTypes.ts b/packages/payload/src/queues/config/types/workflowTypes.ts new file mode 100644 index 00000000000..fc58f5764c9 --- /dev/null +++ b/packages/payload/src/queues/config/types/workflowTypes.ts @@ -0,0 +1,127 @@ +import type { Field } from '../../../fields/config/types.js' +import type { PayloadRequest, StringKeyOf, TypedCollection, TypedJobs } from '../../../index.js' +import type { + RetryConfig, + RunInlineTaskFunction, + RunTaskFunctions, + TaskInput, + TaskOutput, + TaskType, +} from './taskTypes.js' +import type { WorkflowJSON } from './workflowJSONTypes.js' + +export type JobLog = { + completedAt: string + error?: unknown + executedAt: string + input?: any + output?: any + state: 'failed' | 'succeeded' + taskID: string + taskSlug: string +} + +export type BaseJob = { + completedAt?: string + error?: unknown + hasError?: boolean + id: number | string + input?: any + log: JobLog[] + processing?: boolean + queue: string + taskSlug?: string + taskStatus?: JobTaskStatus + totalTried: number + waitUntil?: string + workflowSlug?: string +} + +export type WorkflowTypes = StringKeyOf + +// TODO: Type job.taskStatus once available - for JSON-defined workflows +export type RunningJob = { + input: TWorkflowSlugOrInput extends keyof TypedJobs['workflows'] + ? 
TypedJobs['workflows'][TWorkflowSlugOrInput]['input'] + : TWorkflowSlugOrInput + taskStatus: JobTaskStatus +} & Omit + +export type RunningJobSimple = { + input: TWorkflowInput +} & TypedCollection['payload-jobs'] + +// Simplified version of RunningJob that doesn't break TypeScript (TypeScript seems to stop evaluating RunningJob when it's too complex) +export type RunningJobFromTask = { + input: TypedJobs['tasks'][TTaskSlug]['input'] +} & TypedCollection['payload-jobs'] + +export type WorkflowHandler = + (args: { + inlineTask: RunInlineTaskFunction + job: RunningJob + req: PayloadRequest + tasks: RunTaskFunctions + }) => Promise + +export type SingleTaskStatus = { + complete: boolean + input: TaskInput + output: TaskOutput + taskSlug: TaskType + totalTried: number +} + +/** + * Task IDs mapped to their status + */ +export type JobTaskStatus = { + // Wrap in taskSlug to improve typing + [taskSlug in TaskType]: { + [taskID: string]: SingleTaskStatus + } +} + +export type WorkflowConfig = { + /** + * You can either pass a string-based path to the workflow function file, or the workflow function itself. + * + * If you are using large dependencies within your workflow control flow, you might prefer to pass the string path + * because that will avoid bundling large dependencies in your Next.js app. + * + * + */ + handler: + | string + | WorkflowHandler + | WorkflowJSON + /** + * Define the input field schema - payload will generate a type for this schema. + */ + inputSchema?: Field[] + /** + * You can use interfaceName to change the name of the interface that is generated for this workflow. By default, this is "Workflow" + the capitalized workflow slug. + */ + interfaceName?: string + /** + * Define a human-friendly label for this workflow. + */ + label?: string + /** + * Optionally, define the queue name that this workflow should be tied to. + * Defaults to "default". + */ + queue?: string + /** + * Specify the number of times that this workflow should be retried if it fails for any reason. + */ + retries?: number | RetryConfig + /** + * Define a slug-based name for this job. + */ + slug: TWorkflowSlugOrInput extends keyof TypedJobs['workflows'] ? TWorkflowSlugOrInput : string +} + +type AllWorkflowConfigs = { + [TWorkflowSlug in keyof TypedJobs['workflows']]: WorkflowConfig +}[keyof TypedJobs['workflows']] diff --git a/packages/payload/src/queues/localAPI.ts b/packages/payload/src/queues/localAPI.ts new file mode 100644 index 00000000000..7ffa1a5069d --- /dev/null +++ b/packages/payload/src/queues/localAPI.ts @@ -0,0 +1,66 @@ +import type { RunningJobFromTask } from './config/types/workflowTypes.js' + +import { + createLocalReq, + type Payload, + type PayloadRequest, + type RunningJob, + type TypedJobs, +} from '../index.js' +import { runJobs } from './operations/runJobs/index.js' + +export const getJobsLocalAPI = (payload: Payload) => ({ + queue: async < + // eslint-disable-next-line @typescript-eslint/no-duplicate-type-constituents + TTaskOrWorkflowSlug extends keyof TypedJobs['tasks'] | keyof TypedJobs['workflows'], + >( + args: + | { + input: TypedJobs['tasks'][TTaskOrWorkflowSlug]['input'] + req?: PayloadRequest + // TTaskOrWorkflowlug with keyof TypedJobs['workflows'] removed: + task: TTaskOrWorkflowSlug extends keyof TypedJobs['tasks'] ? TTaskOrWorkflowSlug : never + workflow?: never + } + | { + input: TypedJobs['workflows'][TTaskOrWorkflowSlug]['input'] + req?: PayloadRequest + task?: never + workflow: TTaskOrWorkflowSlug extends keyof TypedJobs['workflows'] + ? 
TTaskOrWorkflowSlug + : never + }, + ): Promise< + TTaskOrWorkflowSlug extends keyof TypedJobs['workflows'] + ? RunningJob + : RunningJobFromTask + > => { + return (await payload.create({ + collection: 'payload-jobs', + data: { + input: args.input, + taskSlug: 'task' in args ? args.task : undefined, + workflowSlug: 'workflow' in args ? args.workflow : undefined, + }, + req: args.req, + })) as TTaskOrWorkflowSlug extends keyof TypedJobs['workflows'] + ? RunningJob + : RunningJobFromTask // Type assertion is still needed here + }, + + run: async (args?: { + limit?: number + overrideAccess?: boolean + queue?: string + req?: PayloadRequest + }): Promise> => { + const newReq: PayloadRequest = args?.req ?? (await createLocalReq({}, payload)) + const result = await runJobs({ + limit: args?.limit, + overrideAccess: args?.overrideAccess !== false, + queue: args?.queue, + req: newReq, + }) + return result + }, +}) diff --git a/packages/payload/src/queues/operations/runJobs/index.ts b/packages/payload/src/queues/operations/runJobs/index.ts new file mode 100644 index 00000000000..cfca8032d0a --- /dev/null +++ b/packages/payload/src/queues/operations/runJobs/index.ts @@ -0,0 +1,228 @@ +import type { PaginatedDocs } from '../../../database/types.js' +import type { PayloadRequest, Where } from '../../../types/index.js' +import type { WorkflowJSON } from '../../config/types/workflowJSONTypes.js' +import type { + BaseJob, + WorkflowConfig, + WorkflowHandler, + WorkflowTypes, +} from '../../config/types/workflowTypes.js' +import type { RunJobResult } from './runJob/index.js' + +import { Forbidden } from '../../../errors/Forbidden.js' +import isolateObjectProperty from '../../../utilities/isolateObjectProperty.js' +import { getUpdateJobFunction } from './runJob/getUpdateJobFunction.js' +import { importHandlerPath } from './runJob/importHandlerPath.js' +import { runJob } from './runJob/index.js' +import { runJSONJob } from './runJSONJob/index.js' + +export type RunJobsArgs = { + limit?: number + overrideAccess?: boolean + queue?: string + req: PayloadRequest +} + +export type RunJobsResult = { + jobStatus?: Record + /** + * If this is false, there for sure are no jobs remaining, regardless of the limit + */ + noJobsRemaining?: boolean + /** + * Out of the jobs that were queried & processed (within the set limit), how many are remaining and retryable? 
+ */ + remainingJobsFromQueried: number +} + +export const runJobs = async ({ + limit = 10, + overrideAccess, + queue, + req, +}: RunJobsArgs): Promise => { + if (!overrideAccess) { + const hasAccess = await req.payload.config.jobs.access.run({ req }) + if (!hasAccess) { + throw new Forbidden(req.t) + } + } + const where: Where = { + and: [ + { + completedAt: { + exists: false, + }, + }, + { + hasError: { + not_equals: true, + }, + }, + { + processing: { + equals: false, + }, + }, + { + or: [ + { + waitUntil: { + exists: false, + }, + }, + { + waitUntil: { + less_than: new Date().toISOString(), + }, + }, + ], + }, + ], + } + + if (queue) { + where.and.push({ + queue: { + equals: queue, + }, + }) + } + + // Find all jobs and ensure we set job to processing: true as early as possible to reduce the chance of + // the same job being picked up by another worker + const jobsQuery = (await req.payload.update({ + collection: 'payload-jobs', + data: { + processing: true, + seenByWorker: true, + }, + depth: req.payload.config.jobs.depth, + disableTransaction: true, + limit, + showHiddenFields: true, + where, + })) as unknown as PaginatedDocs + + /** + * Just for logging purposes, we want to know how many jobs are new and how many are existing (= already been tried). + * This is only for logs - in the end we still want to run all jobs, regardless of whether they are new or existing. + */ + const { newJobs } = jobsQuery.docs.reduce( + (acc, job) => { + if (job.totalTried > 0) { + acc.existingJobs.push(job) + } else { + acc.newJobs.push(job) + } + return acc + }, + { existingJobs: [], newJobs: [] }, + ) + + if (!jobsQuery.docs.length) { + return { + noJobsRemaining: true, + remainingJobsFromQueried: 0, + } + } + + if (jobsQuery?.docs?.length) { + req.payload.logger.info(`Running ${jobsQuery.docs.length} jobs.`) + } + + const jobPromises = jobsQuery.docs.map(async (job) => { + if (!job.workflowSlug && !job.taskSlug) { + throw new Error('Job must have either a workflowSlug or a taskSlug') + } + const jobReq = isolateObjectProperty(req, 'transactionID') + + const workflowConfig: WorkflowConfig = job.workflowSlug + ? 
req.payload.config.jobs.workflows.find(({ slug }) => slug === job.workflowSlug) + : { + slug: 'singleTask', + handler: async ({ job, tasks }) => { + await tasks[job.taskSlug as string]('1', { + input: job.input, + }) + }, + } + + if (!workflowConfig) { + return null // Skip jobs with no workflow configuration + } + + const updateJob = getUpdateJobFunction(job, jobReq) + + // the runner will either be passed to the config + // OR it will be a path, which we will need to import via eval to avoid + // Next.js compiler dynamic import expression errors + let workflowHandler: WorkflowHandler | WorkflowJSON + + if ( + typeof workflowConfig.handler === 'function' || + (typeof workflowConfig.handler === 'object' && Array.isArray(workflowConfig.handler)) + ) { + workflowHandler = workflowConfig.handler + } else { + workflowHandler = await importHandlerPath(workflowConfig.handler) + + if (!workflowHandler) { + const errorMessage = `Can't find runner while importing with the path ${workflowConfig.handler} in job type ${job.workflowSlug}.` + req.payload.logger.error(errorMessage) + + await updateJob({ + error: { + error: errorMessage, + }, + hasError: true, + processing: false, + }) + + return + } + } + + if (typeof workflowHandler === 'function') { + const result = await runJob({ + job, + req: jobReq, + updateJob, + workflowConfig, + workflowHandler, + }) + return { id: job.id, result } + } else { + const result = await runJSONJob({ + job, + req: jobReq, + updateJob, + workflowConfig, + workflowHandler, + }) + return { id: job.id, result } + } + }) + + const resultsArray = await Promise.all(jobPromises) + const resultsObject: RunJobsResult['jobStatus'] = resultsArray.reduce((acc, cur) => { + if (cur !== null) { + // Check if there's a valid result to include + acc[cur.id] = cur.result + } + return acc + }, {}) + + let remainingJobsFromQueried = 0 + for (const jobID in resultsObject) { + const jobResult = resultsObject[jobID] + if (jobResult.status === 'error') { + remainingJobsFromQueried++ // Can be retried + } + } + + return { + jobStatus: resultsObject, + remainingJobsFromQueried, + } +} diff --git a/packages/payload/src/queues/operations/runJobs/runJSONJob/index.ts b/packages/payload/src/queues/operations/runJobs/runJSONJob/index.ts new file mode 100644 index 00000000000..3009e672f62 --- /dev/null +++ b/packages/payload/src/queues/operations/runJobs/runJSONJob/index.ts @@ -0,0 +1,159 @@ +import type { PayloadRequest } from '../../../../types/index.js' +import type { WorkflowJSON, WorkflowStep } from '../../../config/types/workflowJSONTypes.js' +import type { + BaseJob, + RunningJob, + WorkflowConfig, + WorkflowTypes, +} from '../../../config/types/workflowTypes.js' +import type { UpdateJobFunction } from '../runJob/getUpdateJobFunction.js' +import type { JobRunStatus } from '../runJob/index.js' + +import { getRunTaskFunction, type RunTaskFunctionState } from '../runJob/getRunTaskFunction.js' +import { handleWorkflowError } from '../runJob/handleWorkflowError.js' + +type Args = { + job: BaseJob + req: PayloadRequest + updateJob: UpdateJobFunction + workflowConfig: WorkflowConfig + workflowHandler: WorkflowJSON +} + +export type RunJSONJobResult = { + status: JobRunStatus +} + +export const runJSONJob = async ({ + job, + req, + updateJob, + workflowConfig, + workflowHandler, +}: Args): Promise => { + // Object so that we can pass contents by reference, not value. + // We want any mutations to be reflected in here. 
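+ // Note: only the properties of `state` are mutated below; reassigning the binding itself
+ // would break the by-reference sharing with the task runner functions.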
+ const state: RunTaskFunctionState = { + reachedMaxRetries: false, + } + + const stepsToRun: WorkflowStep[] = [] + + for (const step of workflowHandler) { + if ('task' in step) { + if (job?.taskStatus?.[step.task]?.[step.id]?.complete) { + continue + } + } else { + if (job?.taskStatus?.['inline']?.[step.id]?.complete) { + continue + } + } + if (step.condition && !step.condition({ job: job as RunningJob })) { + // TODO: Improve RunningJob type see todo below + continue + } + stepsToRun.push(step) + } + + const tasks = getRunTaskFunction(state, job, workflowConfig, req, false, updateJob) + const inlineTask = getRunTaskFunction(state, job, workflowConfig, req, true, updateJob) + + // Run the job + let hasFinalError = false + let error: Error | undefined + try { + await Promise.all( + stepsToRun.map(async (step) => { + if ('task' in step) { + await tasks[step.task](step.id, { + input: step.input ? step.input({ job: job as RunningJob }) : {}, // TODO: Type better. We should use RunningJob anywhere and make TypedCollection['payload-jobs'] be BaseJob if type not generated + retries: step.retries, + }) + } else { + await inlineTask(step.id, { + retries: step.retries, + task: step.inlineTask as any, // TODO: Fix type + }) + } + }), + ) + } catch (err) { + const errorResult = handleWorkflowError({ + error: err, + job, + req, + state, + workflowConfig, + }) + error = err + hasFinalError = errorResult.hasFinalError + } + + // Check if workflow has completed + let workflowCompleted = false + for (const [slug, map] of Object.entries(job.taskStatus)) { + for (const [id, taskStatus] of Object.entries(map)) { + if (taskStatus.complete) { + const step = workflowHandler.find((step) => { + if ('task' in step) { + return step.task === slug && step.id === id + } else { + return step.id === id && slug === 'inline' + } + }) + if (step.completesJob) { + workflowCompleted = true + break + } + } + } + } + + if (workflowCompleted) { + if (error) { + // Tasks update the job if they error - but in case there is an unhandled error (e.g. in the workflow itself, not in a task) + // we need to ensure the job is updated to reflect the error + await updateJob({ + completedAt: new Date().toISOString(), + error: hasFinalError ? error : undefined, + hasError: hasFinalError, // If reached max retries => final error. If hasError is true this job will not be retried + processing: false, + totalTried: (job.totalTried ?? 0) + 1, + }) + } else { + await updateJob({ + completedAt: new Date().toISOString(), + processing: false, + totalTried: (job.totalTried ?? 0) + 1, + }) + } + + return { + status: 'success', + } + } else { + if (error) { + // Tasks update the job if they error - but in case there is an unhandled error (e.g. in the workflow itself, not in a task) + // we need to ensure the job is updated to reflect the error + await updateJob({ + error: hasFinalError ? error : undefined, + hasError: hasFinalError, // If reached max retries => final error. If hasError is true this job will not be retried + processing: false, + totalTried: (job.totalTried ?? 0) + 1, + }) + return { + status: hasFinalError ? 'error-reached-max-retries' : 'error', + } + } else { + // Retry the job - no need to bump processing or totalTried as this does not count as a retry. A condition of a different task might have just opened up! 
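+ // Recursing re-runs the step selection above against the job's refreshed taskStatus, so any
+ // steps whose conditions were unblocked by the tasks that just completed run in this pass.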
+ return await runJSONJob({ + job, + req, + updateJob, + workflowConfig, + workflowHandler, + }) + } + } +} diff --git a/packages/payload/src/queues/operations/runJobs/runJob/calculateBackoffWaitUntil.ts b/packages/payload/src/queues/operations/runJobs/runJob/calculateBackoffWaitUntil.ts new file mode 100644 index 00000000000..4e532587809 --- /dev/null +++ b/packages/payload/src/queues/operations/runJobs/runJob/calculateBackoffWaitUntil.ts @@ -0,0 +1,36 @@ +import type { RetryConfig } from '../../../config/types/taskTypes.js' + +export function calculateBackoffWaitUntil({ + retriesConfig, + totalTried, +}: { + retriesConfig: number | RetryConfig + totalTried: number +}): Date { + let waitUntil: Date = new Date() + if (typeof retriesConfig === 'object') { + if (retriesConfig.backoff) { + if (retriesConfig.backoff.type === 'fixed') { + waitUntil = retriesConfig.backoff.delay + ? new Date(new Date().getTime() + retriesConfig.backoff.delay) + : new Date() + } else if (retriesConfig.backoff.type === 'exponential') { + // 2 ^ (attempts - 1) * delay (current attempt is not included in totalTried, thus no need for -1) + const delay = retriesConfig.backoff.delay ? retriesConfig.backoff.delay : 0 + waitUntil = new Date(new Date().getTime() + Math.pow(2, totalTried) * delay) + } + } + } + + /* + const differenceInMSBetweenNowAndWaitUntil = waitUntil.getTime() - new Date().getTime() + + const differenceInSBetweenNowAndWaitUntil = differenceInMSBetweenNowAndWaitUntil / 1000 + console.log('Calculated backoff', { + differenceInMSBetweenNowAndWaitUntil, + differenceInSBetweenNowAndWaitUntil, + retriesConfig, + totalTried, + })*/ + return waitUntil +} diff --git a/packages/payload/src/queues/operations/runJobs/runJob/getRunTaskFunction.ts b/packages/payload/src/queues/operations/runJobs/runJob/getRunTaskFunction.ts new file mode 100644 index 00000000000..19ba5524faa --- /dev/null +++ b/packages/payload/src/queues/operations/runJobs/runJob/getRunTaskFunction.ts @@ -0,0 +1,309 @@ +import type { PayloadRequest } from '../../../../types/index.js' +import type { + RetryConfig, + RunInlineTaskFunction, + RunTaskFunction, + RunTaskFunctions, + TaskConfig, + TaskHandler, + TaskHandlerResult, + TaskType, +} from '../../../config/types/taskTypes.js' +import type { + BaseJob, + RunningJob, + SingleTaskStatus, + WorkflowConfig, + WorkflowTypes, +} from '../../../config/types/workflowTypes.js' +import type { UpdateJobFunction } from './getUpdateJobFunction.js' + +import { calculateBackoffWaitUntil } from './calculateBackoffWaitUntil.js' +import { importHandlerPath } from './importHandlerPath.js' + +// Helper object type to force being passed by reference +export type RunTaskFunctionState = { + reachedMaxRetries: boolean +} + +async function getTaskHandlerFromConfig(taskConfig: TaskConfig) { + let handler: TaskHandler + + if (typeof taskConfig.handler === 'function') { + handler = taskConfig.handler + } else { + handler = await importHandlerPath>(taskConfig.handler) + } + return handler +} + +export async function handleTaskFailed({ + error, + executedAt, + input, + job, + maxRetries, + output, + req, + retriesConfig, + runnerOutput, + state, + taskConfig, + taskID, + taskSlug, + taskStatus, + updateJob, +}: { + error?: Error + executedAt: Date + input: object + job: BaseJob + maxRetries: number + output: object + req: PayloadRequest + retriesConfig: number | RetryConfig + runnerOutput?: TaskHandlerResult + state: RunTaskFunctionState + taskConfig?: TaskConfig + taskID: string + taskSlug: string + taskStatus: null | 
SingleTaskStatus + updateJob: UpdateJobFunction +}): Promise { + req.payload.logger.error({ err: error, job, msg: 'Error running task', taskSlug }) + + if (taskConfig?.onFail) { + await taskConfig.onFail() + } + + if (!job.log) { + job.log = [] + } + job.log.push({ + completedAt: new Date().toISOString(), + error: error ?? runnerOutput.state, + executedAt: executedAt.toISOString(), + input, + output, + state: 'failed', + taskID, + taskSlug, + }) + + if (job.waitUntil) { + // Check if waitUntil is in the past + const waitUntil = new Date(job.waitUntil) + if (waitUntil < new Date()) { + // Outdated waitUntil, remove it + delete job.waitUntil + } + } + + if (taskStatus && !taskStatus.complete && taskStatus.totalTried >= maxRetries) { + state.reachedMaxRetries = true + + await updateJob({ + error, + hasError: true, + log: job.log, + processing: false, + waitUntil: job.waitUntil, + }) + + throw new Error( + `Task ${taskSlug} has failed more than the allowed retries in workflow ${job.workflowSlug}${error ? `. Error: ${String(error)}` : ''}`, + ) + } else { + // Job will retry. Let's determine when! + const waitUntil: Date = calculateBackoffWaitUntil({ + retriesConfig, + totalTried: taskStatus?.totalTried ?? 0, + }) + + // Update job's waitUntil only if this waitUntil is later than the current one + if (!job.waitUntil || waitUntil > new Date(job.waitUntil)) { + job.waitUntil = waitUntil.toISOString() + } + + await updateJob({ + log: job.log, + processing: false, + waitUntil: job.waitUntil, + }) + throw error ?? new Error('Task failed') + } +} + +export const getRunTaskFunction = ( + state: RunTaskFunctionState, + job: BaseJob, + workflowConfig: WorkflowConfig, + req: PayloadRequest, + isInline: TIsInline, + updateJob: UpdateJobFunction, +): TIsInline extends true ? RunInlineTaskFunction : RunTaskFunctions => { + const runTask: ( + taskSlug: TTaskSlug, + ) => TTaskSlug extends 'inline' ? RunInlineTaskFunction : RunTaskFunction = ( + taskSlug, + ) => + (async ( + taskID: Parameters[0], + { + input, + retries, + task, + }: Parameters[1] & Parameters>[1], + ) => { + const executedAt = new Date() + + let inlineRunner: TaskHandler = null + if (isInline) { + inlineRunner = task + } + + let retriesConfig: number | RetryConfig = retries + let taskConfig: TaskConfig + if (!isInline) { + taskConfig = req.payload.config.jobs.tasks.find((t) => t.slug === taskSlug) + if (!retriesConfig) { + retriesConfig = taskConfig.retries + } + + if (!taskConfig) { + throw new Error(`Task ${taskSlug} not found in workflow ${job.workflowSlug}`) + } + } + const maxRetries: number = + typeof retriesConfig === 'object' ? retriesConfig?.attempts : retriesConfig + + const taskStatus: null | SingleTaskStatus = job?.taskStatus?.[taskSlug] + ? job.taskStatus[taskSlug][taskID] + : null + + if (taskStatus && taskStatus.complete === true) { + return taskStatus.output + } + + let runner: TaskHandler + if (isInline) { + runner = inlineRunner + } else { + if (!taskConfig) { + throw new Error(`Task ${taskSlug} not found in workflow ${job.workflowSlug}`) + } + runner = await getTaskHandlerFromConfig(taskConfig) + } + + if (!runner || typeof runner !== 'function') { + const errorMessage = isInline + ? `Can't find runner for inline task with ID ${taskID}` + : `Can't find runner while importing with the path ${typeof workflowConfig.handler === 'string' ? 
workflowConfig.handler : 'unknown - no string path'} in job type ${job.workflowSlug} for task ${taskSlug}.` + req.payload.logger.error(errorMessage) + + await updateJob({ + error: { + error: errorMessage, + }, + hasError: true, + log: [ + ...job.log, + { + completedAt: new Date().toISOString(), + error: errorMessage, + executedAt: executedAt.toISOString(), + state: 'failed', + taskID, + taskSlug, + }, + ], + processing: false, + }) + + return + } + + let output: object + + try { + const runnerOutput = await runner({ + input, + job: job as unknown as RunningJob, // TODO: Type this better + req, + }) + + if (runnerOutput.state === 'failed') { + await handleTaskFailed({ + executedAt, + input, + job, + maxRetries, + output, + req, + retriesConfig, + runnerOutput, + state, + taskConfig, + taskID, + taskSlug, + taskStatus, + updateJob, + }) + throw new Error('Task failed') + } else { + output = runnerOutput.output + } + } catch (err) { + await handleTaskFailed({ + error: err, + executedAt, + input, + job, + maxRetries, + output, + req, + retriesConfig, + state, + taskConfig, + taskID, + taskSlug, + taskStatus, + updateJob, + }) + throw new Error('Task failed') + } + + if (taskConfig?.onSuccess) { + await taskConfig.onSuccess() + } + + if (!job.log) { + job.log = [] + } + job.log.push({ + completedAt: new Date().toISOString(), + executedAt: executedAt.toISOString(), + input, + output, + state: 'succeeded', + taskID, + taskSlug, + }) + + await updateJob({ + log: job.log, + }) + + return output + }) as any + + if (isInline) { + return runTask('inline') as TIsInline extends true ? RunInlineTaskFunction : RunTaskFunctions + } else { + const tasks: RunTaskFunctions = {} + for (const task of req?.payload?.config?.jobs?.tasks ?? []) { + tasks[task.slug] = runTask(task.slug) + } + return tasks as TIsInline extends true ? RunInlineTaskFunction : RunTaskFunctions + } +} diff --git a/packages/payload/src/queues/operations/runJobs/runJob/getUpdateJobFunction.ts b/packages/payload/src/queues/operations/runJobs/runJob/getUpdateJobFunction.ts new file mode 100644 index 00000000000..0ad34702261 --- /dev/null +++ b/packages/payload/src/queues/operations/runJobs/runJob/getUpdateJobFunction.ts @@ -0,0 +1,23 @@ +import type { PayloadRequest } from '../../../../types/index.js' +import type { BaseJob } from '../../../config/types/workflowTypes.js' + +export type UpdateJobFunction = (jobData: Partial) => Promise + +export function getUpdateJobFunction(job: BaseJob, req: PayloadRequest): UpdateJobFunction { + return async (jobData) => { + const updatedJob = (await req.payload.update({ + id: job.id, + collection: 'payload-jobs', + data: jobData, + depth: 0, + disableTransaction: true, + })) as BaseJob + + // Update job object like this to modify the original object - that way, incoming changes (e.g. 
taskStatus field that will be re-generated through the hook) will be reflected in the calling function + for (const key in updatedJob) { + job[key] = updatedJob[key] + } + + return updatedJob + } +} diff --git a/packages/payload/src/queues/operations/runJobs/runJob/handleWorkflowError.ts b/packages/payload/src/queues/operations/runJobs/runJob/handleWorkflowError.ts new file mode 100644 index 00000000000..15f58622113 --- /dev/null +++ b/packages/payload/src/queues/operations/runJobs/runJob/handleWorkflowError.ts @@ -0,0 +1,66 @@ +import type { PayloadRequest } from '../../../../types/index.js' +import type { BaseJob, WorkflowConfig, WorkflowTypes } from '../../../config/types/workflowTypes.js' +import type { RunTaskFunctionState } from './getRunTaskFunction.js' + +import { calculateBackoffWaitUntil } from './calculateBackoffWaitUntil.js' + +/** + * This is called if a workflow catches an error. It determines if it's a final error + * or not and handles logging. + */ +export function handleWorkflowError({ + error, + job, + req, + state, + workflowConfig, +}: { + error: Error + job: BaseJob + req: PayloadRequest + state: RunTaskFunctionState + workflowConfig: WorkflowConfig +}): { + hasFinalError: boolean +} { + let hasFinalError = state.reachedMaxRetries // If any TASK reached max retries, the job has an error + const maxRetries = + typeof workflowConfig.retries === 'object' + ? workflowConfig.retries.attempts + : workflowConfig.retries + // Now let's handle workflow retries + if (!hasFinalError && workflowConfig.retries) { + if (job.waitUntil) { + // Check if waitUntil is in the past + const waitUntil = new Date(job.waitUntil) + if (waitUntil < new Date()) { + // Outdated waitUntil, remove it + delete job.waitUntil + } + } + if (job.totalTried >= maxRetries) { + state.reachedMaxRetries = true + hasFinalError = true + } else { + // Job will retry. Let's determine when! + const waitUntil: Date = calculateBackoffWaitUntil({ + retriesConfig: workflowConfig.retries, + totalTried: job.totalTried ?? 0, + }) + + // Update job's waitUntil only if this waitUntil is later than the current one + if (!job.waitUntil || waitUntil > new Date(job.waitUntil)) { + job.waitUntil = waitUntil.toISOString() + } + } + } + + req.payload.logger.error({ + err: error, + msg: `Error running job ${job.workflowSlug} ${job.taskSlug} id: ${job.id} attempt ${job.totalTried}/${maxRetries}`, + }) + + return { + hasFinalError, + } +} diff --git a/packages/payload/src/queues/operations/runJobs/runJob/importHandlerPath.ts b/packages/payload/src/queues/operations/runJobs/runJob/importHandlerPath.ts new file mode 100644 index 00000000000..68bf01844bf --- /dev/null +++ b/packages/payload/src/queues/operations/runJobs/runJob/importHandlerPath.ts @@ -0,0 +1,28 @@ +import { pathToFileURL } from 'url' + +export async function importHandlerPath(path: string): Promise { + let runner: T + const [runnerPath, runnerImportName] = path.split('#') + + const runnerModule = + typeof require === 'function' + ? 
await eval(`require('${runnerPath.replaceAll('\\', '/')}')`) + : await eval(`import('${pathToFileURL(runnerPath).href}')`) + + // If the path has indicated an #exportName, try to get it + if (runnerImportName && runnerModule[runnerImportName]) { + runner = runnerModule[runnerImportName] + } + + // If there is a default export, use it + if (!runner && runnerModule.default) { + runner = runnerModule.default + } + + // Finally, use whatever was imported + if (!runner) { + runner = runnerModule + } + + return runner +} diff --git a/packages/payload/src/queues/operations/runJobs/runJob/index.ts b/packages/payload/src/queues/operations/runJobs/runJob/index.ts new file mode 100644 index 00000000000..f2f38a128ef --- /dev/null +++ b/packages/payload/src/queues/operations/runJobs/runJob/index.ts @@ -0,0 +1,83 @@ +import type { PayloadRequest } from '../../../../types/index.js' +import type { + BaseJob, + RunningJob, + WorkflowConfig, + WorkflowHandler, + WorkflowTypes, +} from '../../../config/types/workflowTypes.js' +import type { RunTaskFunctionState } from './getRunTaskFunction.js' +import type { UpdateJobFunction } from './getUpdateJobFunction.js' + +import { getRunTaskFunction } from './getRunTaskFunction.js' +import { handleWorkflowError } from './handleWorkflowError.js' + +type Args = { + job: BaseJob + req: PayloadRequest + updateJob: UpdateJobFunction + workflowConfig: WorkflowConfig + workflowHandler: WorkflowHandler +} + +export type JobRunStatus = 'error' | 'error-reached-max-retries' | 'success' + +export type RunJobResult = { + status: JobRunStatus +} + +export const runJob = async ({ + job, + req, + updateJob, + workflowConfig, + workflowHandler, +}: Args): Promise => { + // Object so that we can pass contents by reference, not value. + // We want any mutations to be reflected in here. + const state: RunTaskFunctionState = { + reachedMaxRetries: false, + } + + // Run the job + try { + await workflowHandler({ + inlineTask: getRunTaskFunction(state, job, workflowConfig, req, true, updateJob), + job: job as unknown as RunningJob, //TODO: Type this better + req, + tasks: getRunTaskFunction(state, job, workflowConfig, req, false, updateJob), + }) + } catch (err) { + const { hasFinalError } = handleWorkflowError({ + error: err, + job, + req, + state, + workflowConfig, + }) + + // Tasks update the job if they error - but in case there is an unhandled error (e.g. in the workflow itself, not in a task) + // we need to ensure the job is updated to reflect the error + await updateJob({ + error: hasFinalError ? err : undefined, + hasError: hasFinalError, // If reached max retries => final error. If hasError is true this job will not be retried + processing: false, + totalTried: (job.totalTried ?? 0) + 1, + }) + + return { + status: hasFinalError ? 'error-reached-max-retries' : 'error', + } + } + + // Workflow has completed + await updateJob({ + completedAt: new Date().toISOString(), + processing: false, + totalTried: (job.totalTried ?? 
0) + 1, + }) + + return { + status: 'success', + } +} diff --git a/packages/payload/src/queues/restEndpointRun.ts b/packages/payload/src/queues/restEndpointRun.ts new file mode 100644 index 00000000000..59626aec23e --- /dev/null +++ b/packages/payload/src/queues/restEndpointRun.ts @@ -0,0 +1,81 @@ +import type { Endpoint } from '../config/types.js' + +import { runJobs, type RunJobsArgs } from './operations/runJobs/index.js' + +export const runJobsEndpoint: Endpoint = { + handler: async (req) => { + if ( + !Array.isArray(req.payload.config.jobs.workflows) || + !(req.payload.config.jobs?.workflows?.length > 0) + ) { + return Response.json( + { + message: 'No jobs to run.', + }, + { status: 200 }, + ) + } + + const hasAccess = await req.payload.config.jobs.access.run({ req }) + + if (!hasAccess) { + return Response.json( + { + message: req.i18n.t('error:unauthorized'), + }, + { status: 401 }, + ) + } + + const { limit, queue } = req.query + + const runJobsArgs: RunJobsArgs = { + queue: 'default', + req, + // We are checking access above, so we can override it here + overrideAccess: true, + } + + if (typeof queue === 'string') { + runJobsArgs.queue = queue + } + + if (typeof limit !== 'undefined') { + runJobsArgs.limit = Number(limit) + } + + let noJobsRemaining = false + let remainingJobsFromQueried = 0 + try { + const result = await runJobs(runJobsArgs) + noJobsRemaining = result.noJobsRemaining + remainingJobsFromQueried = result.remainingJobsFromQueried + } catch (err) { + req.payload.logger.error({ + err, + msg: 'There was an error running jobs:', + queue: runJobsArgs.queue, + }) + + return Response.json( + { + message: req.i18n.t('error:unknown'), + noJobsRemaining: true, + remainingJobsFromQueried, + }, + { status: 500 }, + ) + } + + return Response.json( + { + message: req.i18n.t('general:success'), + noJobsRemaining, + remainingJobsFromQueried, + }, + { status: 200 }, + ) + }, + method: 'get', + path: '/run', +} diff --git a/packages/payload/src/queues/utilities/getJobTaskStatus.ts b/packages/payload/src/queues/utilities/getJobTaskStatus.ts new file mode 100644 index 00000000000..bc900703184 --- /dev/null +++ b/packages/payload/src/queues/utilities/getJobTaskStatus.ts @@ -0,0 +1,38 @@ +import type { TaskConfig, TaskType } from '../config/types/taskTypes.js' +import type { BaseJob, JobTaskStatus } from '../config/types/workflowTypes.js' + +type Args = { + jobLog: BaseJob['log'] + tasksConfig: TaskConfig[] +} + +export const getJobTaskStatus = ({ jobLog }: Args): JobTaskStatus => { + const taskStatus: JobTaskStatus = {} + + // First, add (in order) the steps from the config to + // our status map + for (const loggedJob of jobLog) { + if (!taskStatus[loggedJob.taskSlug]) { + taskStatus[loggedJob.taskSlug] = {} + } + if (!taskStatus[loggedJob.taskSlug][loggedJob.taskID]) { + taskStatus[loggedJob.taskSlug][loggedJob.taskID] = { + complete: loggedJob.state === 'succeeded', + input: loggedJob.input, + output: loggedJob.output, + taskSlug: loggedJob.taskSlug, + totalTried: 1, + } + } else { + const newTaskStatus = taskStatus[loggedJob.taskSlug][loggedJob.taskID] + newTaskStatus.totalTried += 1 + + if (loggedJob.state === 'succeeded') { + newTaskStatus.complete = true + } + taskStatus[loggedJob.taskSlug][loggedJob.taskID] = newTaskStatus + } + } + + return taskStatus +} diff --git a/packages/payload/src/utilities/configToJSONSchema.ts b/packages/payload/src/utilities/configToJSONSchema.ts index cd8720a69f2..0dcd194decb 100644 --- a/packages/payload/src/utilities/configToJSONSchema.ts +++ 
b/packages/payload/src/utilities/configToJSONSchema.ts @@ -11,6 +11,7 @@ import type { SanitizedGlobalConfig } from '../globals/config/types.js' import { MissingEditorProp } from '../errors/MissingEditorProp.js' import { fieldAffectsData, tabHasName } from '../fields/config/types.js' +import { generateJobsJSONSchemas } from '../queues/config/generateJobsJSONSchemas.js' import { deepCopyObject } from './deepCopyObject.js' import { toWords } from './formatLabels.js' import { getCollectionIDFieldTypes } from './getCollectionIDFieldTypes.js' @@ -288,13 +289,17 @@ export function fieldsToJSONSchema( type: withNullableJSONSchemaType('array', isRequired), items: { type: 'string', - enum: optionEnums, }, } + if (optionEnums?.length) { + ;(fieldSchema.items as JSONSchema4).enum = optionEnums + } } else { fieldSchema = { type: withNullableJSONSchemaType('string', isRequired), - enum: optionEnums, + } + if (optionEnums?.length) { + fieldSchema.enum = optionEnums } } @@ -604,7 +609,11 @@ export function entityToJSONSchema( incomingEntity: SanitizedCollectionConfig | SanitizedGlobalConfig, interfaceNameDefinitions: Map, defaultIDType: 'number' | 'text', + collectionIDFieldTypes?: { [key: string]: 'number' | 'string' }, ): JSONSchema4 { + if (!collectionIDFieldTypes) { + collectionIDFieldTypes = getCollectionIDFieldTypes({ config, defaultIDType }) + } const entity: SanitizedCollectionConfig | SanitizedGlobalConfig = deepCopyObject(incomingEntity) const title = entity.typescript?.interface ? entity.typescript.interface @@ -641,9 +650,6 @@ export function entityToJSONSchema( }) } - // Used for relationship fields, to determine whether to use a string or number type for the ID. - const collectionIDFieldTypes = getCollectionIDFieldTypes({ config, defaultIDType }) - return { type: 'object', additionalProperties: false, @@ -912,6 +918,9 @@ export function configToJSONSchema( // a mutable Map to store custom top-level `interfaceName` types. Fields with an `interfaceName` property will be moved to the top-level definitions here const interfaceNameDefinitions: Map = new Map() + // Used for relationship fields, to determine whether to use a string or number type for the ID. + const collectionIDFieldTypes = getCollectionIDFieldTypes({ config, defaultIDType }) + // Collections and Globals have to be moved to the top-level definitions as well. Reason: The top-level type will be the `Config` type - we don't want all collection and global // types to be inlined inside the `Config` type @@ -928,7 +937,13 @@ export function configToJSONSchema( const entityDefinitions: { [k: string]: JSONSchema4 } = entities.reduce( (acc, { type, entity }) => { - acc[entity.slug] = entityToJSONSchema(config, entity, interfaceNameDefinitions, defaultIDType) + acc[entity.slug] = entityToJSONSchema( + config, + entity, + interfaceNameDefinitions, + defaultIDType, + collectionIDFieldTypes, + ) const select = fieldsToSelectJSONSchema({ fields: entity.fields }) if (type === 'global') { @@ -958,6 +973,10 @@ export function configToJSONSchema( { auth: {} }, ) + const jobsSchemas = config.jobs + ? 
generateJobsJSONSchemas(config, config.jobs, interfaceNameDefinitions, collectionIDFieldTypes) + : {} + let jsonSchema: JSONSchema4 = { additionalProperties: false, definitions: { @@ -980,6 +999,19 @@ export function configToJSONSchema( required: ['user', 'locale', 'collections', 'globals', 'auth', 'db'], title: 'Config', } + if (jobsSchemas.definitions?.size) { + for (const [key, value] of jobsSchemas.definitions) { + jsonSchema.definitions[key] = value + } + } + if (jobsSchemas.properties) { + jsonSchema.properties.jobs = { + type: 'object', + additionalProperties: false, + properties: jobsSchemas.properties, + required: ['tasks'], + } + } if (config?.typescript?.schema?.length) { for (const schema of config.typescript.schema) { diff --git a/packages/richtext-lexical/src/features/toolbars/fixed/client/Toolbar/index.tsx b/packages/richtext-lexical/src/features/toolbars/fixed/client/Toolbar/index.tsx index 8806fcc0a18..adcb49885d2 100644 --- a/packages/richtext-lexical/src/features/toolbars/fixed/client/Toolbar/index.tsx +++ b/packages/richtext-lexical/src/features/toolbars/fixed/client/Toolbar/index.tsx @@ -155,7 +155,7 @@ function FixedToolbar({ }): React.ReactNode { const currentToolbarRef = React.useRef(null) - const { y } = useScrollInfo!() + const { y } = useScrollInfo() // Memoize the parent toolbar element const parentToolbarElem = useMemo(() => { diff --git a/packages/storage-vercel-blob/src/index.ts b/packages/storage-vercel-blob/src/index.ts index 6c24118b256..2afc82b756e 100644 --- a/packages/storage-vercel-blob/src/index.ts +++ b/packages/storage-vercel-blob/src/index.ts @@ -148,7 +148,7 @@ function vercelBlobStorageInternal( prefix, token, }), - staticHandler: getStaticHandler({ baseUrl, token, cacheControlMaxAge }, collection), + staticHandler: getStaticHandler({ baseUrl, cacheControlMaxAge, token }, collection), } } } diff --git a/packages/storage-vercel-blob/src/staticHandler.ts b/packages/storage-vercel-blob/src/staticHandler.ts index bee586782a3..649f4b82856 100644 --- a/packages/storage-vercel-blob/src/staticHandler.ts +++ b/packages/storage-vercel-blob/src/staticHandler.ts @@ -7,12 +7,12 @@ import path from 'path' type StaticHandlerArgs = { baseUrl: string - token: string cacheControlMaxAge?: number + token: string } export const getStaticHandler = ( - { baseUrl, token, cacheControlMaxAge = 0 }: StaticHandlerArgs, + { baseUrl, cacheControlMaxAge = 0, token }: StaticHandlerArgs, collection: CollectionConfig, ): StaticHandler => { return async (req, { params: { filename } }) => { @@ -38,10 +38,10 @@ export const getStaticHandler = ( return new Response(bodyBuffer, { headers: new Headers({ + 'Cache-Control': `public, max-age=${cacheControlMaxAge}`, 'Content-Disposition': contentDisposition, 'Content-Length': String(size), 'Content-Type': contentType, - 'Cache-Control': `public, max-age=${cacheControlMaxAge}` }), status: 200, }) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 2fd93d7bb89..40c5744d990 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -827,6 +827,9 @@ importers: console-table-printer: specifier: 2.11.2 version: 2.11.2 + croner: + specifier: 8.1.2 + version: 8.1.2 dataloader: specifier: 2.2.2 version: 2.2.2 @@ -5602,6 +5605,10 @@ packages: engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} hasBin: true + croner@8.1.2: + resolution: {integrity: sha512-ypfPFcAXHuAZRCzo3vJL6ltENzniTjwe/qsLleH1V2/7SRDjgvRQyrLmumFTLmjFax4IuSxfGXEn79fozXcJog==} + engines: {node: '>=18.0'} + cross-env@7.0.3: resolution: {integrity: 
sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==} engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'} @@ -15151,6 +15158,8 @@ snapshots: - supports-color - ts-node + croner@8.1.2: {} + cross-env@7.0.3: dependencies: cross-spawn: 7.0.3 diff --git a/templates/website/src/app/(payload)/admin/importMap.js b/templates/website/src/app/(payload)/admin/importMap.js index d994a7b4f4d..3ee5298ab10 100644 --- a/templates/website/src/app/(payload)/admin/importMap.js +++ b/templates/website/src/app/(payload)/admin/importMap.js @@ -21,25 +21,26 @@ import { default as default_19 } from '@/components/BeforeDashboard' import { default as default_20 } from '@/components/BeforeLogin' export const importMap = { - "@payloadcms/richtext-lexical/client#RichTextCell": RichTextCell_0, - "@payloadcms/richtext-lexical/client#RichTextField": RichTextField_1, - "@payloadcms/richtext-lexical/generateComponentMap#getGenerateComponentMap": getGenerateComponentMap_2, - "@payloadcms/richtext-lexical/client#InlineToolbarFeatureClient": InlineToolbarFeatureClient_3, - "@payloadcms/richtext-lexical/client#FixedToolbarFeatureClient": FixedToolbarFeatureClient_4, - "@payloadcms/richtext-lexical/client#HeadingFeatureClient": HeadingFeatureClient_5, - "@payloadcms/richtext-lexical/client#UnderlineFeatureClient": UnderlineFeatureClient_6, - "@payloadcms/richtext-lexical/client#BoldFeatureClient": BoldFeatureClient_7, - "@payloadcms/richtext-lexical/client#ItalicFeatureClient": ItalicFeatureClient_8, - "@payloadcms/richtext-lexical/client#LinkFeatureClient": LinkFeatureClient_9, - "@payloadcms/plugin-seo/client#OverviewComponent": OverviewComponent_10, - "@payloadcms/plugin-seo/client#MetaTitleComponent": MetaTitleComponent_11, - "@payloadcms/plugin-seo/client#MetaImageComponent": MetaImageComponent_12, - "@payloadcms/plugin-seo/client#MetaDescriptionComponent": MetaDescriptionComponent_13, - "@payloadcms/plugin-seo/client#PreviewComponent": PreviewComponent_14, - "@/fields/slug/SlugComponent#SlugComponent": SlugComponent_15, - "@payloadcms/richtext-lexical/client#HorizontalRuleFeatureClient": HorizontalRuleFeatureClient_16, - "@payloadcms/richtext-lexical/client#BlocksFeatureClient": BlocksFeatureClient_17, - "@payloadcms/plugin-search/client#LinkToDoc": LinkToDoc_18, - "@/components/BeforeDashboard#default": default_19, - "@/components/BeforeLogin#default": default_20 + '@payloadcms/richtext-lexical/client#RichTextCell': RichTextCell_0, + '@payloadcms/richtext-lexical/client#RichTextField': RichTextField_1, + '@payloadcms/richtext-lexical/generateComponentMap#getGenerateComponentMap': + getGenerateComponentMap_2, + '@payloadcms/richtext-lexical/client#InlineToolbarFeatureClient': InlineToolbarFeatureClient_3, + '@payloadcms/richtext-lexical/client#FixedToolbarFeatureClient': FixedToolbarFeatureClient_4, + '@payloadcms/richtext-lexical/client#HeadingFeatureClient': HeadingFeatureClient_5, + '@payloadcms/richtext-lexical/client#UnderlineFeatureClient': UnderlineFeatureClient_6, + '@payloadcms/richtext-lexical/client#BoldFeatureClient': BoldFeatureClient_7, + '@payloadcms/richtext-lexical/client#ItalicFeatureClient': ItalicFeatureClient_8, + '@payloadcms/richtext-lexical/client#LinkFeatureClient': LinkFeatureClient_9, + '@payloadcms/plugin-seo/client#OverviewComponent': OverviewComponent_10, + '@payloadcms/plugin-seo/client#MetaTitleComponent': MetaTitleComponent_11, + '@payloadcms/plugin-seo/client#MetaImageComponent': MetaImageComponent_12, + 
'@payloadcms/plugin-seo/client#MetaDescriptionComponent': MetaDescriptionComponent_13, + '@payloadcms/plugin-seo/client#PreviewComponent': PreviewComponent_14, + '@/fields/slug/SlugComponent#SlugComponent': SlugComponent_15, + '@payloadcms/richtext-lexical/client#HorizontalRuleFeatureClient': HorizontalRuleFeatureClient_16, + '@payloadcms/richtext-lexical/client#BlocksFeatureClient': BlocksFeatureClient_17, + '@payloadcms/plugin-search/client#LinkToDoc': LinkToDoc_18, + '@/components/BeforeDashboard#default': default_19, + '@/components/BeforeLogin#default': default_20, } diff --git a/test/_community/payload-types.ts b/test/_community/payload-types.ts index 74282f914a5..bea169827e1 100644 --- a/test/_community/payload-types.ts +++ b/test/_community/payload-types.ts @@ -30,6 +30,10 @@ export interface Config { user: User & { collection: 'users'; }; + jobs?: { + tasks: unknown; + workflows?: unknown; + }; } export interface UserAuthOperations { forgotPassword: { diff --git a/test/fields/payload-types.ts b/test/fields/payload-types.ts index 3f0c63fb100..6d27f64df11 100644 --- a/test/fields/payload-types.ts +++ b/test/fields/payload-types.ts @@ -1,3 +1,5 @@ + + /* tslint:disable */ /* eslint-disable */ /** @@ -77,6 +79,10 @@ export interface Config { user: User & { collection: 'users'; }; + jobs?: { + tasks: unknown; + workflows?: unknown; + }; } export interface UserAuthOperations { forgotPassword: { diff --git a/test/helpers/e2e/reorderColumns.ts b/test/helpers/e2e/reorderColumns.ts index 453dc2f0a8c..2f214821845 100644 --- a/test/helpers/e2e/reorderColumns.ts +++ b/test/helpers/e2e/reorderColumns.ts @@ -40,7 +40,9 @@ export const reorderColumns = async ( }) .boundingBox() - if (!fromBoundingBox || !toBoundingBox) {return} + if (!fromBoundingBox || !toBoundingBox) { + return + } // drag the "from" column to the left of the "to" column await page.mouse.move(fromBoundingBox.x + 2, fromBoundingBox.y + 2, { steps: 10 }) diff --git a/test/import-test/import-all-2-exports.ts b/test/import-test/import-all-2-exports.ts index 580da50fcf6..0ee55638e3f 100644 --- a/test/import-test/import-all-2-exports.ts +++ b/test/import-test/import-all-2-exports.ts @@ -44,8 +44,12 @@ import { AdminViewComponent, AdminViewConfig, AdminViewProps, + baseBlockFields, + baseIDField, BaseLocalizationConfig, + buildConfig, Config, + defaults, EditView, EditViewConfig, EmailOptions, @@ -56,6 +60,8 @@ import { FieldTypes, GeneratePreviewURL, GraphQLExtension, + hasTransport, + hasTransportOptions, InitOptions, LivePreviewConfig, Locale, @@ -64,31 +70,28 @@ import { LocalizationConfigWithNoLabels, PayloadHandler, Plugin, + sanitizeConfig, SanitizedConfig, SanitizedLocalizationConfig, - baseBlockFields, - baseIDField, - buildConfig, - defaults, - hasTransport, - hasTransportOptions, - sanitizeConfig, sanitizeFields, } from 'payload/config' import { BaseDatabaseAdapter, BeginTransaction, + combineQueries, CommitTransaction, Connect, Count, CountArgs, Create, CreateArgs, + createDatabaseAdapter, CreateGlobal, CreateGlobalArgs, CreateGlobalVersion, CreateGlobalVersionArgs, CreateMigration, + createMigration, CreateVersion, CreateVersionArgs, DBIdentifierName, @@ -110,13 +113,24 @@ import { FindOneArgs, FindVersions, FindVersionsArgs, + flattenWhereToOperators, + getLocalizedPaths, + getMigrations, Init, + migrate, + migrateDown, + migrateRefresh, + migrateReset, + migrateStatus, Migration, MigrationData, + migrationsCollection, + migrationTemplate, PaginatedDocs, PathToQuery, QueryDrafts, QueryDraftsArgs, + readMigrationFiles, 
RollbackTransaction, Transaction, TypeWithVersion, @@ -128,20 +142,6 @@ import { UpdateOneArgs, UpdateVersion, UpdateVersionArgs, - combineQueries, - createDatabaseAdapter, - createMigration, - flattenWhereToOperators, - getLocalizedPaths, - getMigrations, - migrate, - migrateDown, - migrateRefresh, - migrateReset, - migrateStatus, - migrationTemplate, - migrationsCollection, - readMigrationFiles, validateQueryPaths, validateSearchParam, } from 'payload/database' @@ -165,7 +165,7 @@ import { QueryError, ValidationError, } from 'payload/errors' -import { GraphQL, buildPaginatedListType } from 'payload/graphql' +import { buildPaginatedListType, GraphQL } from 'payload/graphql' import { AccessArgs as AccessArgsType, Access as AccessType, @@ -201,21 +201,31 @@ import { CustomSaveDraftButtonProps, Data, DateField, + docHasTimestamps, Document, EmailField, Field, FieldAccess, FieldAffectingData, + fieldAffectsData, FieldBase, + fieldHasMaxDepth, + fieldHasSubFields, FieldHook, FieldHookArgs, + fieldIsArrayType, + fieldIsBlockType, + fieldIsGroupType, + fieldIsLocalized, + fieldIsPresentationalOnly, FieldPresentationalOnly, + Fields, + fieldSupportsMany, FieldWithMany, FieldWithMaxDepth, FieldWithPath, FieldWithRichTextRequiredEditor, FieldWithSubFields, - Fields, FileData, FilterOptions, FilterOptionsProps, @@ -239,7 +249,10 @@ import { Operation, Operator, Option, + optionIsObject, + optionIsValue, OptionObject, + optionsAreObjects, PayloadRequest, PointField, PolymorphicRelationshipField, @@ -259,36 +272,23 @@ import { SingleRelationshipField, Tab, TabAsField, + tabHasName, TabsAdmin, TabsField, - TextField, TextareaField, + TextField, TypeWithID, UIField, UnnamedTab, UploadField, Validate, ValidateOptions, + validOperators, + valueIsValueWithRelation, ValueWithRelation, VersionOperations, Where, WhereField, - docHasTimestamps, - fieldAffectsData, - fieldHasMaxDepth, - fieldHasSubFields, - fieldIsArrayType, - fieldIsBlockType, - fieldIsGroupType, - fieldIsLocalized, - fieldIsPresentationalOnly, - fieldSupportsMany, - optionIsObject, - optionIsValue, - optionsAreObjects, - tabHasName, - validOperators, - valueIsValueWithRelation, } from 'payload/types' import { afterReadPromise, @@ -351,17 +351,18 @@ import { CountryField, Email, FieldConfig, - FieldValues, FieldsConfig, + FieldValues, Form, + FormattedEmail, CheckboxField as FormBuilderCheckboxField, EmailField as FormBuilderEmailField, SelectField as FormBuilderSelectField, TextField as FormBuilderTextField, FormFieldBlock, FormSubmission, - FormattedEmail, HandlePayment, + isValidBlockConfig, MessageField, PaymentField, PaymentFieldConfig, @@ -372,7 +373,6 @@ import { StateField, SubmissionValue, TextAreaField, - isValidBlockConfig, } from '@payloadcms/plugin-form-builder/types' import nestedDocs from '@payloadcms/plugin-nested-docs' import { createBreadcrumbsField, createParentField } from '@payloadcms/plugin-nested-docs/fields' @@ -400,8 +400,8 @@ import { GenerateImage, GenerateTitle, Meta, - PluginConfig as SeoPluginConfig, GenerateURL as seoGenerateURL, + PluginConfig as SeoPluginConfig, } from '@payloadcms/plugin-seo/types' import stripePlugin from '@payloadcms/plugin-stripe' import { @@ -425,6 +425,10 @@ import { $isRelationshipNode, $isUploadNode, AdapterProps, + addSwipeDownListener, + addSwipeLeftListener, + addSwipeRightListener, + addSwipeUpListener, AlignFeature, AutoLinkNode, BlockFields, @@ -435,30 +439,51 @@ import { BoldTextFeature, CAN_USE_DOM, CheckListFeature, + cloneDeep, + consolidateHTMLConverters, + 
convertLexicalNodesToHTML, + convertLexicalToHTML, + convertSlateNodesToLexical, + convertSlateToLexical, + createBlockNode, + defaultEditorConfig, + defaultEditorFeatures, + defaultHTMLConverters, + defaultRichTextValue, + defaultSanitizedEditorConfig, + defaultSlateConverters, DETAIL_TYPE_TO_DETAIL, DOUBLE_LINE_BREAK, + EditorConfig, + EditorConfigProvider, ELEMENT_FORMAT_TO_TYPE, ELEMENT_TYPE_TO_FORMAT, ENABLE_SLASH_MENU_COMMAND, - EditorConfig, - EditorConfigProvider, Feature, FeatureProvider, FeatureProviderMap, FloatingToolbarSection, FloatingToolbarSectionEntry, FormatSectionWithEntries, + getDOMRangeRect, + getEnabledNodes, + getSelectedNode, + HeadingFeature, HTMLConverter, HTMLConverterFeature, HTMLConverterFeatureProps, - HeadingFeature, - IS_ALL_FORMATTING, IndentFeature, InlineCodeTextFeature, + invariant, + IS_ALL_FORMATTING, + isHTMLElement, + isPoint, ItalicTextFeature, - LTR_REGEX, + joinClasses, LexicalBlock, + lexicalEditor, LexicalEditorProps, + lexicalHTML, LexicalPluginToLexicalFeature, LexicalRichTextAdapter, LinebreakHTMLConverter, @@ -466,15 +491,16 @@ import { LinkFeatureProps, LinkFields, LinkNode, - NON_BREAKING_SPACE, + loadFeatures, + LTR_REGEX, NodeFormat, NodeValidation, + NON_BREAKING_SPACE, OrderedListFeature, ParagraphFeature, ParagraphHTMLConverter, Point, PopulationPromise, - RTL_REGEX, RawUploadPayload, Rect, RelationshipData, @@ -482,13 +508,19 @@ import { RelationshipNode, ResolvedFeature, ResolvedFeatureMap, + RTL_REGEX, SanitizedEditorConfig, SanitizedFeatures, + sanitizeEditorConfig, + sanitizeFeatures, + sanitizeUrl, SerializedAutoLinkNode, SerializedBlockNode, SerializedLinkNode, SerializedRelationshipNode, SerializedUploadNode, + setFloatingElemPosition, + setFloatingElemPositionForLinkEditor, SlashMenuGroup, SlashMenuOption, SlateBlockquoteConverter, @@ -504,16 +536,17 @@ import { SlateUnknownConverter, SlateUnorderedListConverter, SlateUploadConverter, + sortFeaturesForOptimalLoading, StrikethroughTextFeature, SubscriptTextFeature, SuperscriptTextFeature, + TestRecorderFeature, TEXT_MODE_TO_TYPE, TEXT_TYPE_TO_FORMAT, TEXT_TYPE_TO_MODE, - TOGGLE_LINK_COMMAND, - TestRecorderFeature, TextDropdownSectionWithEntries, TextHTMLConverter, + TOGGLE_LINK_COMMAND, TreeViewFeature, UnderlineTextFeature, UnorderedListFeature, @@ -521,48 +554,15 @@ import { UploadFeature, UploadFeatureProps, UploadNode, - addSwipeDownListener, - addSwipeLeftListener, - addSwipeRightListener, - addSwipeUpListener, - cloneDeep, - consolidateHTMLConverters, - convertLexicalNodesToHTML, - convertLexicalToHTML, - convertSlateNodesToLexical, - convertSlateToLexical, - createBlockNode, - defaultEditorConfig, - defaultEditorFeatures, - defaultHTMLConverters, - defaultRichTextValue, - defaultSanitizedEditorConfig, - defaultSlateConverters, - getDOMRangeRect, - getEnabledNodes, - getSelectedNode, - invariant, - isHTMLElement, - isPoint, - joinClasses, - lexicalEditor, - lexicalHTML, - loadFeatures, - sanitizeEditorConfig, - sanitizeFeatures, - sanitizeUrl, - setFloatingElemPosition, - setFloatingElemPositionForLinkEditor, - sortFeaturesForOptimalLoading, useEditorConfigContext, validateUrl, } from '@payloadcms/richtext-lexical' import { + defaultEditorLexicalConfig, RichTextCell, RichTextField, ToolbarButton, ToolbarDropdown, - defaultEditorLexicalConfig, } from '@payloadcms/richtext-lexical/components' import { AdapterArguments, @@ -570,12 +570,12 @@ import { ElementNode, FieldProps, LeafButton, + nodeIsTextNode, RichTextCustomElement, RichTextCustomLeaf, RichTextElement, 
RichTextLeaf, - TextNode, - nodeIsTextNode, slateEditor, + TextNode, toggleElement, } from '@payloadcms/richtext-slate' diff --git a/test/queues/.gitignore b/test/queues/.gitignore new file mode 100644 index 00000000000..cce01755f4f --- /dev/null +++ b/test/queues/.gitignore @@ -0,0 +1,2 @@ +/media +/media-gif diff --git a/test/queues/config.ts b/test/queues/config.ts new file mode 100644 index 00000000000..d758301e873 --- /dev/null +++ b/test/queues/config.ts @@ -0,0 +1,580 @@ +import type { TaskConfig, WorkflowConfig } from 'payload' + +import { lexicalEditor } from '@payloadcms/richtext-lexical' +import { fileURLToPath } from 'node:url' +import path from 'path' + +import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js' +import { devUser } from '../credentials.js' +import { updatePostStep1, updatePostStep2 } from './runners/updatePost.js' +import { clearAndSeedEverything } from './seed.js' + +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +export default buildConfigWithDefaults({ + collections: [ + { + slug: 'posts', + admin: { + useAsTitle: 'title', + }, + hooks: { + afterChange: [ + async ({ req, doc, context }) => { + await req.payload.jobs.queue({ + workflow: context.useJSONWorkflow ? 'updatePostJSONWorkflow' : 'updatePost', + input: { + post: doc.id, + message: 'hello', + }, + req, + }) + }, + ], + }, + fields: [ + { + name: 'title', + type: 'text', + required: true, + }, + { + name: 'content', + type: 'richText', + }, + { + name: 'jobStep1Ran', + type: 'text', + }, + { + name: 'jobStep2Ran', + type: 'text', + }, + ], + }, + { + slug: 'simple', + admin: { + useAsTitle: 'title', + }, + fields: [ + { + name: 'title', + type: 'text', + required: true, + }, + ], + }, + ], + admin: { + importMap: { + baseDir: path.resolve(dirname), + }, + autoLogin: { + prefillOnly: true, + email: devUser.email, + password: devUser.password, + }, + }, + jobs: { + jobsCollectionOverrides: ({ defaultJobsCollection }) => { + return { + ...defaultJobsCollection, + admin: { + ...(defaultJobsCollection?.admin || {}), + hidden: false, + }, + } + }, + tasks: [ + { + retries: 2, + slug: 'UpdatePost', + interfaceName: 'MyUpdatePostType', + inputSchema: [ + { + name: 'post', + type: 'relationship', + relationTo: 'posts', + maxDepth: 0, + required: true, + }, + { + name: 'message', + type: 'text', + required: true, + }, + ], + outputSchema: [ + { + name: 'messageTwice', + type: 'text', + required: true, + }, + ], + handler: updatePostStep1, + } as TaskConfig<'UpdatePost'>, + { + retries: 2, + slug: 'UpdatePostStep2', + inputSchema: [ + { + name: 'post', + type: 'relationship', + relationTo: 'posts', + maxDepth: 0, + required: true, + }, + { + name: 'messageTwice', + type: 'text', + required: true, + }, + ], + handler: updatePostStep2, + } as TaskConfig<'UpdatePostStep2'>, + { + retries: 3, + slug: 'CreateSimple', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + { + name: 'shouldFail', + type: 'checkbox', + }, + ], + outputSchema: [ + { + name: 'simpleID', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + if (input.shouldFail) { + throw new Error('Failed on purpose') + } + const newSimple = await req.payload.create({ + collection: 'simple', + req, + data: { + title: input.message, + }, + }) + return { + output: { + simpleID: newSimple.id, + }, + } + }, + } as TaskConfig<'CreateSimple'>, + { + retries: 2, + slug: 'CreateSimpleWithDuplicateMessage', + inputSchema: [ + { + name: 'message', + 
type: 'text', + required: true, + }, + { + name: 'shouldFail', + type: 'checkbox', + }, + ], + outputSchema: [ + { + name: 'simpleID', + type: 'text', + required: true, + }, + ], + handler: async ({ input, req }) => { + if (input.shouldFail) { + throw new Error('Failed on purpose') + } + const newSimple = await req.payload.create({ + collection: 'simple', + req, + data: { + title: input.message + input.message, + }, + }) + return { + output: { + simpleID: newSimple.id, + }, + } + }, + } as TaskConfig<'CreateSimpleWithDuplicateMessage'>, + { + retries: 2, + slug: 'ExternalTask', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + ], + outputSchema: [ + { + name: 'simpleID', + type: 'text', + required: true, + }, + ], + handler: path.resolve(dirname, 'runners/externalTask.ts') + '#externalTaskHandler', + } as TaskConfig<'ExternalTask'>, + ], + workflows: [ + { + slug: 'updatePost', + interfaceName: 'MyUpdatePostWorkflowType', + inputSchema: [ + { + name: 'post', + type: 'relationship', + relationTo: 'posts', + maxDepth: 0, + required: true, + }, + { + name: 'message', + type: 'text', + required: true, + }, + ], + handler: async ({ job, tasks }) => { + await tasks.UpdatePost('1', { + input: { + post: job.input.post, + message: job.input.message, + }, + }) + + await tasks.UpdatePostStep2('2', { + input: { + post: job.taskStatus.UpdatePost['1'].input.post, + messageTwice: job.taskStatus.UpdatePost['1'].output.messageTwice, + }, + }) + }, + } as WorkflowConfig<'updatePost'>, + { + slug: 'updatePostJSONWorkflow', + inputSchema: [ + { + name: 'post', + type: 'relationship', + relationTo: 'posts', + maxDepth: 0, + required: true, + }, + { + name: 'message', + type: 'text', + required: true, + }, + ], + handler: [ + { + task: 'UpdatePost', + id: '1', + input: ({ job }) => ({ + post: job.input.post, + message: job.input.message, + }), + }, + { + task: 'UpdatePostStep2', + id: '2', + input: ({ job }) => ({ + post: job.taskStatus.UpdatePost['1'].input.post, + messageTwice: job.taskStatus.UpdatePost['1'].output.messageTwice, + }), + condition({ job }) { + return job?.taskStatus?.UpdatePost?.['1']?.complete + }, + completesJob: true, + }, + ], + } as WorkflowConfig<'updatePostJSONWorkflow'>, + { + slug: 'retriesTest', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + ], + handler: async ({ job, tasks, req }) => { + await req.payload.update({ + collection: 'payload-jobs', + data: { + input: { + ...job.input, + amountRetried: + // @ts-expect-error amountRetried is new arbitrary data and not in the type + job.input.amountRetried !== undefined ? job.input.amountRetried + 1 : 0, + }, + }, + id: job.id, + }) + + await tasks.CreateSimple('1', { + input: { + message: job.input.message, + }, + }) + + // At this point there should always be one post created. + // job.input.amountRetried will go up to 2 as CreatePost has 2 retries + await tasks.CreateSimple('2', { + input: { + message: job.input.message, + shouldFail: true, + }, + }) + // This will never be reached + }, + } as WorkflowConfig<'retriesTest'>, + { + slug: 'retriesRollbackTest', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + ], + handler: async ({ job, inlineTask, req }) => { + await req.payload.update({ + collection: 'payload-jobs', + data: { + input: { + ...job.input, + amountRetried: + // @ts-expect-error amountRetried is new arbitrary data and not in the type + job.input.amountRetried !== undefined ? 
job.input.amountRetried + 1 : 0, + }, + }, + id: job.id, + }) + + await inlineTask('1', { + task: async ({ req }) => { + const newSimple = await req.payload.create({ + collection: 'simple', + req, + data: { + title: job.input.message, + }, + }) + return { + output: { + simpleID: newSimple.id, + }, + } + }, + }) + + await inlineTask('2', { + task: async ({ req }) => { + await req.payload.create({ + collection: 'simple', + req, + data: { + title: 'should not exist', + }, + }) + // Fail afterwards, so that we can also test that transactions work (i.e. the job is rolled back) + + throw new Error('Failed on purpose') + }, + retries: { + attempts: 4, + }, + }) + }, + } as WorkflowConfig<'retriesRollbackTest'>, + { + slug: 'retriesWorkflowLevelTest', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + ], + retries: 2, // Even though CreateSimple has 3 retries, this workflow only has 2. Thus, it will only retry once + handler: async ({ job, tasks, req }) => { + await req.payload.update({ + collection: 'payload-jobs', + data: { + input: { + ...job.input, + amountRetried: + // @ts-expect-error amountRetried is new arbitrary data and not in the type + job.input.amountRetried !== undefined ? job.input.amountRetried + 1 : 0, + }, + }, + id: job.id, + }) + + await tasks.CreateSimple('1', { + input: { + message: job.input.message, + }, + }) + + // At this point there should always be one post created. + // job.input.amountRetried will go up to 2 as CreatePost has 2 retries + await tasks.CreateSimple('2', { + input: { + message: job.input.message, + shouldFail: true, + }, + }) + // This will never be reached + }, + } as WorkflowConfig<'retriesWorkflowLevelTest'>, + { + slug: 'inlineTaskTest', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + ], + handler: async ({ job, inlineTask }) => { + await inlineTask('1', { + task: async ({ input, req }) => { + const newSimple = await req.payload.create({ + collection: 'simple', + req, + data: { + title: input.message, + }, + }) + return { + output: { + simpleID: newSimple.id, + }, + } + }, + input: { + message: job.input.message, + }, + }) + }, + } as WorkflowConfig<'inlineTaskTest'>, + { + slug: 'externalWorkflow', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + ], + handler: path.resolve(dirname, 'runners/externalWorkflow.ts') + '#externalWorkflowHandler', + } as WorkflowConfig<'externalWorkflow'>, + { + slug: 'retriesBackoffTest', + inputSchema: [ + { + name: 'message', + type: 'text', + required: true, + }, + ], + handler: async ({ job, inlineTask, req }) => { + const newJob = await req.payload.update({ + collection: 'payload-jobs', + data: { + input: { + ...job.input, + amountRetried: + // @ts-expect-error amountRetried is new arbitrary data and not in the type + job.input.amountRetried !== undefined ? 
job.input.amountRetried + 1 : 0, + }, + }, + id: job.id, + }) + job.input = newJob.input as any + + await inlineTask('1', { + task: async ({ req }) => { + const totalTried = job?.taskStatus?.inline?.['1']?.totalTried || 0 + + const { id } = await req.payload.create({ + collection: 'simple', + req, + data: { + title: 'should not exist', + }, + }) + + // @ts-expect-error timeTried is new arbitrary data and not in the type + if (!job.input.timeTried) { + // @ts-expect-error timeTried is new arbitrary data and not in the type + job.input.timeTried = {} + } + + // @ts-expect-error timeTried is new arbitrary data and not in the type + job.input.timeTried[totalTried] = new Date().toISOString() + + await req.payload.update({ + collection: 'payload-jobs', + data: { + input: job.input, + }, + id: job.id, + }) + + if (totalTried < 4) { + // Cleanup the post + await req.payload.delete({ + collection: 'simple', + id, + req, + }) + + // Last try it should succeed + throw new Error('Failed on purpose') + } + return { + output: {}, + } + }, + retries: { + attempts: 4, + backoff: { + type: 'exponential', + // Should retry in 300ms, then 600, then 1200, then 2400, then succeed + delay: 300, + }, + }, + }) + }, + } as WorkflowConfig<'retriesBackoffTest'>, + ], + }, + editor: lexicalEditor(), + onInit: async (payload) => { + if (process.env.SEED_IN_CONFIG_ONINIT !== 'false') { + await clearAndSeedEverything(payload) + } + }, + typescript: { + outputFile: path.resolve(dirname, 'payload-types.ts'), + }, +}) diff --git a/test/queues/e2e.spec.ts b/test/queues/e2e.spec.ts new file mode 100644 index 00000000000..26b9030bbf2 --- /dev/null +++ b/test/queues/e2e.spec.ts @@ -0,0 +1,65 @@ +import type { Page } from '@playwright/test' + +import { expect, test } from '@playwright/test' +import * as path from 'path' +import { fileURLToPath } from 'url' + +import type { PayloadTestSDK } from '../helpers/sdk/index.js' +import type { Config } from './payload-types.js' + +import { ensureCompilationIsDone, initPageConsoleErrorCatch } from '../helpers.js' +import { AdminUrlUtil } from '../helpers/adminUrlUtil.js' +import { initPayloadE2ENoConfig } from '../helpers/initPayloadE2ENoConfig.js' +import { reInitializeDB } from '../helpers/reInitializeDB.js' +import { RESTClient } from '../helpers/rest.js' +import { TEST_TIMEOUT } from '../playwright.config.js' + +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) +let serverURL: string +let payload: PayloadTestSDK +let client: RESTClient + +test.describe('Queues', () => { + let page: Page + let url: AdminUrlUtil + + test.beforeAll(async ({ browser }, testInfo) => { + testInfo.setTimeout(TEST_TIMEOUT) + process.env.SEED_IN_CONFIG_ONINIT = 'false' // Makes it so the payload config onInit seed is not run. 
Otherwise, the seed would be run unnecessarily twice for the initial test run - once for beforeEach and once for onInit
+    ;({ payload, serverURL } = await initPayloadE2ENoConfig({ dirname }))
+    url = new AdminUrlUtil(serverURL, 'payload-jobs')
+
+    const context = await browser.newContext()
+    page = await context.newPage()
+    initPageConsoleErrorCatch(page)
+    await reInitializeDB({
+      serverURL,
+      snapshotKey: 'queuesTest',
+    })
+    await ensureCompilationIsDone({ page, serverURL })
+  })
+
+  test.beforeEach(async () => {
+    await reInitializeDB({
+      serverURL,
+      snapshotKey: 'queuesTest',
+      uploadsDir: path.resolve(dirname, './collections/Upload/uploads'),
+    })
+
+    if (client) {
+      await client.logout()
+    }
+    client = new RESTClient(null, { defaultSlug: 'users', serverURL })
+    await client.login()
+
+    await ensureCompilationIsDone({ page, serverURL })
+  })
+
+  test('example test', async () => {
+    await page.goto(url.list)
+
+    const textCell = page.locator('.row-1 .cell-text')
+    await expect(textCell).toHaveText('example post')
+  })
+})
diff --git a/test/queues/eslint.config.js b/test/queues/eslint.config.js
new file mode 100644
index 00000000000..53d58f1641d
--- /dev/null
+++ b/test/queues/eslint.config.js
@@ -0,0 +1,20 @@
+import { rootParserOptions } from '../../eslint.config.js'
+import { testEslintConfig } from '../eslint.config.js'
+
+/** @typedef {import('eslint').Linter.FlatConfig} */
+let FlatConfig
+
+/** @type {FlatConfig[]} */
+export const index = [
+  ...testEslintConfig,
+  {
+    languageOptions: {
+      parserOptions: {
+        ...rootParserOptions,
+        tsconfigRootDir: import.meta.dirname,
+      },
+    },
+  },
+]
+
+export default index
diff --git a/test/queues/int.spec.ts b/test/queues/int.spec.ts
new file mode 100644
index 00000000000..186379af328
--- /dev/null
+++ b/test/queues/int.spec.ts
@@ -0,0 +1,668 @@
+import type { JobTaskStatus, Payload } from 'payload'
+
+import path from 'path'
+import { fileURLToPath } from 'url'
+
+import type { NextRESTClient } from '../helpers/NextRESTClient.js'
+
+import { devUser } from '../credentials.js'
+import { initPayloadInt } from '../helpers/initPayloadInt.js'
+import { clearAndSeedEverything } from './seed.js'
+
+let payload: Payload
+let restClient: NextRESTClient
+let token: string
+
+const { email, password } = devUser
+const filename = fileURLToPath(import.meta.url)
+const dirname = path.dirname(filename)
+
+describe('Queues', () => {
+  beforeAll(async () => {
+    process.env.SEED_IN_CONFIG_ONINIT = 'false' // Makes it so the payload config onInit seed is not run.
Otherwise, the seed would be run unnecessarily twice for the initial test run - once for beforeEach and once for onInit + ;({ payload, restClient } = await initPayloadInt(dirname)) + }) + + afterAll(async () => { + if (typeof payload.db.destroy === 'function') { + await payload.db.destroy() + } + }) + + beforeEach(async () => { + await clearAndSeedEverything(payload) + const data = await restClient + .POST('/users/login', { + body: JSON.stringify({ + email, + password, + }), + }) + .then((res) => res.json()) + + if (data.token) { + token = data.token + } + }) + + it('will run access control on jobs runner', async () => { + const response = await restClient.GET('/payload-jobs/run', { + headers: { + // Authorization: `JWT ${token}`, + }, + }) // Needs to be a rest call to test auth + expect(response.status).toBe(401) + }) + + it('will return 200 from jobs runner', async () => { + const response = await restClient.GET('/payload-jobs/run', { + headers: { + Authorization: `JWT ${token}`, + }, + }) // Needs to be a rest call to test auth + + expect(response.status).toBe(200) + }) + + // There used to be a bug in payload where updating the job threw the following error - only in + // postgres: + // QueryError: The following path cannot be queried: document.relationTo + // This test is to ensure that the bug is fixed + it('can create and update new jobs', async () => { + const job = await payload.create({ + collection: 'payload-jobs', + data: { + input: { + message: '1', + }, + }, + }) + // @ts-expect-error + expect(job.input.message).toBe('1') + + const updatedJob = await payload.update({ + collection: 'payload-jobs', + id: job.id, + data: { + input: { + message: '2', + }, + }, + }) + // @ts-expect-error + expect(updatedJob.input.message).toBe('2') + }) + + it('can create new jobs', async () => { + const newPost = await payload.create({ + collection: 'posts', + data: { + title: 'my post', + }, + }) + + const retrievedPost = await payload.findByID({ + collection: 'posts', + id: newPost.id, + }) + + expect(retrievedPost.jobStep1Ran).toBeFalsy() + expect(retrievedPost.jobStep2Ran).toBeFalsy() + + await payload.jobs.run() + + const postAfterJobs = await payload.findByID({ + collection: 'posts', + id: newPost.id, + }) + + expect(postAfterJobs.jobStep1Ran).toBe('hello') + expect(postAfterJobs.jobStep2Ran).toBe('hellohellohellohello') + }) + + it('can create new JSON-workflow jobs', async () => { + const newPost = await payload.create({ + collection: 'posts', + data: { + title: 'my post', + }, + context: { + useJSONWorkflow: true, + }, + }) + + const retrievedPost = await payload.findByID({ + collection: 'posts', + id: newPost.id, + }) + + expect(retrievedPost.jobStep1Ran).toBeFalsy() + expect(retrievedPost.jobStep2Ran).toBeFalsy() + + await payload.jobs.run() + + const postAfterJobs = await payload.findByID({ + collection: 'posts', + id: newPost.id, + }) + + expect(postAfterJobs.jobStep1Ran).toBe('hello') + expect(postAfterJobs.jobStep2Ran).toBe('hellohellohellohello') + }) + + it('ensure job retrying works', async () => { + const job = await payload.jobs.queue({ + workflow: 'retriesTest', + input: { + message: 'hello', + }, + }) + + let hasJobsRemaining = true + + while (hasJobsRemaining) { + const response = await payload.jobs.run() + + if (response.noJobsRemaining) { + hasJobsRemaining = false + } + } + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + + const jobAfterRun = await payload.findByID({ + collection: 
'payload-jobs', + id: job.id, + }) + + // @ts-expect-error amountRetried is new arbitrary data and not in the type + expect(jobAfterRun.input.amountRetried).toBe(3) + }) + + it('ensure workflow-level retries are respected', async () => { + const job = await payload.jobs.queue({ + workflow: 'retriesWorkflowLevelTest', + input: { + message: 'hello', + }, + }) + + let hasJobsRemaining = true + + while (hasJobsRemaining) { + const response = await payload.jobs.run() + + if (response.noJobsRemaining) { + hasJobsRemaining = false + } + } + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + + const jobAfterRun = await payload.findByID({ + collection: 'payload-jobs', + id: job.id, + }) + + // @ts-expect-error amountRetried is new arbitrary data and not in the type + expect(jobAfterRun.input.amountRetried).toBe(2) + }) + + /* + // Task rollbacks are not supported in the current version of Payload. This test will be re-enabled when task rollbacks are supported once we figure out the transaction issues + it('ensure failed tasks are rolled back via transactions', async () => { + const job = await payload.jobs.queue({ + workflow: 'retriesRollbackTest', + input: { + message: 'hello', + }, + }) + + let hasJobsRemaining = true + + while (hasJobsRemaining) { + const response = await payload.jobs.run() + + if (response.noJobsRemaining) { + hasJobsRemaining = false + } + } + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) // Failure happens after task creates a simple document, but still within the task => any document creation should be rolled back + + const jobAfterRun = await payload.findByID({ + collection: 'payload-jobs', + id: job.id, + }) + + // @ts-expect-error amountRetried is new arbitrary data and not in the type + expect(jobAfterRun.input.amountRetried).toBe(4) + })*/ + + it('ensure backoff strategy of task is respected', async () => { + const job = await payload.jobs.queue({ + workflow: 'retriesBackoffTest', + input: { + message: 'hello', + }, + }) + + let hasJobsRemaining = true + let firstGotNoJobs = null + + const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)) + + // Keep running until no jobs found. 
If no jobs found, wait for 1.6 seconds to see if any new jobs are added + // (Specifically here we want to see if the backoff strategy is respected and thus need to wait for `waitUntil`) + while ( + hasJobsRemaining || + !firstGotNoJobs || + new Date().getTime() - firstGotNoJobs.getTime() < 3000 + ) { + const response = await payload.jobs.run() + + if (response.noJobsRemaining) { + if (hasJobsRemaining) { + firstGotNoJobs = new Date() + hasJobsRemaining = false + } + } else { + firstGotNoJobs = null + hasJobsRemaining = true + } + + // Add a 100ms delay before the next iteration + await delay(100) + } + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + + const jobAfterRun = await payload.findByID({ + collection: 'payload-jobs', + id: job.id, + }) + expect(jobAfterRun.totalTried).toBe(5) + expect((jobAfterRun.taskStatus as JobTaskStatus).inline['1'].totalTried).toBe(5) + + // @ts-expect-error amountRetried is new arbitrary data and not in the type + expect(jobAfterRun.input.amountRetried).toBe(4) + + /* + Job.input.timeTried may look something like this: + timeTried: { + '0': '2024-10-07T16:05:49.300Z', + '1': '2024-10-07T16:05:49.469Z', + '2': '2024-10-07T16:05:49.779Z', + '3': '2024-10-07T16:05:50.388Z', + '4': '2024-10-07T16:05:51.597Z' + } + Convert this into an array, each item is the duration between the fails. So this should have 4 items + */ + const timeTried: { + [key: string]: string + // @ts-expect-error timeTried is new arbitrary data and not in the type + } = jobAfterRun.input.timeTried + + const durations = Object.values(timeTried) + .map((time, index, arr) => { + if (index === arr.length - 1) { + return null + } + return new Date(arr[index + 1]).getTime() - new Date(time).getTime() + }) + .filter((p) => p !== null) + + expect(durations).toHaveLength(4) + expect(durations[0]).toBeGreaterThan(300) + expect(durations[1]).toBeGreaterThan(600) + expect(durations[2]).toBeGreaterThan(1200) + expect(durations[3]).toBeGreaterThan(2400) + }) + + it('can create new inline jobs', async () => { + await payload.jobs.queue({ + workflow: 'inlineTaskTest', + input: { + message: 'hello!', + }, + }) + + await payload.jobs.run() + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples.docs[0].title).toBe('hello!') + }) + + it('can queue single tasks', async () => { + await payload.jobs.queue({ + task: 'CreateSimple', + input: { + message: 'from single task', + }, + }) + + await payload.jobs.run() + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples.docs[0].title).toBe('from single task') + }) + + /* + // Task rollbacks are not supported in the current version of Payload. This test will be re-enabled when task rollbacks are supported once we figure out the transaction issues + it('transaction test against payload-jobs collection', async () => { + // This kinds of emulates what happens when multiple jobs are queued and then run in parallel. 
+ const runWorkflowFN = async (i: number) => { + const { id } = await payload.create({ + collection: 'payload-jobs', + data: { + input: { + message: 'Number ' + i, + }, + taskSlug: 'CreateSimple', + }, + }) + + const _req = await createLocalReq({}, payload) + const t1Req = isolateObjectProperty(_req, 'transactionID') + delete t1Req.transactionID + + await initTransaction(t1Req) + + await payload.update({ + collection: 'payload-jobs', + id, + req: t1Req, + data: { + input: { + message: 'Number ' + i + ' Update 1', + }, + processing: true, + taskSlug: 'CreateSimple', + }, + }) + + /** + * T1 start + */ + /* + const t2Req = isolateObjectProperty(t1Req, 'transactionID') + delete t2Req.transactionID + // + await initTransaction(t2Req) + + await payload.update({ + collection: 'payload-jobs', + id, + req: t1Req, + data: { + input: { + message: 'Number ' + i + ' Update 2', + }, + processing: true, + taskSlug: 'CreateSimple', + }, + }) + + await payload.create({ + collection: 'simple', + req: t2Req, + data: { + title: 'from single task', + }, + }) + + await payload.update({ + collection: 'payload-jobs', + id, + req: t1Req, + data: { + input: { + message: 'Number ' + i + ' Update 3', + }, + processing: true, + taskSlug: 'CreateSimple', + }, + }) + + await commitTransaction(t2Req) + + /** + * T1 end + */ + /* + await payload.update({ + collection: 'payload-jobs', + id, + req: t1Req, + data: { + input: { + message: 'Number ' + i + ' Update 4', + }, + processing: true, + taskSlug: 'CreateSimple', + }, + }) + await commitTransaction(t1Req) + } + + await Promise.all( + new Array(30).fill(0).map(async (_, i) => { + await runWorkflowFN(i) + }), + ) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(30) + })*/ + + it('can queue single tasks 8 times', async () => { + for (let i = 0; i < 8; i++) { + await payload.jobs.queue({ + task: 'CreateSimple', + input: { + message: 'from single task', + }, + }) + } + + await payload.jobs.run() + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(8) + expect(allSimples.docs[0].title).toBe('from single task') + expect(allSimples.docs[7].title).toBe('from single task') + }) + + it('can queue single tasks 500 times', async () => { + for (let i = 0; i < 500; i++) { + await payload.jobs.queue({ + task: 'CreateSimple', + input: { + message: 'from single task', + }, + }) + } + + await payload.jobs.run({ + limit: 1000, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 1000, + }) + + expect(allSimples.totalDocs).toBe(500) // Default limit: 10 + expect(allSimples.docs[0].title).toBe('from single task') + expect(allSimples.docs[490].title).toBe('from single task') + }) + + it('ensure default jobs run limit of 10 works', async () => { + for (let i = 0; i < 500; i++) { + await payload.jobs.queue({ + task: 'CreateSimple', + input: { + message: 'from single task', + }, + }) + } + + await payload.jobs.run() + + const allSimples = await payload.find({ + collection: 'simple', + limit: 1000, + }) + + expect(allSimples.totalDocs).toBe(10) // Default limit: 10 + expect(allSimples.docs[0].title).toBe('from single task') + expect(allSimples.docs[9].title).toBe('from single task') + }) + + it('ensure jobs run limit can be customized', async () => { + for (let i = 0; i < 500; i++) { + await payload.jobs.queue({ + task: 'CreateSimple', + input: { + message: 'from single task', + }, + }) + } + + await payload.jobs.run({ + 
limit: 42, + }) + + const allSimples = await payload.find({ + collection: 'simple', + limit: 1000, + }) + + expect(allSimples.totalDocs).toBe(42) // Default limit: 10 + expect(allSimples.docs[0].title).toBe('from single task') + expect(allSimples.docs[30].title).toBe('from single task') + expect(allSimples.docs[41].title).toBe('from single task') + }) + + it('can queue different kinds of single tasks multiple times', async () => { + for (let i = 0; i < 3; i++) { + await payload.jobs.queue({ + task: 'CreateSimpleWithDuplicateMessage', + input: { + message: 'hello', + }, + }) + await payload.jobs.queue({ + task: 'CreateSimple', + input: { + message: 'from single task', + }, + }) + await payload.jobs.queue({ + task: 'CreateSimpleWithDuplicateMessage', + input: { + message: 'hello', + }, + }) + } + + await payload.jobs.run() + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(9) + + let amountOfCreateSimple = 0 + let amountOfCreateSimpleWithDuplicateMessage = 0 + + for (const simple of allSimples.docs) { + if (simple.title === 'from single task') { + amountOfCreateSimple++ + } else if (simple.title === 'hellohello') { + amountOfCreateSimpleWithDuplicateMessage++ + } + } + + expect(amountOfCreateSimple).toBe(3) + expect(amountOfCreateSimpleWithDuplicateMessage).toBe(6) + }) + + it('can queue external tasks', async () => { + await payload.jobs.queue({ + task: 'ExternalTask', + input: { + message: 'external', + }, + }) + + await payload.jobs.run() + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples.docs[0].title).toBe('external') + }) + + it('can queue external workflow that is running external task', async () => { + await payload.jobs.queue({ + workflow: 'externalWorkflow', + input: { + message: 'externalWorkflow', + }, + }) + + await payload.jobs.run() + + const allSimples = await payload.find({ + collection: 'simple', + limit: 100, + }) + + expect(allSimples.totalDocs).toBe(1) + expect(allSimples.docs[0].title).toBe('externalWorkflow') + }) +}) diff --git a/test/queues/payload-types.ts b/test/queues/payload-types.ts new file mode 100644 index 00000000000..8049487c2c3 --- /dev/null +++ b/test/queues/payload-types.ts @@ -0,0 +1,446 @@ +/* tslint:disable */ +/* eslint-disable */ +/** + * This file was automatically generated by Payload. + * DO NOT MODIFY IT BY HAND. Instead, modify your source Payload config, + * and re-run `payload generate:types` to regenerate this file. 
+ */ + +export interface Config { + auth: { + users: UserAuthOperations; + }; + collections: { + posts: Post; + simple: Simple; + users: User; + 'payload-jobs': PayloadJob; + 'payload-locked-documents': PayloadLockedDocument; + 'payload-preferences': PayloadPreference; + 'payload-migrations': PayloadMigration; + }; + db: { + defaultIDType: string; + }; + globals: {}; + locale: null; + user: User & { + collection: 'users'; + }; + jobs?: { + tasks: { + UpdatePost: MyUpdatePostType; + UpdatePostStep2: TaskUpdatePostStep2; + CreateSimple: TaskCreateSimple; + CreateSimpleWithDuplicateMessage: TaskCreateSimpleWithDuplicateMessage; + ExternalTask: TaskExternalTask; + inline?: { + input: unknown; + output: unknown; + }; + }; + workflows?: { + updatePost?: MyUpdatePostWorkflowType; + updatePostJSONWorkflow?: WorkflowUpdatePostJSONWorkflow; + retriesTest?: WorkflowRetriesTest; + retriesRollbackTest?: WorkflowRetriesRollbackTest; + retriesWorkflowLevelTest?: WorkflowRetriesWorkflowLevelTest; + inlineTaskTest?: WorkflowInlineTaskTest; + externalWorkflow?: WorkflowExternalWorkflow; + retriesBackoffTest?: WorkflowRetriesBackoffTest; + }; + }; +} +export interface UserAuthOperations { + forgotPassword: { + email: string; + password: string; + }; + login: { + email: string; + password: string; + }; + registerFirstUser: { + email: string; + password: string; + }; + unlock: { + email: string; + password: string; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "posts". + */ +export interface Post { + id: string; + title: string; + content?: { + root: { + type: string; + children: { + type: string; + version: number; + [k: string]: unknown; + }[]; + direction: ('ltr' | 'rtl') | null; + format: 'left' | 'start' | 'center' | 'right' | 'end' | 'justify' | ''; + indent: number; + version: number; + }; + [k: string]: unknown; + } | null; + jobStep1Ran?: string | null; + jobStep2Ran?: string | null; + updatedAt: string; + createdAt: string; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "simple". + */ +export interface Simple { + id: string; + title: string; + updatedAt: string; + createdAt: string; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "users". + */ +export interface User { + id: string; + updatedAt: string; + createdAt: string; + email: string; + resetPasswordToken?: string | null; + resetPasswordExpiration?: string | null; + salt?: string | null; + hash?: string | null; + loginAttempts?: number | null; + lockUntil?: string | null; + password?: string | null; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "payload-jobs". 
+ */ +export interface PayloadJob { + id: string; + input?: + | { + [k: string]: unknown; + } + | unknown[] + | string + | number + | boolean + | null; + taskStatus?: + | { + [k: string]: unknown; + } + | unknown[] + | string + | number + | boolean + | null; + completedAt?: string | null; + totalTried?: number | null; + hasError?: boolean | null; + error?: + | { + [k: string]: unknown; + } + | unknown[] + | string + | number + | boolean + | null; + log?: + | { + executedAt: string; + completedAt: string; + taskSlug: + | 'inline' + | 'UpdatePost' + | 'UpdatePostStep2' + | 'CreateSimple' + | 'CreateSimpleWithDuplicateMessage' + | 'ExternalTask'; + taskID: string; + input?: + | { + [k: string]: unknown; + } + | unknown[] + | string + | number + | boolean + | null; + output?: + | { + [k: string]: unknown; + } + | unknown[] + | string + | number + | boolean + | null; + state: 'failed' | 'succeeded'; + error?: + | { + [k: string]: unknown; + } + | unknown[] + | string + | number + | boolean + | null; + id?: string | null; + }[] + | null; + workflowSlug?: + | ( + | 'updatePost' + | 'updatePostJSONWorkflow' + | 'retriesTest' + | 'retriesRollbackTest' + | 'retriesWorkflowLevelTest' + | 'inlineTaskTest' + | 'externalWorkflow' + | 'retriesBackoffTest' + ) + | null; + taskSlug?: + | ( + | 'inline' + | 'UpdatePost' + | 'UpdatePostStep2' + | 'CreateSimple' + | 'CreateSimpleWithDuplicateMessage' + | 'ExternalTask' + ) + | null; + queue?: 'default' | null; + waitUntil?: string | null; + processing?: boolean | null; + updatedAt: string; + createdAt: string; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "payload-locked-documents". + */ +export interface PayloadLockedDocument { + id: string; + document?: + | ({ + relationTo: 'posts'; + value: string | Post; + } | null) + | ({ + relationTo: 'simple'; + value: string | Simple; + } | null) + | ({ + relationTo: 'users'; + value: string | User; + } | null) + | ({ + relationTo: 'payload-jobs'; + value: string | PayloadJob; + } | null); + globalSlug?: string | null; + user: { + relationTo: 'users'; + value: string | User; + }; + updatedAt: string; + createdAt: string; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "payload-preferences". + */ +export interface PayloadPreference { + id: string; + user: { + relationTo: 'users'; + value: string | User; + }; + key?: string | null; + value?: + | { + [k: string]: unknown; + } + | unknown[] + | string + | number + | boolean + | null; + updatedAt: string; + createdAt: string; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "payload-migrations". + */ +export interface PayloadMigration { + id: string; + name?: string | null; + batch?: number | null; + updatedAt: string; + createdAt: string; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "MyUpdatePostType". + */ +export interface MyUpdatePostType { + input: { + post: string | Post; + message: string; + }; + output: { + messageTwice: string; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "TaskUpdatePostStep2". + */ +export interface TaskUpdatePostStep2 { + input: { + post: string | Post; + messageTwice: string; + }; + output?: unknown; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "TaskCreateSimple". 
+ */ +export interface TaskCreateSimple { + input: { + message: string; + shouldFail?: boolean | null; + }; + output: { + simpleID: string; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "TaskCreateSimpleWithDuplicateMessage". + */ +export interface TaskCreateSimpleWithDuplicateMessage { + input: { + message: string; + shouldFail?: boolean | null; + }; + output: { + simpleID: string; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "TaskExternalTask". + */ +export interface TaskExternalTask { + input: { + message: string; + }; + output: { + simpleID: string; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "MyUpdatePostWorkflowType". + */ +export interface MyUpdatePostWorkflowType { + input: { + post: string | Post; + message: string; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "WorkflowUpdatePostJSONWorkflow". + */ +export interface WorkflowUpdatePostJSONWorkflow { + input: { + post: string | Post; + message: string; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "WorkflowRetriesTest". + */ +export interface WorkflowRetriesTest { + input: { + message: string; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "WorkflowRetriesRollbackTest". + */ +export interface WorkflowRetriesRollbackTest { + input: { + message: string; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "WorkflowRetriesWorkflowLevelTest". + */ +export interface WorkflowRetriesWorkflowLevelTest { + input: { + message: string; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "WorkflowInlineTaskTest". + */ +export interface WorkflowInlineTaskTest { + input: { + message: string; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "WorkflowExternalWorkflow". + */ +export interface WorkflowExternalWorkflow { + input: { + message: string; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "WorkflowRetriesBackoffTest". + */ +export interface WorkflowRetriesBackoffTest { + input: { + message: string; + }; +} +/** + * This interface was referenced by `Config`'s JSON-Schema + * via the `definition` "auth". 
+ */ +export interface Auth { + [k: string]: unknown; +} + + +declare module 'payload' { + // @ts-ignore + export interface GeneratedTypes extends Config {} +} \ No newline at end of file diff --git a/test/queues/runners/externalTask.ts b/test/queues/runners/externalTask.ts new file mode 100644 index 00000000000..1e721bb64cb --- /dev/null +++ b/test/queues/runners/externalTask.ts @@ -0,0 +1,16 @@ +import type { TaskHandler } from 'payload' + +export const externalTaskHandler: TaskHandler<'ExternalTask'> = async ({ input, req }) => { + const newSimple = await req.payload.create({ + collection: 'simple', + req, + data: { + title: input.message, + }, + }) + return { + output: { + simpleID: newSimple.id, + }, + } +} diff --git a/test/queues/runners/externalWorkflow.ts b/test/queues/runners/externalWorkflow.ts new file mode 100644 index 00000000000..b0d381930ed --- /dev/null +++ b/test/queues/runners/externalWorkflow.ts @@ -0,0 +1,12 @@ +import type { WorkflowHandler } from 'payload' + +export const externalWorkflowHandler: WorkflowHandler<'externalWorkflow'> = async ({ + job, + tasks, +}) => { + await tasks.ExternalTask('1', { + input: { + message: job.input.message, + }, + }) +} diff --git a/test/queues/runners/updatePost.ts b/test/queues/runners/updatePost.ts new file mode 100644 index 00000000000..c9dff56a048 --- /dev/null +++ b/test/queues/runners/updatePost.ts @@ -0,0 +1,55 @@ +import type { TaskHandler } from 'payload' + +export const updatePostStep1: TaskHandler<'UpdatePost'> = async ({ req, input }) => { + const postID = + typeof input.post === 'string' || typeof input.post === 'number' ? input.post : input.post.id + + if (!postID) { + return { + state: 'failed', + output: null, + } + } + + await req.payload.update({ + collection: 'posts', + id: postID, + req, + data: { + jobStep1Ran: input.message, + }, + }) + + return { + state: 'succeeded', + output: { + messageTwice: input.message + input.message, + }, + } +} + +export const updatePostStep2: TaskHandler<'UpdatePostStep2'> = async ({ req, input, job }) => { + const postID = + typeof input.post === 'string' || typeof input.post === 'number' ? 
input.post : input.post.id + + if (!postID) { + return { + state: 'failed', + output: null, + } + } + + await req.payload.update({ + collection: 'posts', + id: postID, + req, + data: { + jobStep2Ran: input.messageTwice + job.taskStatus.UpdatePost['1'].output.messageTwice, + }, + }) + + return { + state: 'succeeded', + output: null, + } +} diff --git a/test/queues/schema.graphql b/test/queues/schema.graphql new file mode 100644 index 00000000000..1038d843d62 --- /dev/null +++ b/test/queues/schema.graphql @@ -0,0 +1,1902 @@ +type Query { + Post(id: String!, draft: Boolean): Post + Posts(draft: Boolean, where: Post_where, limit: Int, page: Int, sort: String): Posts + countPosts(draft: Boolean, where: Post_where): countPosts + docAccessPost(id: String!): postsDocAccess + versionPost(id: String): PostVersion + versionsPosts(where: versionsPost_where, limit: Int, page: Int, sort: String): versionsPosts + User(id: String!, draft: Boolean): User + Users(draft: Boolean, where: User_where, limit: Int, page: Int, sort: String): Users + countUsers(draft: Boolean, where: User_where): countUsers + docAccessUser(id: String!): usersDocAccess + meUser: usersMe + initializedUser: Boolean + PayloadPreference(id: String!, draft: Boolean): PayloadPreference + PayloadPreferences( + draft: Boolean + where: PayloadPreference_where + limit: Int + page: Int + sort: String + ): PayloadPreferences + countPayloadPreferences(draft: Boolean, where: PayloadPreference_where): countPayloadPreferences + docAccessPayloadPreference(id: String!): payload_preferencesDocAccess + Menu(draft: Boolean): Menu + docAccessMenu: menuDocAccess + Access: Access +} + +type Post { + id: String + text: String + richText(depth: Int): JSON + richText2(depth: Int): JSON + updatedAt: DateTime + createdAt: DateTime + _status: Post__status +} + +""" +The `JSON` scalar type represents JSON values as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). +""" +scalar JSON + @specifiedBy(url: "http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf") + +""" +A date-time string at UTC, such as 2007-12-03T10:15:30Z, compliant with the `date-time` format outlined in section 5.6 of the RFC 3339 profile of the ISO 8601 standard for representation of dates and times using the Gregorian calendar. 
+""" +scalar DateTime + +enum Post__status { + draft + published +} + +type Posts { + docs: [Post] + hasNextPage: Boolean + hasPrevPage: Boolean + limit: Int + nextPage: Int + offset: Int + page: Int + pagingCounter: Int + prevPage: Int + totalDocs: Int + totalPages: Int +} + +input Post_where { + text: Post_text_operator + richText: Post_richText_operator + richText2: Post_richText2_operator + updatedAt: Post_updatedAt_operator + createdAt: Post_createdAt_operator + _status: Post__status_operator + id: Post_id_operator + AND: [Post_where_and] + OR: [Post_where_or] +} + +input Post_text_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input Post_richText_operator { + equals: JSON + not_equals: JSON + like: JSON + contains: JSON + exists: Boolean +} + +input Post_richText2_operator { + equals: JSON + not_equals: JSON + like: JSON + contains: JSON + exists: Boolean +} + +input Post_updatedAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input Post_createdAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input Post__status_operator { + equals: Post__status_Input + not_equals: Post__status_Input + in: [Post__status_Input] + not_in: [Post__status_Input] + all: [Post__status_Input] + exists: Boolean +} + +enum Post__status_Input { + draft + published +} + +input Post_id_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input Post_where_and { + text: Post_text_operator + richText: Post_richText_operator + richText2: Post_richText2_operator + updatedAt: Post_updatedAt_operator + createdAt: Post_createdAt_operator + _status: Post__status_operator + id: Post_id_operator + AND: [Post_where_and] + OR: [Post_where_or] +} + +input Post_where_or { + text: Post_text_operator + richText: Post_richText_operator + richText2: Post_richText2_operator + updatedAt: Post_updatedAt_operator + createdAt: Post_createdAt_operator + _status: Post__status_operator + id: Post_id_operator + AND: [Post_where_and] + OR: [Post_where_or] +} + +type countPosts { + totalDocs: Int +} + +type postsDocAccess { + fields: PostsDocAccessFields + create: PostsCreateDocAccess + read: PostsReadDocAccess + update: PostsUpdateDocAccess + delete: PostsDeleteDocAccess + readVersions: PostsReadVersionsDocAccess +} + +type PostsDocAccessFields { + text: PostsDocAccessFields_text + richText: PostsDocAccessFields_richText + richText2: PostsDocAccessFields_richText2 + updatedAt: PostsDocAccessFields_updatedAt + createdAt: PostsDocAccessFields_createdAt + _status: PostsDocAccessFields__status +} + +type PostsDocAccessFields_text { + create: PostsDocAccessFields_text_Create + read: PostsDocAccessFields_text_Read + update: PostsDocAccessFields_text_Update + delete: PostsDocAccessFields_text_Delete +} + +type PostsDocAccessFields_text_Create { + permission: Boolean! +} + +type PostsDocAccessFields_text_Read { + permission: Boolean! +} + +type PostsDocAccessFields_text_Update { + permission: Boolean! +} + +type PostsDocAccessFields_text_Delete { + permission: Boolean! 
+} + +type PostsDocAccessFields_richText { + create: PostsDocAccessFields_richText_Create + read: PostsDocAccessFields_richText_Read + update: PostsDocAccessFields_richText_Update + delete: PostsDocAccessFields_richText_Delete +} + +type PostsDocAccessFields_richText_Create { + permission: Boolean! +} + +type PostsDocAccessFields_richText_Read { + permission: Boolean! +} + +type PostsDocAccessFields_richText_Update { + permission: Boolean! +} + +type PostsDocAccessFields_richText_Delete { + permission: Boolean! +} + +type PostsDocAccessFields_richText2 { + create: PostsDocAccessFields_richText2_Create + read: PostsDocAccessFields_richText2_Read + update: PostsDocAccessFields_richText2_Update + delete: PostsDocAccessFields_richText2_Delete +} + +type PostsDocAccessFields_richText2_Create { + permission: Boolean! +} + +type PostsDocAccessFields_richText2_Read { + permission: Boolean! +} + +type PostsDocAccessFields_richText2_Update { + permission: Boolean! +} + +type PostsDocAccessFields_richText2_Delete { + permission: Boolean! +} + +type PostsDocAccessFields_updatedAt { + create: PostsDocAccessFields_updatedAt_Create + read: PostsDocAccessFields_updatedAt_Read + update: PostsDocAccessFields_updatedAt_Update + delete: PostsDocAccessFields_updatedAt_Delete +} + +type PostsDocAccessFields_updatedAt_Create { + permission: Boolean! +} + +type PostsDocAccessFields_updatedAt_Read { + permission: Boolean! +} + +type PostsDocAccessFields_updatedAt_Update { + permission: Boolean! +} + +type PostsDocAccessFields_updatedAt_Delete { + permission: Boolean! +} + +type PostsDocAccessFields_createdAt { + create: PostsDocAccessFields_createdAt_Create + read: PostsDocAccessFields_createdAt_Read + update: PostsDocAccessFields_createdAt_Update + delete: PostsDocAccessFields_createdAt_Delete +} + +type PostsDocAccessFields_createdAt_Create { + permission: Boolean! +} + +type PostsDocAccessFields_createdAt_Read { + permission: Boolean! +} + +type PostsDocAccessFields_createdAt_Update { + permission: Boolean! +} + +type PostsDocAccessFields_createdAt_Delete { + permission: Boolean! +} + +type PostsDocAccessFields__status { + create: PostsDocAccessFields__status_Create + read: PostsDocAccessFields__status_Read + update: PostsDocAccessFields__status_Update + delete: PostsDocAccessFields__status_Delete +} + +type PostsDocAccessFields__status_Create { + permission: Boolean! +} + +type PostsDocAccessFields__status_Read { + permission: Boolean! +} + +type PostsDocAccessFields__status_Update { + permission: Boolean! +} + +type PostsDocAccessFields__status_Delete { + permission: Boolean! +} + +type PostsCreateDocAccess { + permission: Boolean! + where: JSONObject +} + +""" +The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf). +""" +scalar JSONObject + @specifiedBy(url: "http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf") + +type PostsReadDocAccess { + permission: Boolean! + where: JSONObject +} + +type PostsUpdateDocAccess { + permission: Boolean! + where: JSONObject +} + +type PostsDeleteDocAccess { + permission: Boolean! + where: JSONObject +} + +type PostsReadVersionsDocAccess { + permission: Boolean! 
+ where: JSONObject +} + +type PostVersion { + parent(draft: Boolean): Post + version: PostVersion_Version + createdAt: DateTime + updatedAt: DateTime + latest: Boolean + id: String +} + +type PostVersion_Version { + text: String + richText(depth: Int): JSON + richText2(depth: Int): JSON + updatedAt: DateTime + createdAt: DateTime + _status: PostVersion_Version__status +} + +enum PostVersion_Version__status { + draft + published +} + +type versionsPosts { + docs: [PostVersion] + hasNextPage: Boolean + hasPrevPage: Boolean + limit: Int + nextPage: Int + offset: Int + page: Int + pagingCounter: Int + prevPage: Int + totalDocs: Int + totalPages: Int +} + +input versionsPost_where { + parent: versionsPost_parent_operator + version__text: versionsPost_version__text_operator + version__richText: versionsPost_version__richText_operator + version__richText2: versionsPost_version__richText2_operator + version__updatedAt: versionsPost_version__updatedAt_operator + version__createdAt: versionsPost_version__createdAt_operator + version___status: versionsPost_version___status_operator + createdAt: versionsPost_createdAt_operator + updatedAt: versionsPost_updatedAt_operator + latest: versionsPost_latest_operator + id: versionsPost_id_operator + AND: [versionsPost_where_and] + OR: [versionsPost_where_or] +} + +input versionsPost_parent_operator { + equals: JSON + not_equals: JSON + in: [JSON] + not_in: [JSON] + all: [JSON] + exists: Boolean +} + +input versionsPost_version__text_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input versionsPost_version__richText_operator { + equals: JSON + not_equals: JSON + like: JSON + contains: JSON + exists: Boolean +} + +input versionsPost_version__richText2_operator { + equals: JSON + not_equals: JSON + like: JSON + contains: JSON + exists: Boolean +} + +input versionsPost_version__updatedAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input versionsPost_version__createdAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input versionsPost_version___status_operator { + equals: versionsPost_version___status_Input + not_equals: versionsPost_version___status_Input + in: [versionsPost_version___status_Input] + not_in: [versionsPost_version___status_Input] + all: [versionsPost_version___status_Input] + exists: Boolean +} + +enum versionsPost_version___status_Input { + draft + published +} + +input versionsPost_createdAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input versionsPost_updatedAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input versionsPost_latest_operator { + equals: Boolean + not_equals: Boolean + exists: Boolean +} + +input versionsPost_id_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input versionsPost_where_and { + parent: 
versionsPost_parent_operator + version__text: versionsPost_version__text_operator + version__richText: versionsPost_version__richText_operator + version__richText2: versionsPost_version__richText2_operator + version__updatedAt: versionsPost_version__updatedAt_operator + version__createdAt: versionsPost_version__createdAt_operator + version___status: versionsPost_version___status_operator + createdAt: versionsPost_createdAt_operator + updatedAt: versionsPost_updatedAt_operator + latest: versionsPost_latest_operator + id: versionsPost_id_operator + AND: [versionsPost_where_and] + OR: [versionsPost_where_or] +} + +input versionsPost_where_or { + parent: versionsPost_parent_operator + version__text: versionsPost_version__text_operator + version__richText: versionsPost_version__richText_operator + version__richText2: versionsPost_version__richText2_operator + version__updatedAt: versionsPost_version__updatedAt_operator + version__createdAt: versionsPost_version__createdAt_operator + version___status: versionsPost_version___status_operator + createdAt: versionsPost_createdAt_operator + updatedAt: versionsPost_updatedAt_operator + latest: versionsPost_latest_operator + id: versionsPost_id_operator + AND: [versionsPost_where_and] + OR: [versionsPost_where_or] +} + +type User { + id: String + updatedAt: DateTime + createdAt: DateTime + email: EmailAddress! + resetPasswordToken: String + resetPasswordExpiration: DateTime + salt: String + hash: String + loginAttempts: Float + lockUntil: DateTime + password: String! +} + +""" +A field whose value conforms to the standard internet email address format as specified in HTML Spec: https://html.spec.whatwg.org/multipage/input.html#valid-e-mail-address. +""" +scalar EmailAddress + @specifiedBy(url: "https://html.spec.whatwg.org/multipage/input.html#valid-e-mail-address") + +type Users { + docs: [User] + hasNextPage: Boolean + hasPrevPage: Boolean + limit: Int + nextPage: Int + offset: Int + page: Int + pagingCounter: Int + prevPage: Int + totalDocs: Int + totalPages: Int +} + +input User_where { + updatedAt: User_updatedAt_operator + createdAt: User_createdAt_operator + email: User_email_operator + id: User_id_operator + AND: [User_where_and] + OR: [User_where_or] +} + +input User_updatedAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input User_createdAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input User_email_operator { + equals: EmailAddress + not_equals: EmailAddress + like: EmailAddress + contains: EmailAddress + in: [EmailAddress] + not_in: [EmailAddress] + all: [EmailAddress] +} + +input User_id_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input User_where_and { + updatedAt: User_updatedAt_operator + createdAt: User_createdAt_operator + email: User_email_operator + id: User_id_operator + AND: [User_where_and] + OR: [User_where_or] +} + +input User_where_or { + updatedAt: User_updatedAt_operator + createdAt: User_createdAt_operator + email: User_email_operator + id: User_id_operator + AND: [User_where_and] + OR: [User_where_or] +} + +type countUsers { + totalDocs: Int +} + +type usersDocAccess { + fields: UsersDocAccessFields + 
create: UsersCreateDocAccess + read: UsersReadDocAccess + update: UsersUpdateDocAccess + delete: UsersDeleteDocAccess + unlock: UsersUnlockDocAccess +} + +type UsersDocAccessFields { + updatedAt: UsersDocAccessFields_updatedAt + createdAt: UsersDocAccessFields_createdAt + email: UsersDocAccessFields_email + password: UsersDocAccessFields_password +} + +type UsersDocAccessFields_updatedAt { + create: UsersDocAccessFields_updatedAt_Create + read: UsersDocAccessFields_updatedAt_Read + update: UsersDocAccessFields_updatedAt_Update + delete: UsersDocAccessFields_updatedAt_Delete +} + +type UsersDocAccessFields_updatedAt_Create { + permission: Boolean! +} + +type UsersDocAccessFields_updatedAt_Read { + permission: Boolean! +} + +type UsersDocAccessFields_updatedAt_Update { + permission: Boolean! +} + +type UsersDocAccessFields_updatedAt_Delete { + permission: Boolean! +} + +type UsersDocAccessFields_createdAt { + create: UsersDocAccessFields_createdAt_Create + read: UsersDocAccessFields_createdAt_Read + update: UsersDocAccessFields_createdAt_Update + delete: UsersDocAccessFields_createdAt_Delete +} + +type UsersDocAccessFields_createdAt_Create { + permission: Boolean! +} + +type UsersDocAccessFields_createdAt_Read { + permission: Boolean! +} + +type UsersDocAccessFields_createdAt_Update { + permission: Boolean! +} + +type UsersDocAccessFields_createdAt_Delete { + permission: Boolean! +} + +type UsersDocAccessFields_email { + create: UsersDocAccessFields_email_Create + read: UsersDocAccessFields_email_Read + update: UsersDocAccessFields_email_Update + delete: UsersDocAccessFields_email_Delete +} + +type UsersDocAccessFields_email_Create { + permission: Boolean! +} + +type UsersDocAccessFields_email_Read { + permission: Boolean! +} + +type UsersDocAccessFields_email_Update { + permission: Boolean! +} + +type UsersDocAccessFields_email_Delete { + permission: Boolean! +} + +type UsersDocAccessFields_password { + create: UsersDocAccessFields_password_Create + read: UsersDocAccessFields_password_Read + update: UsersDocAccessFields_password_Update + delete: UsersDocAccessFields_password_Delete +} + +type UsersDocAccessFields_password_Create { + permission: Boolean! +} + +type UsersDocAccessFields_password_Read { + permission: Boolean! +} + +type UsersDocAccessFields_password_Update { + permission: Boolean! +} + +type UsersDocAccessFields_password_Delete { + permission: Boolean! +} + +type UsersCreateDocAccess { + permission: Boolean! + where: JSONObject +} + +type UsersReadDocAccess { + permission: Boolean! + where: JSONObject +} + +type UsersUpdateDocAccess { + permission: Boolean! + where: JSONObject +} + +type UsersDeleteDocAccess { + permission: Boolean! + where: JSONObject +} + +type UsersUnlockDocAccess { + permission: Boolean! + where: JSONObject +} + +type usersMe { + collection: String + exp: Int + token: String + user: User +} + +type PayloadPreference { + id: String + user: PayloadPreference_User_Relationship! 
+ key: String + value: JSON + updatedAt: DateTime + createdAt: DateTime +} + +type PayloadPreference_User_Relationship { + relationTo: PayloadPreference_User_RelationTo + value: PayloadPreference_User +} + +enum PayloadPreference_User_RelationTo { + users +} + +union PayloadPreference_User = User + +type PayloadPreferences { + docs: [PayloadPreference] + hasNextPage: Boolean + hasPrevPage: Boolean + limit: Int + nextPage: Int + offset: Int + page: Int + pagingCounter: Int + prevPage: Int + totalDocs: Int + totalPages: Int +} + +input PayloadPreference_where { + user: PayloadPreference_user_Relation + key: PayloadPreference_key_operator + value: PayloadPreference_value_operator + updatedAt: PayloadPreference_updatedAt_operator + createdAt: PayloadPreference_createdAt_operator + id: PayloadPreference_id_operator + AND: [PayloadPreference_where_and] + OR: [PayloadPreference_where_or] +} + +input PayloadPreference_user_Relation { + relationTo: PayloadPreference_user_Relation_RelationTo + value: JSON +} + +enum PayloadPreference_user_Relation_RelationTo { + users +} + +input PayloadPreference_key_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input PayloadPreference_value_operator { + equals: JSON + not_equals: JSON + like: JSON + contains: JSON + within: JSON + intersects: JSON + exists: Boolean +} + +input PayloadPreference_updatedAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input PayloadPreference_createdAt_operator { + equals: DateTime + not_equals: DateTime + greater_than_equal: DateTime + greater_than: DateTime + less_than_equal: DateTime + less_than: DateTime + like: DateTime + exists: Boolean +} + +input PayloadPreference_id_operator { + equals: String + not_equals: String + like: String + contains: String + in: [String] + not_in: [String] + all: [String] + exists: Boolean +} + +input PayloadPreference_where_and { + user: PayloadPreference_user_Relation + key: PayloadPreference_key_operator + value: PayloadPreference_value_operator + updatedAt: PayloadPreference_updatedAt_operator + createdAt: PayloadPreference_createdAt_operator + id: PayloadPreference_id_operator + AND: [PayloadPreference_where_and] + OR: [PayloadPreference_where_or] +} + +input PayloadPreference_where_or { + user: PayloadPreference_user_Relation + key: PayloadPreference_key_operator + value: PayloadPreference_value_operator + updatedAt: PayloadPreference_updatedAt_operator + createdAt: PayloadPreference_createdAt_operator + id: PayloadPreference_id_operator + AND: [PayloadPreference_where_and] + OR: [PayloadPreference_where_or] +} + +type countPayloadPreferences { + totalDocs: Int +} + +type payload_preferencesDocAccess { + fields: PayloadPreferencesDocAccessFields + create: PayloadPreferencesCreateDocAccess + read: PayloadPreferencesReadDocAccess + update: PayloadPreferencesUpdateDocAccess + delete: PayloadPreferencesDeleteDocAccess +} + +type PayloadPreferencesDocAccessFields { + user: PayloadPreferencesDocAccessFields_user + key: PayloadPreferencesDocAccessFields_key + value: PayloadPreferencesDocAccessFields_value + updatedAt: PayloadPreferencesDocAccessFields_updatedAt + createdAt: PayloadPreferencesDocAccessFields_createdAt +} + +type PayloadPreferencesDocAccessFields_user { + create: PayloadPreferencesDocAccessFields_user_Create + read: 
PayloadPreferencesDocAccessFields_user_Read + update: PayloadPreferencesDocAccessFields_user_Update + delete: PayloadPreferencesDocAccessFields_user_Delete +} + +type PayloadPreferencesDocAccessFields_user_Create { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_user_Read { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_user_Update { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_user_Delete { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_key { + create: PayloadPreferencesDocAccessFields_key_Create + read: PayloadPreferencesDocAccessFields_key_Read + update: PayloadPreferencesDocAccessFields_key_Update + delete: PayloadPreferencesDocAccessFields_key_Delete +} + +type PayloadPreferencesDocAccessFields_key_Create { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_key_Read { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_key_Update { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_key_Delete { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_value { + create: PayloadPreferencesDocAccessFields_value_Create + read: PayloadPreferencesDocAccessFields_value_Read + update: PayloadPreferencesDocAccessFields_value_Update + delete: PayloadPreferencesDocAccessFields_value_Delete +} + +type PayloadPreferencesDocAccessFields_value_Create { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_value_Read { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_value_Update { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_value_Delete { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_updatedAt { + create: PayloadPreferencesDocAccessFields_updatedAt_Create + read: PayloadPreferencesDocAccessFields_updatedAt_Read + update: PayloadPreferencesDocAccessFields_updatedAt_Update + delete: PayloadPreferencesDocAccessFields_updatedAt_Delete +} + +type PayloadPreferencesDocAccessFields_updatedAt_Create { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_updatedAt_Read { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_updatedAt_Update { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_updatedAt_Delete { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_createdAt { + create: PayloadPreferencesDocAccessFields_createdAt_Create + read: PayloadPreferencesDocAccessFields_createdAt_Read + update: PayloadPreferencesDocAccessFields_createdAt_Update + delete: PayloadPreferencesDocAccessFields_createdAt_Delete +} + +type PayloadPreferencesDocAccessFields_createdAt_Create { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_createdAt_Read { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_createdAt_Update { + permission: Boolean! +} + +type PayloadPreferencesDocAccessFields_createdAt_Delete { + permission: Boolean! +} + +type PayloadPreferencesCreateDocAccess { + permission: Boolean! + where: JSONObject +} + +type PayloadPreferencesReadDocAccess { + permission: Boolean! + where: JSONObject +} + +type PayloadPreferencesUpdateDocAccess { + permission: Boolean! + where: JSONObject +} + +type PayloadPreferencesDeleteDocAccess { + permission: Boolean! 
+ where: JSONObject +} + +type Menu { + globalText: String + updatedAt: DateTime + createdAt: DateTime +} + +type menuDocAccess { + fields: MenuDocAccessFields + read: MenuReadDocAccess + update: MenuUpdateDocAccess +} + +type MenuDocAccessFields { + globalText: MenuDocAccessFields_globalText + updatedAt: MenuDocAccessFields_updatedAt + createdAt: MenuDocAccessFields_createdAt +} + +type MenuDocAccessFields_globalText { + create: MenuDocAccessFields_globalText_Create + read: MenuDocAccessFields_globalText_Read + update: MenuDocAccessFields_globalText_Update + delete: MenuDocAccessFields_globalText_Delete +} + +type MenuDocAccessFields_globalText_Create { + permission: Boolean! +} + +type MenuDocAccessFields_globalText_Read { + permission: Boolean! +} + +type MenuDocAccessFields_globalText_Update { + permission: Boolean! +} + +type MenuDocAccessFields_globalText_Delete { + permission: Boolean! +} + +type MenuDocAccessFields_updatedAt { + create: MenuDocAccessFields_updatedAt_Create + read: MenuDocAccessFields_updatedAt_Read + update: MenuDocAccessFields_updatedAt_Update + delete: MenuDocAccessFields_updatedAt_Delete +} + +type MenuDocAccessFields_updatedAt_Create { + permission: Boolean! +} + +type MenuDocAccessFields_updatedAt_Read { + permission: Boolean! +} + +type MenuDocAccessFields_updatedAt_Update { + permission: Boolean! +} + +type MenuDocAccessFields_updatedAt_Delete { + permission: Boolean! +} + +type MenuDocAccessFields_createdAt { + create: MenuDocAccessFields_createdAt_Create + read: MenuDocAccessFields_createdAt_Read + update: MenuDocAccessFields_createdAt_Update + delete: MenuDocAccessFields_createdAt_Delete +} + +type MenuDocAccessFields_createdAt_Create { + permission: Boolean! +} + +type MenuDocAccessFields_createdAt_Read { + permission: Boolean! +} + +type MenuDocAccessFields_createdAt_Update { + permission: Boolean! +} + +type MenuDocAccessFields_createdAt_Delete { + permission: Boolean! +} + +type MenuReadDocAccess { + permission: Boolean! + where: JSONObject +} + +type MenuUpdateDocAccess { + permission: Boolean! + where: JSONObject +} + +type Access { + canAccessAdmin: Boolean! + posts: postsAccess + users: usersAccess + payload_preferences: payload_preferencesAccess + menu: menuAccess +} + +type postsAccess { + fields: PostsFields + create: PostsCreateAccess + read: PostsReadAccess + update: PostsUpdateAccess + delete: PostsDeleteAccess + readVersions: PostsReadVersionsAccess +} + +type PostsFields { + text: PostsFields_text + richText: PostsFields_richText + richText2: PostsFields_richText2 + updatedAt: PostsFields_updatedAt + createdAt: PostsFields_createdAt + _status: PostsFields__status +} + +type PostsFields_text { + create: PostsFields_text_Create + read: PostsFields_text_Read + update: PostsFields_text_Update + delete: PostsFields_text_Delete +} + +type PostsFields_text_Create { + permission: Boolean! +} + +type PostsFields_text_Read { + permission: Boolean! +} + +type PostsFields_text_Update { + permission: Boolean! +} + +type PostsFields_text_Delete { + permission: Boolean! +} + +type PostsFields_richText { + create: PostsFields_richText_Create + read: PostsFields_richText_Read + update: PostsFields_richText_Update + delete: PostsFields_richText_Delete +} + +type PostsFields_richText_Create { + permission: Boolean! +} + +type PostsFields_richText_Read { + permission: Boolean! +} + +type PostsFields_richText_Update { + permission: Boolean! +} + +type PostsFields_richText_Delete { + permission: Boolean! 
+} + +type PostsFields_richText2 { + create: PostsFields_richText2_Create + read: PostsFields_richText2_Read + update: PostsFields_richText2_Update + delete: PostsFields_richText2_Delete +} + +type PostsFields_richText2_Create { + permission: Boolean! +} + +type PostsFields_richText2_Read { + permission: Boolean! +} + +type PostsFields_richText2_Update { + permission: Boolean! +} + +type PostsFields_richText2_Delete { + permission: Boolean! +} + +type PostsFields_updatedAt { + create: PostsFields_updatedAt_Create + read: PostsFields_updatedAt_Read + update: PostsFields_updatedAt_Update + delete: PostsFields_updatedAt_Delete +} + +type PostsFields_updatedAt_Create { + permission: Boolean! +} + +type PostsFields_updatedAt_Read { + permission: Boolean! +} + +type PostsFields_updatedAt_Update { + permission: Boolean! +} + +type PostsFields_updatedAt_Delete { + permission: Boolean! +} + +type PostsFields_createdAt { + create: PostsFields_createdAt_Create + read: PostsFields_createdAt_Read + update: PostsFields_createdAt_Update + delete: PostsFields_createdAt_Delete +} + +type PostsFields_createdAt_Create { + permission: Boolean! +} + +type PostsFields_createdAt_Read { + permission: Boolean! +} + +type PostsFields_createdAt_Update { + permission: Boolean! +} + +type PostsFields_createdAt_Delete { + permission: Boolean! +} + +type PostsFields__status { + create: PostsFields__status_Create + read: PostsFields__status_Read + update: PostsFields__status_Update + delete: PostsFields__status_Delete +} + +type PostsFields__status_Create { + permission: Boolean! +} + +type PostsFields__status_Read { + permission: Boolean! +} + +type PostsFields__status_Update { + permission: Boolean! +} + +type PostsFields__status_Delete { + permission: Boolean! +} + +type PostsCreateAccess { + permission: Boolean! + where: JSONObject +} + +type PostsReadAccess { + permission: Boolean! + where: JSONObject +} + +type PostsUpdateAccess { + permission: Boolean! + where: JSONObject +} + +type PostsDeleteAccess { + permission: Boolean! + where: JSONObject +} + +type PostsReadVersionsAccess { + permission: Boolean! + where: JSONObject +} + +type usersAccess { + fields: UsersFields + create: UsersCreateAccess + read: UsersReadAccess + update: UsersUpdateAccess + delete: UsersDeleteAccess + unlock: UsersUnlockAccess +} + +type UsersFields { + updatedAt: UsersFields_updatedAt + createdAt: UsersFields_createdAt + email: UsersFields_email + password: UsersFields_password +} + +type UsersFields_updatedAt { + create: UsersFields_updatedAt_Create + read: UsersFields_updatedAt_Read + update: UsersFields_updatedAt_Update + delete: UsersFields_updatedAt_Delete +} + +type UsersFields_updatedAt_Create { + permission: Boolean! +} + +type UsersFields_updatedAt_Read { + permission: Boolean! +} + +type UsersFields_updatedAt_Update { + permission: Boolean! +} + +type UsersFields_updatedAt_Delete { + permission: Boolean! +} + +type UsersFields_createdAt { + create: UsersFields_createdAt_Create + read: UsersFields_createdAt_Read + update: UsersFields_createdAt_Update + delete: UsersFields_createdAt_Delete +} + +type UsersFields_createdAt_Create { + permission: Boolean! +} + +type UsersFields_createdAt_Read { + permission: Boolean! +} + +type UsersFields_createdAt_Update { + permission: Boolean! +} + +type UsersFields_createdAt_Delete { + permission: Boolean! 
+} + +type UsersFields_email { + create: UsersFields_email_Create + read: UsersFields_email_Read + update: UsersFields_email_Update + delete: UsersFields_email_Delete +} + +type UsersFields_email_Create { + permission: Boolean! +} + +type UsersFields_email_Read { + permission: Boolean! +} + +type UsersFields_email_Update { + permission: Boolean! +} + +type UsersFields_email_Delete { + permission: Boolean! +} + +type UsersFields_password { + create: UsersFields_password_Create + read: UsersFields_password_Read + update: UsersFields_password_Update + delete: UsersFields_password_Delete +} + +type UsersFields_password_Create { + permission: Boolean! +} + +type UsersFields_password_Read { + permission: Boolean! +} + +type UsersFields_password_Update { + permission: Boolean! +} + +type UsersFields_password_Delete { + permission: Boolean! +} + +type UsersCreateAccess { + permission: Boolean! + where: JSONObject +} + +type UsersReadAccess { + permission: Boolean! + where: JSONObject +} + +type UsersUpdateAccess { + permission: Boolean! + where: JSONObject +} + +type UsersDeleteAccess { + permission: Boolean! + where: JSONObject +} + +type UsersUnlockAccess { + permission: Boolean! + where: JSONObject +} + +type payload_preferencesAccess { + fields: PayloadPreferencesFields + create: PayloadPreferencesCreateAccess + read: PayloadPreferencesReadAccess + update: PayloadPreferencesUpdateAccess + delete: PayloadPreferencesDeleteAccess +} + +type PayloadPreferencesFields { + user: PayloadPreferencesFields_user + key: PayloadPreferencesFields_key + value: PayloadPreferencesFields_value + updatedAt: PayloadPreferencesFields_updatedAt + createdAt: PayloadPreferencesFields_createdAt +} + +type PayloadPreferencesFields_user { + create: PayloadPreferencesFields_user_Create + read: PayloadPreferencesFields_user_Read + update: PayloadPreferencesFields_user_Update + delete: PayloadPreferencesFields_user_Delete +} + +type PayloadPreferencesFields_user_Create { + permission: Boolean! +} + +type PayloadPreferencesFields_user_Read { + permission: Boolean! +} + +type PayloadPreferencesFields_user_Update { + permission: Boolean! +} + +type PayloadPreferencesFields_user_Delete { + permission: Boolean! +} + +type PayloadPreferencesFields_key { + create: PayloadPreferencesFields_key_Create + read: PayloadPreferencesFields_key_Read + update: PayloadPreferencesFields_key_Update + delete: PayloadPreferencesFields_key_Delete +} + +type PayloadPreferencesFields_key_Create { + permission: Boolean! +} + +type PayloadPreferencesFields_key_Read { + permission: Boolean! +} + +type PayloadPreferencesFields_key_Update { + permission: Boolean! +} + +type PayloadPreferencesFields_key_Delete { + permission: Boolean! +} + +type PayloadPreferencesFields_value { + create: PayloadPreferencesFields_value_Create + read: PayloadPreferencesFields_value_Read + update: PayloadPreferencesFields_value_Update + delete: PayloadPreferencesFields_value_Delete +} + +type PayloadPreferencesFields_value_Create { + permission: Boolean! +} + +type PayloadPreferencesFields_value_Read { + permission: Boolean! +} + +type PayloadPreferencesFields_value_Update { + permission: Boolean! +} + +type PayloadPreferencesFields_value_Delete { + permission: Boolean! 
+} + +type PayloadPreferencesFields_updatedAt { + create: PayloadPreferencesFields_updatedAt_Create + read: PayloadPreferencesFields_updatedAt_Read + update: PayloadPreferencesFields_updatedAt_Update + delete: PayloadPreferencesFields_updatedAt_Delete +} + +type PayloadPreferencesFields_updatedAt_Create { + permission: Boolean! +} + +type PayloadPreferencesFields_updatedAt_Read { + permission: Boolean! +} + +type PayloadPreferencesFields_updatedAt_Update { + permission: Boolean! +} + +type PayloadPreferencesFields_updatedAt_Delete { + permission: Boolean! +} + +type PayloadPreferencesFields_createdAt { + create: PayloadPreferencesFields_createdAt_Create + read: PayloadPreferencesFields_createdAt_Read + update: PayloadPreferencesFields_createdAt_Update + delete: PayloadPreferencesFields_createdAt_Delete +} + +type PayloadPreferencesFields_createdAt_Create { + permission: Boolean! +} + +type PayloadPreferencesFields_createdAt_Read { + permission: Boolean! +} + +type PayloadPreferencesFields_createdAt_Update { + permission: Boolean! +} + +type PayloadPreferencesFields_createdAt_Delete { + permission: Boolean! +} + +type PayloadPreferencesCreateAccess { + permission: Boolean! + where: JSONObject +} + +type PayloadPreferencesReadAccess { + permission: Boolean! + where: JSONObject +} + +type PayloadPreferencesUpdateAccess { + permission: Boolean! + where: JSONObject +} + +type PayloadPreferencesDeleteAccess { + permission: Boolean! + where: JSONObject +} + +type menuAccess { + fields: MenuFields + read: MenuReadAccess + update: MenuUpdateAccess +} + +type MenuFields { + globalText: MenuFields_globalText + updatedAt: MenuFields_updatedAt + createdAt: MenuFields_createdAt +} + +type MenuFields_globalText { + create: MenuFields_globalText_Create + read: MenuFields_globalText_Read + update: MenuFields_globalText_Update + delete: MenuFields_globalText_Delete +} + +type MenuFields_globalText_Create { + permission: Boolean! +} + +type MenuFields_globalText_Read { + permission: Boolean! +} + +type MenuFields_globalText_Update { + permission: Boolean! +} + +type MenuFields_globalText_Delete { + permission: Boolean! +} + +type MenuFields_updatedAt { + create: MenuFields_updatedAt_Create + read: MenuFields_updatedAt_Read + update: MenuFields_updatedAt_Update + delete: MenuFields_updatedAt_Delete +} + +type MenuFields_updatedAt_Create { + permission: Boolean! +} + +type MenuFields_updatedAt_Read { + permission: Boolean! +} + +type MenuFields_updatedAt_Update { + permission: Boolean! +} + +type MenuFields_updatedAt_Delete { + permission: Boolean! +} + +type MenuFields_createdAt { + create: MenuFields_createdAt_Create + read: MenuFields_createdAt_Read + update: MenuFields_createdAt_Update + delete: MenuFields_createdAt_Delete +} + +type MenuFields_createdAt_Create { + permission: Boolean! +} + +type MenuFields_createdAt_Read { + permission: Boolean! +} + +type MenuFields_createdAt_Update { + permission: Boolean! +} + +type MenuFields_createdAt_Delete { + permission: Boolean! +} + +type MenuReadAccess { + permission: Boolean! + where: JSONObject +} + +type MenuUpdateAccess { + permission: Boolean! 
+ where: JSONObject +} + +type Mutation { + createPost(data: mutationPostInput!, draft: Boolean): Post + updatePost(id: String!, autosave: Boolean, data: mutationPostUpdateInput!, draft: Boolean): Post + deletePost(id: String!): Post + duplicatePost(id: String!): Post + restoreVersionPost(id: String): Post + createUser(data: mutationUserInput!, draft: Boolean): User + updateUser(id: String!, autosave: Boolean, data: mutationUserUpdateInput!, draft: Boolean): User + deleteUser(id: String!): User + refreshTokenUser(token: String): usersRefreshedUser + logoutUser: String + unlockUser(email: String!): Boolean! + loginUser(email: String, password: String): usersLoginResult + forgotPasswordUser(disableEmail: Boolean, email: String!, expiration: Int): Boolean! + resetPasswordUser(password: String, token: String): usersResetPassword + verifyEmailUser(token: String): Boolean + createPayloadPreference(data: mutationPayloadPreferenceInput!, draft: Boolean): PayloadPreference + updatePayloadPreference( + id: String! + autosave: Boolean + data: mutationPayloadPreferenceUpdateInput! + draft: Boolean + ): PayloadPreference + deletePayloadPreference(id: String!): PayloadPreference + duplicatePayloadPreference(id: String!): PayloadPreference + updateMenu(data: mutationMenuInput!, draft: Boolean): Menu +} + +input mutationPostInput { + text: String + richText: JSON + richText2: JSON + updatedAt: String + createdAt: String + _status: Post__status_MutationInput +} + +enum Post__status_MutationInput { + draft + published +} + +input mutationPostUpdateInput { + text: String + richText: JSON + richText2: JSON + updatedAt: String + createdAt: String + _status: PostUpdate__status_MutationInput +} + +enum PostUpdate__status_MutationInput { + draft + published +} + +input mutationUserInput { + updatedAt: String + createdAt: String + email: String! + resetPasswordToken: String + resetPasswordExpiration: String + salt: String + hash: String + loginAttempts: Float + lockUntil: String + password: String! +} + +input mutationUserUpdateInput { + updatedAt: String + createdAt: String + email: String + resetPasswordToken: String + resetPasswordExpiration: String + salt: String + hash: String + loginAttempts: Float + lockUntil: String + password: String +} + +type usersRefreshedUser { + exp: Int + refreshedToken: String + user: usersJWT +} + +type usersJWT { + email: EmailAddress! + collection: String! 
+} + +type usersLoginResult { + exp: Int + token: String + user: User +} + +type usersResetPassword { + token: String + user: User +} + +input mutationPayloadPreferenceInput { + user: PayloadPreference_UserRelationshipInput + key: String + value: JSON + updatedAt: String + createdAt: String +} + +input PayloadPreference_UserRelationshipInput { + relationTo: PayloadPreference_UserRelationshipInputRelationTo + value: JSON +} + +enum PayloadPreference_UserRelationshipInputRelationTo { + users +} + +input mutationPayloadPreferenceUpdateInput { + user: PayloadPreferenceUpdate_UserRelationshipInput + key: String + value: JSON + updatedAt: String + createdAt: String +} + +input PayloadPreferenceUpdate_UserRelationshipInput { + relationTo: PayloadPreferenceUpdate_UserRelationshipInputRelationTo + value: JSON +} + +enum PayloadPreferenceUpdate_UserRelationshipInputRelationTo { + users +} + +input mutationMenuInput { + globalText: String + updatedAt: String + createdAt: String +} diff --git a/test/queues/seed.ts b/test/queues/seed.ts new file mode 100644 index 00000000000..5c1f1d91eb3 --- /dev/null +++ b/test/queues/seed.ts @@ -0,0 +1,30 @@ +import type { Payload } from 'payload' + +import path from 'path' +import { fileURLToPath } from 'url' + +import { devUser } from '../credentials.js' +import { seedDB } from '../helpers/seed.js' + +const filename = fileURLToPath(import.meta.url) +const dirname = path.dirname(filename) + +export const seed = async (_payload: Payload) => { + await _payload.create({ + collection: 'users', + data: { + email: devUser.email, + password: devUser.password, + }, + }) +} + +export async function clearAndSeedEverything(_payload: Payload) { + return await seedDB({ + _payload, + collectionSlugs: _payload.config.collections.map((collection) => collection.slug), + seedFunction: seed, + snapshotKey: 'fieldsTest', + uploadsDir: path.resolve(dirname, './collections/Upload/uploads'), + }) +} diff --git a/test/queues/tsconfig.eslint.json b/test/queues/tsconfig.eslint.json new file mode 100644 index 00000000000..b34cc7afbb8 --- /dev/null +++ b/test/queues/tsconfig.eslint.json @@ -0,0 +1,13 @@ +{ + // extend your base config to share compilerOptions, etc + //"extends": "./tsconfig.json", + "compilerOptions": { + // ensure that nobody can accidentally use this config for a build + "noEmit": true + }, + "include": [ + // whatever paths you intend to lint + "./**/*.ts", + "./**/*.tsx" + ] +} diff --git a/test/queues/tsconfig.json b/test/queues/tsconfig.json new file mode 100644 index 00000000000..3c43903cfdd --- /dev/null +++ b/test/queues/tsconfig.json @@ -0,0 +1,3 @@ +{ + "extends": "../tsconfig.json" +} From d89db00295147ee25d65eb8bb16add5bfc9295f8 Mon Sep 17 00:00:00 2001 From: Elliot DeNolf Date: Wed, 30 Oct 2024 14:25:34 -0400 Subject: [PATCH 19/19] chore(release): v3.0.0-beta.121 [skip ci] --- package.json | 2 +- packages/create-payload-app/package.json | 2 +- packages/db-mongodb/package.json | 2 +- packages/db-postgres/package.json | 2 +- packages/db-sqlite/package.json | 2 +- packages/db-vercel-postgres/package.json | 2 +- packages/drizzle/package.json | 2 +- packages/email-nodemailer/package.json | 2 +- packages/email-resend/package.json | 2 +- packages/graphql/package.json | 2 +- packages/live-preview-react/package.json | 2 +- packages/live-preview-vue/package.json | 2 +- packages/live-preview/package.json | 2 +- packages/next/package.json | 2 +- packages/payload-cloud/package.json | 2 +- packages/payload/package.json | 2 +- packages/plugin-cloud-storage/package.json | 2 +- 
packages/plugin-form-builder/package.json | 2 +- packages/plugin-nested-docs/package.json | 2 +- packages/plugin-redirects/package.json | 2 +- packages/plugin-search/package.json | 2 +- packages/plugin-sentry/package.json | 2 +- packages/plugin-seo/package.json | 2 +- packages/plugin-stripe/package.json | 2 +- packages/richtext-lexical/package.json | 2 +- packages/richtext-slate/package.json | 2 +- packages/storage-azure/package.json | 2 +- packages/storage-gcs/package.json | 2 +- packages/storage-s3/package.json | 2 +- packages/storage-uploadthing/package.json | 2 +- packages/storage-vercel-blob/package.json | 2 +- packages/translations/package.json | 2 +- packages/ui/package.json | 2 +- 33 files changed, 33 insertions(+), 33 deletions(-) diff --git a/package.json b/package.json index 77ecf266403..3e30a4d143b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "payload-monorepo", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "private": true, "type": "module", "scripts": { diff --git a/packages/create-payload-app/package.json b/packages/create-payload-app/package.json index fbb60212bab..f9e61496446 100644 --- a/packages/create-payload-app/package.json +++ b/packages/create-payload-app/package.json @@ -1,6 +1,6 @@ { "name": "create-payload-app", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "homepage": "https://payloadcms.com", "repository": { "type": "git", diff --git a/packages/db-mongodb/package.json b/packages/db-mongodb/package.json index 8c423607df4..e919b2f300a 100644 --- a/packages/db-mongodb/package.json +++ b/packages/db-mongodb/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/db-mongodb", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "The officially supported MongoDB database adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/db-postgres/package.json b/packages/db-postgres/package.json index 53d60755e05..cc44883d7d0 100644 --- a/packages/db-postgres/package.json +++ b/packages/db-postgres/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/db-postgres", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "The officially supported Postgres database adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/db-sqlite/package.json b/packages/db-sqlite/package.json index bfbed98e009..a4e29738618 100644 --- a/packages/db-sqlite/package.json +++ b/packages/db-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/db-sqlite", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "The officially supported SQLite database adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/db-vercel-postgres/package.json b/packages/db-vercel-postgres/package.json index c83a8eaf39b..952f1c33a4e 100644 --- a/packages/db-vercel-postgres/package.json +++ b/packages/db-vercel-postgres/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/db-vercel-postgres", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Vercel Postgres adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/drizzle/package.json b/packages/drizzle/package.json index a8cf75293e2..561820d1b80 100644 --- a/packages/drizzle/package.json +++ b/packages/drizzle/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/drizzle", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "A library of 
shared functions used by different payload database adapters", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/email-nodemailer/package.json b/packages/email-nodemailer/package.json index 6daf07fb36a..d4de94802fb 100644 --- a/packages/email-nodemailer/package.json +++ b/packages/email-nodemailer/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/email-nodemailer", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Payload Nodemailer Email Adapter", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/email-resend/package.json b/packages/email-resend/package.json index 4444814d3ae..92654c84c89 100644 --- a/packages/email-resend/package.json +++ b/packages/email-resend/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/email-resend", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Payload Resend Email Adapter", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/graphql/package.json b/packages/graphql/package.json index 47a52b901a3..ee143a47ff1 100644 --- a/packages/graphql/package.json +++ b/packages/graphql/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/graphql", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "homepage": "https://payloadcms.com", "repository": { "type": "git", diff --git a/packages/live-preview-react/package.json b/packages/live-preview-react/package.json index 6c658afd4e9..63cc78b85bd 100644 --- a/packages/live-preview-react/package.json +++ b/packages/live-preview-react/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/live-preview-react", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "The official React SDK for Payload Live Preview", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/live-preview-vue/package.json b/packages/live-preview-vue/package.json index a967aa09e58..130c16b469c 100644 --- a/packages/live-preview-vue/package.json +++ b/packages/live-preview-vue/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/live-preview-vue", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "The official Vue SDK for Payload Live Preview", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/live-preview/package.json b/packages/live-preview/package.json index ef32e30cee3..71cc073029f 100644 --- a/packages/live-preview/package.json +++ b/packages/live-preview/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/live-preview", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "The official live preview JavaScript SDK for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/next/package.json b/packages/next/package.json index 258e676cbc0..9fa20a953c2 100644 --- a/packages/next/package.json +++ b/packages/next/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/next", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "homepage": "https://payloadcms.com", "repository": { "type": "git", diff --git a/packages/payload-cloud/package.json b/packages/payload-cloud/package.json index 3a6003ab1ac..1d391a24172 100644 --- a/packages/payload-cloud/package.json +++ b/packages/payload-cloud/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/payload-cloud", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "The official Payload Cloud plugin", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/payload/package.json 
b/packages/payload/package.json index 3731fa1bdf9..7d80e02836d 100644 --- a/packages/payload/package.json +++ b/packages/payload/package.json @@ -1,6 +1,6 @@ { "name": "payload", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Node, React, Headless CMS and Application Framework built on Next.js", "keywords": [ "admin panel", diff --git a/packages/plugin-cloud-storage/package.json b/packages/plugin-cloud-storage/package.json index b138e66c264..0cbbca0c1fc 100644 --- a/packages/plugin-cloud-storage/package.json +++ b/packages/plugin-cloud-storage/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-cloud-storage", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "The official cloud storage plugin for Payload CMS", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/plugin-form-builder/package.json b/packages/plugin-form-builder/package.json index dc91c89a19c..ec920818f0d 100644 --- a/packages/plugin-form-builder/package.json +++ b/packages/plugin-form-builder/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-form-builder", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Form builder plugin for Payload CMS", "keywords": [ "payload", diff --git a/packages/plugin-nested-docs/package.json b/packages/plugin-nested-docs/package.json index c98f93b4394..a6cb33a7c13 100644 --- a/packages/plugin-nested-docs/package.json +++ b/packages/plugin-nested-docs/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-nested-docs", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "The official Nested Docs plugin for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/plugin-redirects/package.json b/packages/plugin-redirects/package.json index f43c9e0bbd4..8e46143f47d 100644 --- a/packages/plugin-redirects/package.json +++ b/packages/plugin-redirects/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-redirects", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Redirects plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-search/package.json b/packages/plugin-search/package.json index a2ea3bd906c..0df6cd168ff 100644 --- a/packages/plugin-search/package.json +++ b/packages/plugin-search/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-search", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Search plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-sentry/package.json b/packages/plugin-sentry/package.json index 8ece6d87739..fe6d70d245c 100644 --- a/packages/plugin-sentry/package.json +++ b/packages/plugin-sentry/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-sentry", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Sentry plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-seo/package.json b/packages/plugin-seo/package.json index 80233388990..13ca0bea842 100644 --- a/packages/plugin-seo/package.json +++ b/packages/plugin-seo/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/plugin-seo", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "SEO plugin for Payload", "keywords": [ "payload", diff --git a/packages/plugin-stripe/package.json b/packages/plugin-stripe/package.json index 03271ae7288..9074e4c1dcd 100644 --- a/packages/plugin-stripe/package.json +++ b/packages/plugin-stripe/package.json @@ -1,6 +1,6 @@ { 
"name": "@payloadcms/plugin-stripe", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Stripe plugin for Payload", "keywords": [ "payload", diff --git a/packages/richtext-lexical/package.json b/packages/richtext-lexical/package.json index 97f693b2ed1..09eda178855 100644 --- a/packages/richtext-lexical/package.json +++ b/packages/richtext-lexical/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/richtext-lexical", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "The officially supported Lexical richtext adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/richtext-slate/package.json b/packages/richtext-slate/package.json index 699e7304af6..61acd99f087 100644 --- a/packages/richtext-slate/package.json +++ b/packages/richtext-slate/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/richtext-slate", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "The officially supported Slate richtext adapter for Payload", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-azure/package.json b/packages/storage-azure/package.json index 1006083e4ec..70133e3c2d5 100644 --- a/packages/storage-azure/package.json +++ b/packages/storage-azure/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-azure", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Payload storage adapter for Azure Blob Storage", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-gcs/package.json b/packages/storage-gcs/package.json index 0649d6e1725..8ac5072a3e3 100644 --- a/packages/storage-gcs/package.json +++ b/packages/storage-gcs/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-gcs", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Payload storage adapter for Google Cloud Storage", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-s3/package.json b/packages/storage-s3/package.json index a5979a2d5d8..760ef5930d8 100644 --- a/packages/storage-s3/package.json +++ b/packages/storage-s3/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-s3", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Payload storage adapter for Amazon S3", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-uploadthing/package.json b/packages/storage-uploadthing/package.json index 3db4fa64d72..56d3db4c2aa 100644 --- a/packages/storage-uploadthing/package.json +++ b/packages/storage-uploadthing/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-uploadthing", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Payload storage adapter for uploadthing", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/storage-vercel-blob/package.json b/packages/storage-vercel-blob/package.json index de39e85da4b..b31121d6c7d 100644 --- a/packages/storage-vercel-blob/package.json +++ b/packages/storage-vercel-blob/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/storage-vercel-blob", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "description": "Payload storage adapter for Vercel Blob Storage", "homepage": "https://payloadcms.com", "repository": { diff --git a/packages/translations/package.json b/packages/translations/package.json index 6e39cedeb7c..c15c9779f38 100644 --- a/packages/translations/package.json +++ 
b/packages/translations/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/translations", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "homepage": "https://payloadcms.com", "repository": { "type": "git", diff --git a/packages/ui/package.json b/packages/ui/package.json index a77270dd446..f00d86545ae 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": "@payloadcms/ui", - "version": "3.0.0-beta.120", + "version": "3.0.0-beta.121", "homepage": "https://payloadcms.com", "repository": { "type": "git",